Example 1
  /**
   * Find all column families that are replicated from this cluster
   *
   * @return the full list of the replicated column families of this cluster as: tableName, family
   *     name, replicationType
   *     <p>Currently replicationType is Global. In the future, more replication types may be
   *     added here, for example: 1) replication that applies only to selected peers instead of
   *     all peers, or 2) a replicationType indicating that this cluster's servers act as the
   *     slave for the table:columnFam.
   */
  public List<HashMap<String, String>> listReplicated() throws IOException {
    List<HashMap<String, String>> replicationColFams = new ArrayList<HashMap<String, String>>();

    Admin admin = connection.getAdmin();
    HTableDescriptor[] tables;
    try {
      tables = admin.listTables();
    } finally {
      admin.close();
    }

    for (HTableDescriptor table : tables) {
      HColumnDescriptor[] columns = table.getColumnFamilies();
      String tableName = table.getNameAsString();
      for (HColumnDescriptor column : columns) {
        if (column.getScope() != HConstants.REPLICATION_SCOPE_LOCAL) {
          // At this moment, the column family is replicated to all peers
          HashMap<String, String> replicationEntry = new HashMap<String, String>();
          replicationEntry.put(TNAME, tableName);
          replicationEntry.put(CFNAME, column.getNameAsString());
          replicationEntry.put(REPLICATIONTYPE, REPLICATIONGLOBAL);
          replicationColFams.add(replicationEntry);
        }
      }
    }

    return replicationColFams;
  }
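
A column family appears in this listing only when its replication scope is something other than REPLICATION_SCOPE_LOCAL. The following is a minimal sketch, using the classic HBase 1.x client API, of creating a table whose family would be reported; the table and family names are illustrative, not taken from the example above.

  // Minimal sketch (illustrative names): create a table with a globally
  // replicated column family so that listReplicated() would report it.
  Configuration conf = HBaseConfiguration.create();
  try (Connection connection = ConnectionFactory.createConnection(conf);
      Admin admin = connection.getAdmin()) {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"));
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    hcd.setScope(HConstants.REPLICATION_SCOPE_GLOBAL); // any scope other than LOCAL is listed
    htd.addFamily(hcd);
    admin.createTable(htd);
  }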
Example 2
 /**
  * Serialize the column family to data block encoding map into the configuration. Invoked while
  * configuring the MR job for incremental load.
  *
  * @param tableDescriptor to read the properties from
  * @param conf to persist serialized values into
  * @throws IOException on failure to read column family descriptors
  */
 @VisibleForTesting
 static void configureDataBlockEncoding(HTableDescriptor tableDescriptor, Configuration conf)
     throws UnsupportedEncodingException {
   if (tableDescriptor == null) {
     // could happen with mock table instance
     return;
   }
   StringBuilder dataBlockEncodingConfigValue = new StringBuilder();
   Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
   int i = 0;
   for (HColumnDescriptor familyDescriptor : families) {
     if (i++ > 0) {
       dataBlockEncodingConfigValue.append('&');
     }
     dataBlockEncodingConfigValue.append(
         URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
     dataBlockEncodingConfigValue.append('=');
     DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();
     if (encoding == null) {
       encoding = DataBlockEncoding.NONE;
     }
     dataBlockEncodingConfigValue.append(URLEncoder.encode(encoding.toString(), "UTF-8"));
   }
   conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY, dataBlockEncodingConfigValue.toString());
 }
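
The serialized value is a URL-encoded "family=value" list joined with '&'. The hypothetical helper below (the method name is made up for illustration and is not the actual HBase counterpart) sketches how such a string could be decoded back into a per-family map on the consuming side.

 // Hypothetical decoding helper: parses the "family=value&family=value" string
 // written by methods such as configureDataBlockEncoding above.
 static Map<byte[], String> decodeFamilyConfValue(Configuration conf, String confKey)
     throws UnsupportedEncodingException {
   Map<byte[], String> result = new TreeMap<>(Bytes.BYTES_COMPARATOR);
   for (String pair : conf.get(confKey, "").split("&")) {
     if (pair.isEmpty()) {
       continue;
     }
     String[] parts = pair.split("=", 2);
     String family = URLDecoder.decode(parts[0], "UTF-8");
     String value = parts.length > 1 ? URLDecoder.decode(parts[1], "UTF-8") : "";
     result.put(Bytes.toBytes(family), value);
   }
   return result;
 }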
Example 3
 /**
  * Check whether region has Reference file
  *
  * @param htd table descriptor of the region
  * @return true if the region has a reference file for any of its column families
  * @throws IOException if the store files cannot be checked
  */
 public boolean hasReferences(final HTableDescriptor htd) throws IOException {
   for (HColumnDescriptor family : htd.getFamilies()) {
     if (hasReferences(family.getNameAsString())) {
       return true;
     }
   }
   return false;
 }

 /**
  * Add a column family to a table.
  * @param tableName name of the table to modify
  * @param hcd descriptor of the column family to add
  * @return Modified HTableDescriptor with the new column family added.
  * @throws IOException if the table descriptors cannot be read or updated
  */
 public HTableDescriptor addColumn(byte[] tableName, HColumnDescriptor hcd)
     throws IOException {
   LOG.info("AddColumn. Table = " + Bytes.toString(tableName) + " HCD = " +
     hcd.toString());
   HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
   if (htd == null) {
     throw new InvalidFamilyOperationException("Family '" +
       hcd.getNameAsString() + "' cannot be modified as HTD is null");
   }
   htd.addFamily(hcd);
   this.services.getTableDescriptors().add(htd);
   return htd;
 }
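
For comparison, client code outside the master can request the same change through the public Admin API; a minimal sketch follows (illustrative table and family names, and it assumes a Configuration object named conf is available).

 // Minimal sketch using the public HBase 1.x Admin API to add a column family.
 try (Connection connection = ConnectionFactory.createConnection(conf);
     Admin admin = connection.getAdmin()) {
   admin.addColumn(TableName.valueOf("demo_table"), new HColumnDescriptor("new_cf"));
 }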
Example 5
  private void populateMappingComboAndFamilyStuff() {
    String tableName = "";
    if (!Const.isEmpty(m_existingTableNamesCombo.getText().trim())) {
      tableName = m_existingTableNamesCombo.getText().trim();

      if (tableName.indexOf('@') > 0) {
        tableName = tableName.substring(0, tableName.indexOf('@'));
      }
    }

    // defaults if we fail to connect, the table doesn't exist, etc.
    m_familyCI.setComboValues(new String[] {""});
    m_existingMappingNamesCombo.removeAll();

    if (m_admin != null && !Const.isEmpty(tableName)) {
      try {

        // first get the existing mapping names (if any)
        List<String> mappingNames = m_admin.getMappingNames(tableName);
        for (String m : mappingNames) {
          m_existingMappingNamesCombo.add(m);
        }

        // now get family information for this table
        Configuration conf = m_admin.getConnection();
        HBaseAdmin admin = new HBaseAdmin(conf);

        if (admin.tableExists(tableName)) {
          HTableDescriptor descriptor = admin.getTableDescriptor(Bytes.toBytes(tableName));

          Collection<HColumnDescriptor> families = descriptor.getFamilies();
          String[] familyNames = new String[families.size()];
          int i = 0;
          for (HColumnDescriptor d : families) {
            familyNames[i++] = d.getNameAsString();
          }

          m_familyCI.setComboValues(familyNames);
        } else {
          m_familyCI.setComboValues(new String[] {""});
        }

        m_familiesInvalidated = false;
        return;

      } catch (Exception e) {
        // TODO popup error dialog
        e.printStackTrace();
      }
    }
  }
Example 6
  @Test
  public void testColumnFamily() throws Exception {
    DatasetProperties props =
        DatasetProperties.builder().add(Table.PROPERTY_COLUMN_FAMILY, "t").build();
    HBaseTableDefinition tableDefinition = new HBaseTableDefinition("foo");
    String tableName = "testcf";
    DatasetSpecification spec = tableDefinition.configure(tableName, props);

    DatasetAdmin admin =
        new HBaseTableAdmin(
            CONTEXT1,
            spec,
            testHBase.getConfiguration(),
            hBaseTableUtil,
            CConfiguration.create(),
            new LocalLocationFactory(tmpFolder.newFolder()));
    admin.create();
    final HBaseTable table =
        new HBaseTable(CONTEXT1, spec, cConf, testHBase.getConfiguration(), hBaseTableUtil);

    TransactionSystemClient txClient = new DetachedTxSystemClient();
    TransactionExecutor executor = new DefaultTransactionExecutor(txClient, table);
    executor.execute(
        new TransactionExecutor.Subroutine() {
          @Override
          public void apply() throws Exception {
            table.put(new Put("row", "column", "testValue"));
          }
        });

    final HBaseTable table2 =
        new HBaseTable(CONTEXT1, spec, cConf, testHBase.getConfiguration(), hBaseTableUtil);
    executor = new DefaultTransactionExecutor(txClient, table2);
    executor.execute(
        new TransactionExecutor.Subroutine() {
          @Override
          public void apply() throws Exception {
            Assert.assertEquals(
                "testValue", table2.get(new Get("row", "column")).getString("column"));
          }
        });

    // Verify the column family name
    HTableDescriptor htd =
        hBaseTableUtil.getHTableDescriptor(
            testHBase.getHBaseAdmin(), TableId.from(CONTEXT1.getNamespaceId(), tableName));
    HColumnDescriptor hcd = htd.getFamily(Bytes.toBytes("t"));
    Assert.assertNotNull(hcd);
    Assert.assertEquals("t", hcd.getNameAsString());
  }
Example 7
  public void addMobRegion(HRegionInfo regionInfo, HColumnDescriptor[] hcds) throws IOException {
    // 0. Get the ManifestBuilder/RegionVisitor
    RegionVisitor visitor = createRegionVisitor(desc);

    // 1. dump region meta info into the snapshot directory
    LOG.debug("Storing mob region '" + regionInfo + "' region-info for snapshot.");
    Object regionData = visitor.regionOpen(regionInfo);
    monitor.rethrowException();

    // 2. iterate through all the stores in the region
    LOG.debug("Creating references for mob files");

    Path mobRegionPath = MobUtils.getMobRegionPath(conf, regionInfo.getTable());
    for (HColumnDescriptor hcd : hcds) {
      // 2.1. build the snapshot reference for the store if it's a mob store
      if (!hcd.isMobEnabled()) {
        continue;
      }
      Object familyData = visitor.familyOpen(regionData, hcd.getName());
      monitor.rethrowException();

      Path storePath = MobUtils.getMobFamilyPath(mobRegionPath, hcd.getNameAsString());
      List<StoreFileInfo> storeFiles = getStoreFiles(storePath);
      if (storeFiles == null) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("No mob files under family: " + hcd.getNameAsString());
        }
        continue;
      }

      addReferenceFiles(visitor, regionData, familyData, storeFiles, true);

      visitor.familyClose(regionData, familyData);
    }
    visitor.regionClose(regionData);
  }
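
The loop above only creates snapshot references for families that are MOB-enabled. As a reminder of where that flag comes from, here is a minimal sketch (illustrative names, HBase 1.2+ MOB settings) of declaring such a family on a table descriptor.

  // Minimal sketch (illustrative names): a family is treated as a mob store when
  // it is MOB-enabled; values above the threshold are written to mob files,
  // which is what addMobRegion() references in the snapshot.
  HColumnDescriptor hcd = new HColumnDescriptor("mob_cf");
  hcd.setMobEnabled(true);
  hcd.setMobThreshold(100L * 1024L); // values larger than ~100 KB go to mob files
  htd.addFamily(hcd); // htd is an existing HTableDescriptor for the table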
Example 8
  /**
   * Add a single column value.
   *
   * @param tableName table name
   * @param rowkey row key
   * @param col column to write (family, qualifier and value)
   */
  public void addColumn(String tableName, String rowkey, HbaseColumn col) {
    try {
      HTable table = new HTable(conf, Bytes.toBytes(tableName)); // HTable handles record-level operations (create, read, update, delete)
      HColumnDescriptor[] cfstmp = table.getTableDescriptor().getColumnFamilies(); // get all column families
      ArrayList<String> cfs = new ArrayList<>();
      for (HColumnDescriptor cf : cfstmp) {
        cfs.add(cf.getNameAsString()); // collect the column family names
      }

      Put put = new Put(Bytes.toBytes(rowkey)); // set the row key
      if (cfs.contains(col.cf)) {
        put.add(Bytes.toBytes(col.cf), Bytes.toBytes(col.col), Bytes.toBytes(col.value));
      }
      table.put(put);
    } catch (Exception e) {
      logger.error("addColumn failed", e);
    }
  }
Example 9
  /**
   * Add data to a table.
   *
   * @param tableName table name
   * @param rowKey row key
   * @param columnFamily column family name
   * @param column column (qualifier) name
   * @param value column value
   */
  public void addData(
      String tableName, String rowKey, String columnFamily, String column, String value) {
    try {
      // get the table
      HTable table = new HTable(conf, Bytes.toBytes(tableName)); // HTable handles record-level operations (create, read, update, delete)
      HColumnDescriptor[] cfs = table.getTableDescriptor().getColumnFamilies(); // get all column families
      Put put = new Put(Bytes.toBytes(rowKey)); // set the row key

      for (HColumnDescriptor cf : cfs) {
        String familyName = cf.getNameAsString(); // get the column family name
        if (!familyName.equals(columnFamily)) {
          continue;
        }
        // the column family matches, so add the cell to the Put
        put.add(Bytes.toBytes(familyName), Bytes.toBytes(column), Bytes.toBytes(value));
      }
      table.put(put);
    } catch (Exception e) {
      logger.error("addData failed", e);
    }
  }
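
For reference, the same write can be expressed with the non-deprecated HBase 1.x client API (Connection/Table and Put#addColumn); a minimal sketch with illustrative table, family, qualifier and value names, assuming the same conf object:

      // Minimal sketch of the equivalent write using the newer client API.
      try (Connection connection = ConnectionFactory.createConnection(conf);
          Table table = connection.getTable(TableName.valueOf("user_table"))) {
        Put put = new Put(Bytes.toBytes("row-001"));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("alice"));
        table.put(put);
      }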
Example 10
 /**
  * Serialize the column family to block size map into the configuration. Invoked while configuring
  * the MR job for incremental load.
  *
  * @param tableDescriptor to read the properties from
  * @param conf to persist serialized values into
  * @throws IOException on failure to read column family descriptors
  */
 @VisibleForTesting
 static void configureBlockSize(HTableDescriptor tableDescriptor, Configuration conf)
     throws UnsupportedEncodingException {
   StringBuilder blockSizeConfigValue = new StringBuilder();
   if (tableDescriptor == null) {
     // could happen with mock table instance
     return;
   }
   Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
   int i = 0;
   for (HColumnDescriptor familyDescriptor : families) {
     if (i++ > 0) {
       blockSizeConfigValue.append('&');
     }
     blockSizeConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
     blockSizeConfigValue.append('=');
     blockSizeConfigValue.append(
         URLEncoder.encode(String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
   }
   conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, blockSizeConfigValue.toString());
 }
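
The block size serialized here is a per-family property of the table descriptor; a minimal sketch (illustrative family name) of where it is set:

   // Minimal sketch: the value read back by configureBlockSize is set per family.
   HColumnDescriptor hcd = new HColumnDescriptor("cf");
   hcd.setBlocksize(128 * 1024); // 128 KB HFile blocks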
Example 11
 public MemStoreWrapper(
     Context context,
     FileSystem fs,
     BufferedMutator table,
     HColumnDescriptor hcd,
     MemStore memstore,
     CacheConfig cacheConfig)
     throws IOException {
   this.memstore = memstore;
   this.context = context;
   this.fs = fs;
   this.table = table;
   this.hcd = hcd;
   this.conf = context.getConfiguration();
   this.cacheConfig = cacheConfig;
   flushSize =
       this.conf.getLong(
           MobConstants.MOB_SWEEP_TOOL_COMPACTION_MEMSTORE_FLUSH_SIZE,
           MobConstants.DEFAULT_MOB_SWEEP_TOOL_COMPACTION_MEMSTORE_FLUSH_SIZE);
   mobFamilyDir = MobUtils.getMobFamilyPath(conf, table.getName(), hcd.getNameAsString());
   cryptoContext = EncryptionUtil.createEncryptionContext(conf, hcd);
 }
Example 12
 /**
  * Serialize the column family to bloom type map into the configuration. Invoked while configuring
  * the MR job for incremental load.
  *
  * @param tableDescriptor to read the properties from
  * @param conf to persist serialized values into
  * @throws IOException on failure to read column family descriptors
  */
 @VisibleForTesting
 static void configureBloomType(HTableDescriptor tableDescriptor, Configuration conf)
     throws UnsupportedEncodingException {
   if (tableDescriptor == null) {
     // could happen with mock table instance
     return;
   }
   StringBuilder bloomTypeConfigValue = new StringBuilder();
   Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
   int i = 0;
   for (HColumnDescriptor familyDescriptor : families) {
     if (i++ > 0) {
       bloomTypeConfigValue.append('&');
     }
     bloomTypeConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
     bloomTypeConfigValue.append('=');
      String bloomType;
      if (familyDescriptor.getBloomFilterType() == null) {
        bloomType = HColumnDescriptor.DEFAULT_BLOOMFILTER;
      } else {
        bloomType = familyDescriptor.getBloomFilterType().toString();
      }
     bloomTypeConfigValue.append(URLEncoder.encode(bloomType, "UTF-8"));
   }
   conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, bloomTypeConfigValue.toString());
 }
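
Likewise, the bloom filter type comes from a per-family setting; a minimal sketch (illustrative family name):

   // Minimal sketch: the value read back by configureBloomType is set per family.
   HColumnDescriptor hcd = new HColumnDescriptor("cf");
   hcd.setBloomFilterType(BloomType.ROWCOL); // BloomType.ROW is the usual default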
Example 13
 /**
  * Serialize the column family to compression algorithm map into the configuration. Invoked while
  * configuring the MR job for incremental load.
  *
  * @param tableDescriptor to read the properties from
  * @param conf to persist serialized values into
  * @throws IOException on failure to read column family descriptors
  */
 @edu.umd.cs.findbugs.annotations.SuppressWarnings(
     value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
 @VisibleForTesting
 static void configureCompression(Configuration conf, HTableDescriptor tableDescriptor)
     throws UnsupportedEncodingException {
   StringBuilder compressionConfigValue = new StringBuilder();
   if (tableDescriptor == null) {
     // could happen with mock table instance
     return;
   }
   Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
   int i = 0;
   for (HColumnDescriptor familyDescriptor : families) {
     if (i++ > 0) {
       compressionConfigValue.append('&');
     }
     compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
     compressionConfigValue.append('=');
     compressionConfigValue.append(
         URLEncoder.encode(familyDescriptor.getCompressionType().getName(), "UTF-8"));
   }
   conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
 }
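
The compression algorithm, like the data block encoding in Example 2, follows the same pattern; a minimal sketch (illustrative family name) of the corresponding per-family settings:

   // Minimal sketch: the values read back by configureCompression and
   // configureDataBlockEncoding are set per family on the descriptor.
   HColumnDescriptor hcd = new HColumnDescriptor("cf");
   hcd.setCompressionType(Compression.Algorithm.SNAPPY); // requires the Snappy codec on the cluster
   hcd.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);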
Example 14
 private String getColumnFamilyName() {
   return cfDescriptor.getNameAsString();
 }
Example 15
  @Test(timeout = 30000)
  public void testBulkLoad() throws IOException {
    // Create table then get the single region for our new table.
    LOG.debug("Creating test table");
    HTableDescriptor hdt = HTU.createTableDescriptor("testBulkLoad");
    hdt.setRegionReplication(NB_SERVERS);
    hdt.addCoprocessor(SlowMeCopro.class.getName());
    Table table = HTU.createTable(hdt, new byte[][] {f}, HTU.getConfiguration());

    // create hfiles to load.
    LOG.debug("Creating test data");
    Path dir = HTU.getDataTestDirOnTestFS("testBulkLoad");
    final int numRows = 10;
    final byte[] qual = Bytes.toBytes("qual");
    final byte[] val = Bytes.toBytes("val");
    final List<Pair<byte[], String>> famPaths = new ArrayList<Pair<byte[], String>>();
    for (HColumnDescriptor col : hdt.getColumnFamilies()) {
      Path hfile = new Path(dir, col.getNameAsString());
      TestHRegionServerBulkLoad.createHFile(
          HTU.getTestFileSystem(), hfile, col.getName(), qual, val, numRows);
      famPaths.add(new Pair<byte[], String>(col.getName(), hfile.toString()));
    }

    // bulk load HFiles
    LOG.debug("Loading test data");
    @SuppressWarnings("deprecation")
    final HConnection conn = HTU.getHBaseAdmin().getConnection();
    RegionServerCallable<Void> callable =
        new RegionServerCallable<Void>(
            conn, hdt.getTableName(), TestHRegionServerBulkLoad.rowkey(0)) {
          @Override
          public Void call(int timeout) throws Exception {
            LOG.debug(
                "Going to connect to server "
                    + getLocation()
                    + " for row "
                    + Bytes.toStringBinary(getRow()));
            byte[] regionName = getLocation().getRegionInfo().getRegionName();
            BulkLoadHFileRequest request =
                RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName, true);
            getStub().bulkLoadHFile(null, request);
            return null;
          }
        };
    RpcRetryingCallerFactory factory = new RpcRetryingCallerFactory(HTU.getConfiguration());
    RpcRetryingCaller<Void> caller = factory.<Void>newCaller();
    caller.callWithRetries(callable, 10000);

    // verify we can read them from the primary
    LOG.debug("Verifying data load");
    for (int i = 0; i < numRows; i++) {
      byte[] row = TestHRegionServerBulkLoad.rowkey(i);
      Get g = new Get(row);
      Result r = table.get(g);
      Assert.assertFalse(r.isStale());
    }

    // verify we can read them from the replica
    LOG.debug("Verifying replica queries");
    try {
      SlowMeCopro.cdl.set(new CountDownLatch(1));
      for (int i = 0; i < numRows; i++) {
        byte[] row = TestHRegionServerBulkLoad.rowkey(i);
        Get g = new Get(row);
        g.setConsistency(Consistency.TIMELINE);
        Result r = table.get(g);
        Assert.assertTrue(r.isStale());
      }
      SlowMeCopro.cdl.get().countDown();
    } finally {
      SlowMeCopro.cdl.get().countDown();
      SlowMeCopro.sleepTime.set(0);
    }

    HTU.getHBaseAdmin().disableTable(hdt.getTableName());
    HTU.deleteTable(hdt.getTableName());
  }
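
Outside of a test like this, the same HFiles would more commonly be loaded with the LoadIncrementalHFiles tool instead of a hand-built RegionServerCallable. The outline below is only a hedged sketch (exact constructor and throws clauses vary slightly across 1.x releases), and it expects a directory layout with one sub-directory of HFiles per column family, which differs from the flat layout the test creates above.

    // Minimal sketch (assumed directory layout: dir/<family>/<hfile>).
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
        Admin admin = connection.getAdmin();
        Table t = connection.getTable(hdt.getTableName());
        RegionLocator locator = connection.getRegionLocator(hdt.getTableName())) {
      LoadIncrementalHFiles loader = new LoadIncrementalHFiles(HTU.getConfiguration());
      loader.doBulkLoad(dir, admin, t, locator);
    }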