@Override
 void perform() throws IOException {
   Admin admin = connection.getAdmin();
   try {
     HTableDescriptor htd = createTableDesc();
     TableName tableName = htd.getTableName();
     if (admin.tableExists(tableName)) {
       return;
     }
     String numRegionKey = String.format(NUM_REGIONS_KEY, this.getClass().getSimpleName());
     numRegions = getConf().getInt(numRegionKey, DEFAULT_NUM_REGIONS);
     byte[] startKey = Bytes.toBytes("row-0000000000");
     byte[] endKey = Bytes.toBytes("row-" + Integer.MAX_VALUE);
     LOG.info("Creating table:" + htd);
     admin.createTable(htd, startKey, endKey, numRegions);
     Assert.assertTrue("Table: " + htd + " was not created", admin.tableExists(tableName));
     HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
     enabledTables.put(tableName, freshTableDesc);
     LOG.info("Created table:" + freshTableDesc);
   } catch (Exception e) {
     LOG.warn("Caught exception in action: " + this.getClass());
     throw e;
   } finally {
     admin.close();
   }
   verifyTables();
 }
 // Helper: creates a "people" table with three column families using the HBaseAdmin API.
 public static void createTable(HBaseConfiguration conf) throws IOException {
   // try-with-resources ensures the admin connection is closed
   try (HBaseAdmin admin = new HBaseAdmin(conf)) {
     HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf("people"));
     tableDescriptor.addFamily(new HColumnDescriptor("personal"));
     tableDescriptor.addFamily(new HColumnDescriptor("contactinfo"));
     tableDescriptor.addFamily(new HColumnDescriptor("creditcard"));
     admin.createTable(tableDescriptor);
   }
 }
 private HTableDescriptor createTableDesc() {
   String tableName =
       "ittable-" + String.format("%010d", RandomUtils.nextInt(Integer.MAX_VALUE));
    // bound nextInt so the family-name suffix is always non-negative
    String familyName = "cf-" + RandomUtils.nextInt(Integer.MAX_VALUE);
   HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
   // add random column family
   htd.addFamily(new HColumnDescriptor(familyName));
   return htd;
 }
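 // Creates a table descriptor with a single MOB-enabled column family; a threshold of 0
 // means every cell is stored as a MOB file.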
 @Override
 public HTableDescriptor createHtd(final String tableName) {
   HTableDescriptor htd = new HTableDescriptor(tableName);
   HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
   hcd.setMobEnabled(true);
   hcd.setMobThreshold(0L);
   htd.addFamily(hcd);
   return htd;
 }
 // Helper: creates a table with the given column families; the Admin is closed automatically
 // by the try-with-resources block.
 public void createTable(String tableName, List<String> columnFamilies) {
   try (Admin admin = connection.getAdmin()) {
     HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(tableName));
     for (String family : columnFamilies) {
       descriptor.addFamily(new HColumnDescriptor(family));
     }
     admin.createTable(descriptor);
   } catch (IOException e) {
     e.printStackTrace();
   }
 }
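     // Action: pick a disabled table and a random column family, alter its min/max
     // versions to a random value, and verify the change took effect.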
    @Override
    void perform() throws IOException {
      HTableDescriptor selected = selectTable(disabledTables);
      if (selected == null) {
        return;
      }
      HColumnDescriptor columnDesc = selectFamily(selected);
      if (columnDesc == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      int versions = RandomUtils.nextInt(10) + 3;
      try {
        TableName tableName = selected.getTableName();
        LOG.info(
            "Altering versions of column family: "
                + columnDesc
                + " to: "
                + versions
                + " in table: "
                + tableName);
        columnDesc.setMinVersions(versions);
        columnDesc.setMaxVersions(versions);
        admin.modifyTable(tableName, selected);
        // assertion
        HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
        HColumnDescriptor freshColumnDesc = freshTableDesc.getFamily(columnDesc.getName());
         // JUnit's assertEquals expects (message, expected, actual)
         Assert.assertEquals(
             "Column family: " + columnDesc + " was not altered",
             versions,
             freshColumnDesc.getMaxVersions());
         Assert.assertEquals(
             "Column family: " + freshColumnDesc + " was not altered",
             versions,
             freshColumnDesc.getMinVersions());
        LOG.info(
            "Altered versions of column family: "
                + columnDesc
                + " to: "
                + versions
                + " in table: "
                + tableName);
        disabledTables.put(tableName, freshTableDesc);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        throw e;
      } finally {
        admin.close();
      }
      verifyTables();
    }
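     // Action: pick a disabled table and a random column family, switch its data block
     // encoding to a randomly chosen valid encoding, and verify it.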
    @Override
    void perform() throws IOException {
      HTableDescriptor selected = selectTable(disabledTables);
      if (selected == null) {
        return;
      }
      HColumnDescriptor columnDesc = selectFamily(selected);
      if (columnDesc == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      try {
        TableName tableName = selected.getTableName();
        // possible DataBlockEncoding ids
        int[] possibleIds = {0, 2, 3, 4, 6};
        short id = (short) possibleIds[RandomUtils.nextInt(possibleIds.length)];
        LOG.info(
            "Altering encoding of column family: "
                + columnDesc
                + " to: "
                + id
                + " in table: "
                + tableName);
        columnDesc.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id));
        admin.modifyTable(tableName, selected);
        // assertion
        HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
        HColumnDescriptor freshColumnDesc = freshTableDesc.getFamily(columnDesc.getName());
         Assert.assertEquals(
             "Encoding of column family: " + columnDesc + " was not altered",
             id,
             freshColumnDesc.getDataBlockEncoding().getId());
        LOG.info(
            "Altered encoding of column family: "
                + freshColumnDesc
                + " to: "
                + id
                + " in table: "
                + tableName);
        disabledTables.put(tableName, freshTableDesc);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        throw e;
      } finally {
        admin.close();
      }
      verifyTables();
    }
     // Action: populate a randomly selected enabled table with random rows (roughly one per region)
    @Override
    void perform() throws IOException {
      HTableDescriptor selected = selectTable(enabledTables);
      if (selected == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      TableName tableName = selected.getTableName();
      try (Table table = connection.getTable(tableName)) {
        ArrayList<HRegionInfo> regionInfos =
            new ArrayList<HRegionInfo>(admin.getTableRegions(selected.getTableName()));
        int numRegions = regionInfos.size();
        // average number of rows to be added per action to each region
        int averageRows = 1;
        int numRows = averageRows * numRegions;
        LOG.info("Adding " + numRows + " rows to table: " + selected);
        for (int i = 0; i < numRows; i++) {
          // nextInt(Integer.MAX_VALUE) to return positive numbers only
          byte[] rowKey =
              Bytes.toBytes(
                  "row-" + String.format("%010d", RandomUtils.nextInt(Integer.MAX_VALUE)));
          HColumnDescriptor cfd = selectFamily(selected);
          if (cfd == null) {
            return;
          }
          byte[] family = cfd.getName();
          byte[] qualifier = Bytes.toBytes("col-" + RandomUtils.nextInt(Integer.MAX_VALUE) % 10);
          byte[] value = Bytes.toBytes("val-" + RandomStringUtils.randomAlphanumeric(10));
          Put put = new Put(rowKey);
          put.addColumn(family, qualifier, value);
          table.put(put);
        }
        HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
        enabledTables.put(tableName, freshTableDesc);
        LOG.info("Added " + numRows + " rows to table: " + selected);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        throw e;
      } finally {
        admin.close();
      }
      verifyTables();
    }
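     // Action: enable a randomly selected disabled table and move it to the enabled set.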
    @Override
    void perform() throws IOException {

      HTableDescriptor selected = selectTable(disabledTables);
      if (selected == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      try {
        TableName tableName = selected.getTableName();
        LOG.info("Enabling table :" + selected);
        admin.enableTable(tableName);
        Assert.assertTrue(
            "Table: " + selected + " was not enabled", admin.isTableEnabled(tableName));
        HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
        enabledTables.put(tableName, freshTableDesc);
        LOG.info("Enabled table :" + freshTableDesc);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        // TODO workaround:
        // Loosen the restriction for TableNotDisabledException/TableNotEnabledException thrown by
        // synchronous operations:
        // 1) when enable/disable starts, the table state changes to ENABLING/DISABLING (a ZK node
        //    in 1.x), and changes again to ENABLED/DISABLED once the operation completes;
        // 2) if a master failover happens in the middle of the enable/disable operation, the new
        //    master tries to recover tables in ENABLING/DISABLING state, as programmed in
        //    AssignmentManager#recoverTableInEnablingState() and
        //    AssignmentManager#recoverTableInDisablingState();
        // 3) after the new master's initialization completes, the procedure tries to re-do the
        //    enable/disable operation, which was already done.
        // Ignore those exceptions until the behavior of AssignmentManager changes in the
        // presence of PV2.
        if (e instanceof TableNotDisabledException) {
          LOG.warn("Caught TableNotDisabledException in action: " + this.getClass());
          e.printStackTrace();
        } else {
          throw e;
        }
      } finally {
        admin.close();
      }
      verifyTables();
    }
 // ColumnAction has implemented selectFamily() shared by multiple family Actions
 protected HColumnDescriptor selectFamily(HTableDescriptor htd) {
   if (htd == null) {
     return null;
   }
   HColumnDescriptor[] families = htd.getColumnFamilies();
   if (families.length == 0) {
     LOG.info("No column families in table: " + htd);
     return null;
   }
   HColumnDescriptor randomCfd = families[RandomUtils.nextInt(families.length)];
   return randomCfd;
 }
 /** Create the Mob Table. */
 public static void createMobTable(
     final HBaseTestingUtility util,
     final TableName tableName,
     int regionReplication,
     final byte[]... families)
     throws IOException, InterruptedException {
   HTableDescriptor htd = new HTableDescriptor(tableName);
   htd.setRegionReplication(regionReplication);
   for (byte[] family : families) {
     HColumnDescriptor hcd = new HColumnDescriptor(family);
     hcd.setMobEnabled(true);
     hcd.setMobThreshold(0L);
     htd.addFamily(hcd);
   }
   byte[][] splitKeys = SnapshotTestingUtils.getSplitKeys();
   util.getHBaseAdmin().createTable(htd, splitKeys);
   SnapshotTestingUtils.waitForTableToBeOnline(util, tableName);
   assertEquals(
       (splitKeys.length + 1) * regionReplication,
       util.getHBaseAdmin().getTableRegions(tableName).size());
 }
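     // Action: delete a random column family from a disabled table (skipped when the table
     // has only one family) and verify that it is gone.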
    @Override
    void perform() throws IOException {
      HTableDescriptor selected = selectTable(disabledTables);
      HColumnDescriptor cfd = selectFamily(selected);
      if (selected == null || cfd == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      try {
        if (selected.getColumnFamilies().length < 2) {
          LOG.info("No enough column families to delete in table " + selected.getTableName());
          return;
        }
        TableName tableName = selected.getTableName();
        LOG.info("Deleting column family: " + cfd + " from table: " + tableName);
        admin.deleteColumnFamily(tableName, cfd.getName());
        // assertion
        HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
        Assert.assertFalse(
            "Column family: " + cfd + " was not deleted", freshTableDesc.hasFamily(cfd.getName()));
        LOG.info("Deleted column family: " + cfd + " from table: " + tableName);
        disabledTables.put(tableName, freshTableDesc);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        throw e;
      } finally {
        admin.close();
      }
      verifyTables();
    }
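     // Action: add a freshly generated column family to a disabled table and verify it exists.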
    @Override
    void perform() throws IOException {
      HTableDescriptor selected = selectTable(disabledTables);
      if (selected == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      try {
        HColumnDescriptor cfd = createFamilyDesc();
        if (selected.hasFamily(cfd.getName())) {
          LOG.info(
              Bytes.toString(cfd.getName()) + " already exists in table " + selected.getTableName());
          return;
        }
        TableName tableName = selected.getTableName();
        LOG.info("Adding column family: " + cfd + " to table: " + tableName);
        admin.addColumn(tableName, cfd);
        // assertion
        HTableDescriptor freshTableDesc = admin.getTableDescriptor(tableName);
        Assert.assertTrue(
            "Column family: " + cfd + " was not added", freshTableDesc.hasFamily(cfd.getName()));
        LOG.info("Added column family: " + cfd + " to table: " + tableName);
        disabledTables.put(tableName, freshTableDesc);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        throw e;
      } finally {
        admin.close();
      }
      verifyTables();
    }
 /**
  * Create a MOB table.
  *
  * @param util the HBaseTestingUtility to use for creating the table
  * @param tableName the name of the table to create
  * @param families the column families to add, each MOB-enabled
  * @return a Table instance for the created table
  * @throws IOException if the table cannot be created
  */
 public static Table createMobTable(
     final HBaseTestingUtility util, final TableName tableName, final byte[]... families)
     throws IOException {
   HTableDescriptor htd = new HTableDescriptor(tableName);
   for (byte[] family : families) {
     HColumnDescriptor hcd = new HColumnDescriptor(family);
      // Disable blooms (they are on by default as of 0.95); tests have hard-coded
      // counts of what to expect in the block cache, etc., and having blooms on
      // interferes with those counts.
     hcd.setBloomFilterType(BloomType.NONE);
     hcd.setMobEnabled(true);
     hcd.setMobThreshold(0L);
     htd.addFamily(hcd);
   }
   util.getHBaseAdmin().createTable(htd);
   // HBaseAdmin only waits for regions to appear in hbase:meta we should wait
   // until they are assigned
   util.waitUntilAllRegionsAssigned(htd.getTableName());
   return ConnectionFactory.createConnection(util.getConfiguration()).getTable(htd.getTableName());
 }
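     // Action: delete a randomly selected disabled table and record it as deleted.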
    @Override
    void perform() throws IOException {

      HTableDescriptor selected = selectTable(disabledTables);
      if (selected == null) {
        return;
      }

      Admin admin = connection.getAdmin();
      try {
        TableName tableName = selected.getTableName();
        LOG.info("Deleting table :" + selected);
        admin.deleteTable(tableName);
        Assert.assertFalse("Table: " + selected + " was not deleted", admin.tableExists(tableName));
        deletedTables.put(tableName, selected);
        LOG.info("Deleted table :" + selected);
      } catch (Exception e) {
        LOG.warn("Caught exception in action: " + this.getClass());
        throw e;
      } finally {
        admin.close();
      }
      verifyTables();
    }
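  // End-to-end test: creates an HBase table of host link data, runs the Giraph LinkRank
  // computation over it via the Nutch2Host vertex input/output formats, and checks the
  // scores written back to the "mtdt:_hr_" column.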
  @Test
  public void testHostRank() throws Exception {

    if (System.getProperty("prop.mapred.job.tracker") != null) {
      if (LOG.isInfoEnabled()) {
        LOG.info("testHostRank: ignoring test, it only runs in local mode.");
      }
      return;
    }

    File jarTest = new File(System.getProperty("prop.jarLocation"));
    if (!jarTest.exists()) {
      fail(
          "Could not find Giraph jar at the "
              + "location specified by 'prop.jarLocation'. "
              + "Make sure you built the main Giraph artifact.");
    }

    MiniHBaseCluster cluster = null;
    MiniZooKeeperCluster zkCluster = null;
    FileSystem fs = null;

    try {
      // using the restart method allows us to avoid having the hbase
      // root directory overwritten by /home/$username
      zkCluster = testUtil.startMiniZKCluster();
      testUtil.restartHBaseCluster(2);
      cluster = testUtil.getMiniHBaseCluster();

      final byte[] OL_BYTES = Bytes.toBytes("ol");
      final byte[] S_BYTES = Bytes.toBytes("s");
      final byte[] METADATA_BYTES = Bytes.toBytes("mtdt");
      final byte[] HR_BYTES = Bytes.toBytes("_hr_");
      final byte[] TAB = Bytes.toBytes(TABLE_NAME);

      Configuration conf = cluster.getConfiguration();
      HTableDescriptor desc = new HTableDescriptor(TAB);
      desc.addFamily(new HColumnDescriptor(OL_BYTES));
      desc.addFamily(new HColumnDescriptor(S_BYTES));
      desc.addFamily(new HColumnDescriptor(METADATA_BYTES));
      HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
      if (hbaseAdmin.isTableAvailable(TABLE_NAME)) {
        hbaseAdmin.disableTable(TABLE_NAME);
        hbaseAdmin.deleteTable(TABLE_NAME);
      }
      hbaseAdmin.createTable(desc);

      /*
       * Enter the initial data: edges (a,b), (b,c), (a,c), where
       * a = 1.0 - google, b = 1.0 - yahoo, c = 1.0 - bing
       */
      HTable table = new HTable(conf, TABLE_NAME);

      Put p1 = new Put(Bytes.toBytes("com.google.www"));
      p1.add(OL_BYTES, Bytes.toBytes("www.yahoo.com"), Bytes.toBytes("ab"));

      Put p2 = new Put(Bytes.toBytes("com.google.www"));
      p2.add(OL_BYTES, Bytes.toBytes("www.bing.com"), Bytes.toBytes("ac"));
      p2.add(OL_BYTES, Bytes.toBytes("www.bing.com"), Bytes.toBytes("invalid1"));
      p2.add(OL_BYTES, Bytes.toBytes("www.google.com"), Bytes.toBytes("invalid2"));

      Put p3 = new Put(Bytes.toBytes("com.yahoo.www"));
      p3.add(OL_BYTES, Bytes.toBytes("www.bing.com"), Bytes.toBytes("bc"));
      // p3.add(OL_BYTES, Bytes.toBytes(""), Bytes.toBytes("invalid4"));

      Put p4 = new Put(Bytes.toBytes("com.bing.www"));
      // TODO: Handle the case below; use the Apache isValid() method.
      p4.add(OL_BYTES, Bytes.toBytes("http://invalidurl"), Bytes.toBytes("invalid5"));
      p4.add(S_BYTES, S_BYTES, Bytes.toBytes(10.0d));

      Put p5 = new Put(Bytes.toBytes("dummy"));
      p5.add(S_BYTES, S_BYTES, Bytes.toBytes(10.0d));

      table.put(p1);
      table.put(p2);
      table.put(p3);
      table.put(p4);
      table.put(p5);

      // Set Giraph configuration
      // now operate over HBase using Vertex I/O formats
      conf.set(TableInputFormat.INPUT_TABLE, TABLE_NAME);
      conf.set(TableOutputFormat.OUTPUT_TABLE, TABLE_NAME);

      // Start the giraph job
      GiraphJob giraphJob = new GiraphJob(conf, BspCase.getCallingMethodName());
      GiraphConfiguration giraphConf = giraphJob.getConfiguration();
      giraphConf.setZooKeeperConfiguration(cluster.getMaster().getZooKeeper().getQuorum());
      setupConfiguration(giraphJob);
      giraphConf.setComputationClass(LinkRankComputation.class);
      giraphConf.setMasterComputeClass(LinkRankVertexMasterCompute.class);
      giraphConf.setOutEdgesClass(ByteArrayEdges.class);
      giraphConf.setVertexInputFormatClass(Nutch2HostInputFormat.class);
      giraphConf.setVertexOutputFormatClass(Nutch2HostOutputFormat.class);
      giraphConf.setInt("giraph.linkRank.superstepCount", 10);
      giraphConf.setInt("giraph.linkRank.scale", 10);
      giraphConf.set("giraph.linkRank.family", "mtdt");
      giraphConf.set("giraph.linkRank.qualifier", "_hr_");
      giraphConf.setVertexInputFilterClass(HostRankVertexFilter.class);
      assertTrue(giraphJob.run(true));

      if (LOG.isInfoEnabled()) LOG.info("Giraph job successful. Checking output qualifier.");

      // Check the results
      Result result;
      String key;
      byte[] calculatedScoreByte;
      HashMap<String, Double> expectedValues = new HashMap<String, Double>();
      expectedValues.put("com.google.www", 1.3515060339386287d);
      expectedValues.put("com.yahoo.www", 4.144902009567587d);
      expectedValues.put("com.bing.www", 9.063893290511482d);

      for (Object keyObject : expectedValues.keySet()) {
        key = keyObject.toString();
        result = table.get(new Get(key.getBytes()));
        calculatedScoreByte = result.getValue(METADATA_BYTES, HR_BYTES);
        assertNotNull(calculatedScoreByte);
        assertTrue(calculatedScoreByte.length > 0);
        Assert.assertEquals(
            "Scores are not the same",
            (Double) expectedValues.get(key),
            Bytes.toDouble(calculatedScoreByte),
            DELTA);
      }
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
      if (zkCluster != null) {
        zkCluster.shutdown();
      }
      // clean test files
      if (fs != null) {
        fs.delete(hbaseRootdir, true);
      }
    }
  }