public static void main(String[] args) throws Exception {
  // Point the client at the ZooKeeper quorum backing the cluster.
  Configuration conf = HBaseConfiguration.create();
  conf.set("hbase.zookeeper.quorum", "hadoop271.itversity.com");
  conf.set("hbase.zookeeper.property.clientPort", "2181");
  Connection connection = ConnectionFactory.createConnection(conf);
  Table table = connection.getTable(TableName.valueOf("demo"));

  // Full scan of the table.
  Scan scan1 = new Scan();
  ResultScanner scanner1 = table.getScanner(scan1);
  for (Result res : scanner1) {
    System.out.println(Bytes.toString(res.getRow()));
    System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
    System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column2".getBytes())));
  }
  scanner1.close();

  // Insert a new row with key "3".
  Put put = new Put("3".getBytes());
  put.addColumn("cf1".getBytes(), "column1".getBytes(), "value1".getBytes());
  put.addColumn("cf1".getBytes(), "column2".getBytes(), "value2".getBytes());
  table.put(put);

  // Read the row back with a Get.
  Get get = new Get("3".getBytes());
  Result getResult = table.get(get);
  System.out.println("Printing columns for rowkey 3");
  System.out.println(Bytes.toString(getResult.getValue("cf1".getBytes(), "column1".getBytes())));
  System.out.println(Bytes.toString(getResult.getValue("cf1".getBytes(), "column2".getBytes())));

  scanner1 = table.getScanner(scan1);
  System.out.println("Before Delete");
  for (Result res : scanner1) {
    System.out.println(Bytes.toString(res.getRow()));
    System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
    System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column2".getBytes())));
  }
  scanner1.close();

  // Delete the row and scan again to confirm it is gone.
  Delete del = new Delete("3".getBytes());
  table.delete(del);
  System.out.println("After Delete");
  scanner1 = table.getScanner(scan1);
  for (Result res : scanner1) {
    System.out.println(Bytes.toString(res.getRow()));
    System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
    System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column2".getBytes())));
  }
  scanner1.close();
  table.close();
  connection.close();
}
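The example above closes each handle explicitly. Since Connection, Table, and ResultScanner are all Closeable in this client API, the same scan can also be written with try-with-resources so the handles are released even if an exception is thrown. A minimal sketch, assuming the same "demo" table and "cf1" family as above:

public static void main(String[] args) throws IOException {
  Configuration conf = HBaseConfiguration.create();
  // Connection, Table and ResultScanner are all Closeable, so try-with-resources
  // releases them even if the scan throws.
  try (Connection connection = ConnectionFactory.createConnection(conf);
      Table table = connection.getTable(TableName.valueOf("demo"));
      ResultScanner scanner = table.getScanner(new Scan())) {
    for (Result res : scanner) {
      System.out.println(Bytes.toString(res.getRow()));
      System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
    }
  }
}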
public static void main(String[] args) throws IOException, SolrServerException {
  final Configuration conf;
  HttpSolrServer solrServer = new HttpSolrServer("http://c1master:8983/solr");
  conf = HBaseConfiguration.create();
  // Define HBase table name
  HTable table = new HTable(conf, "test_global_shop");
  Scan scan = new Scan();
  // Define HBase column family
  scan.addFamily(Bytes.toBytes("shop"));
  scan.setCaching(1000);
  scan.setCacheBlocks(false);
  ResultScanner ss = table.getScanner(scan);
  System.out.println("start Storing...");
  int i = 0;
  try {
    for (Result r : ss) {
      SolrInputDocument solrDoc = new SolrInputDocument();
      solrDoc.addField("key", new String(r.getRow()));
      for (KeyValue kv : r.raw()) {
        String fieldName = new String(kv.getQualifier());
        String fieldValue = new String(kv.getValue());
        if (fieldName.equalsIgnoreCase("address")
            || fieldName.equalsIgnoreCase("category")
            || fieldName.equalsIgnoreCase("name")
            || fieldName.equalsIgnoreCase("province")
            || fieldName.equalsIgnoreCase("tel")) {
          solrDoc.addField(fieldName, fieldValue);
        }
      }
      solrServer.add(solrDoc);
      // Committing after every document is slow; kept here only to keep the example simple.
      solrServer.commit(true, true, true);
      i = i + 1;
      System.out.println("Already stored " + i + " documents");
    }
    System.out.println("done !");
  } catch (IOException e) {
    System.out.println("error !");
    e.printStackTrace();
  } finally {
    // Close the scanner and table exactly once, whether or not the loop failed.
    ss.close();
    table.close();
  }
}
@Test public void testCheckpointRollback() throws Exception { // start a transaction, using checkpoints between writes transactionContext.start(); transactionAwareHTable.put( new Put(TestBytes.row).add(TestBytes.family, TestBytes.qualifier, TestBytes.value)); transactionContext.checkpoint(); transactionAwareHTable.put( new Put(TestBytes.row2).add(TestBytes.family, TestBytes.qualifier, TestBytes.value2)); transactionContext.checkpoint(); transactionAwareHTable.put( new Put(TestBytes.row3).add(TestBytes.family, TestBytes.qualifier, TestBytes.value)); transactionContext.abort(); transactionContext.start(); verifyRow(transactionAwareHTable, TestBytes.row, null); verifyRow(transactionAwareHTable, TestBytes.row2, null); verifyRow(transactionAwareHTable, TestBytes.row3, null); Scan scan = new Scan(); ResultScanner scanner = transactionAwareHTable.getScanner(scan); assertNull(scanner.next()); scanner.close(); transactionContext.finish(); }
public static void main(String[] args) throws Exception {
  if (args.length < 2) {
    throw new Exception("Usage: <table name> <start key>");
  }
  Configuration conf = HBaseConfiguration.create();
  HTable table = new HTable(conf, args[0]);
  String startKey = args[1];
  TimeCounter executeTimer = new TimeCounter();
  executeTimer.begin();
  executeTimer.enter();
  // Match rows whose "family:longStr2" value, parsed as a long, equals 99.
  Expression exp =
      ExpressionFactory.eq(
          ExpressionFactory.toLong(
              ExpressionFactory.toString(ExpressionFactory.columnValue("family", "longStr2"))),
          ExpressionFactory.constant(Long.parseLong("99")));
  ExpressionFilter expressionFilter = new ExpressionFilter(exp);
  Scan scan = new Scan(Bytes.toBytes(startKey), expressionFilter);
  int count = 0;
  ResultScanner scanner = table.getScanner(scan);
  Result r = scanner.next();
  while (r != null) {
    count++;
    r = scanner.next();
  }
  System.out.println("++ Scanning finished with count : " + count + " ++");
  scanner.close();
  table.close();
  executeTimer.leave();
  executeTimer.end();
  System.out.println("++ Time cost for scanning: " + executeTimer.getTimeString() + " ++");
}
/** * Performs a full scan of a catalog table. * * @param catalogTracker * @param visitor Visitor invoked against each row. * @param startrow Where to start the scan. Pass null if want to begin scan at first row. * @param scanRoot True if we are to scan <code>-ROOT-</code> rather than <code>.META.</code>, the * default (pass false to scan .META.) * @throws IOException */ static void fullScan( CatalogTracker catalogTracker, final Visitor visitor, final byte[] startrow, final boolean scanRoot) throws IOException { Scan scan = new Scan(); if (startrow != null) scan.setStartRow(startrow); if (startrow == null && !scanRoot) { int caching = catalogTracker .getConnection() .getConfiguration() .getInt(HConstants.HBASE_META_SCANNER_CACHING, 100); scan.setCaching(caching); } scan.addFamily(HConstants.CATALOG_FAMILY); HTable metaTable = scanRoot ? getRootHTable(catalogTracker) : getMetaHTable(catalogTracker); ResultScanner scanner = metaTable.getScanner(scan); try { Result data; while ((data = scanner.next()) != null) { if (data.isEmpty()) continue; // Break if visit returns false. if (!visitor.visit(data)) break; } } finally { scanner.close(); metaTable.close(); } return; }
public String[] getObjectIDs(String objectType, String... tags) throws IOException { List<String> ret = new ArrayList<String>(); FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL); SingleColumnValueFilter filter1 = new SingleColumnValueFilter( "tags".getBytes(), "OBJECTTYPE".getBytes(), CompareOp.EQUAL, Bytes.toBytes(objectType)); list.addFilter(filter1); for (String tag : tags) { SingleColumnValueFilter filter2 = new SingleColumnValueFilter( "tags".getBytes(), tag.toUpperCase().getBytes(), CompareOp.EQUAL, Bytes.toBytes(1)); filter2.setFilterIfMissing(true); list.addFilter(filter2); } Scan s = new Scan(); s.setFilter(list); s.setMaxVersions(1); ResultScanner scanner = htable.getScanner(s); try { for (Result rr = scanner.next(); rr != null; rr = scanner.next()) { String localObjectType = new String(rr.getValue("tags".getBytes(), "OBJECTTYPE".getBytes())); String localObjectId = new String(rr.getValue("tags".getBytes(), "OBJECTID".getBytes())); ret.add(localObjectId); } } finally { scanner.close(); } return ret.toArray(new String[] {}); }
private HRegionInfo nextRegion() throws IOException { try { Result results = getMetaRow(); if (results == null) { return null; } byte[] regionInfoValue = results.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); if (regionInfoValue == null || regionInfoValue.length == 0) { throw new NoSuchElementException( "meta region entry missing " + Bytes.toString(HConstants.CATALOG_FAMILY) + ":" + Bytes.toString(HConstants.REGIONINFO_QUALIFIER)); } HRegionInfo region = Writables.getHRegionInfo(regionInfoValue); if (!Bytes.equals(region.getTableName(), this.tableName)) { return null; } return region; } catch (IOException e) { e = RemoteExceptionHandler.checkIOException(e); LOG.error("meta scanner error", e); metaScanner.close(); throw e; } }
public void close() { if (rs != null) { rs.close(); rs = null; } closed = true; }
public static List<Delete> GetDeleteEventsBetween( Table VTEvent_Table, String imo_str, long first_timestamp, long last_timestamp) throws IOException { // scan // 'cdb_vessel:vessel_event',{FILTER=>"(PrefixFilter('0000003162')"} Scan GetEventsBetween = new Scan(); GetEventsBetween.setStartRow( Bytes.toBytes(imo_str + LpadNum(Long.MAX_VALUE - last_timestamp, 19) + "0000000000")) .setStopRow( Bytes.toBytes( imo_str + LpadNum(Long.MAX_VALUE - first_timestamp + 1, 19) + "9999999999")) .addColumn(details, exittime); GetEventsBetween.setCaching(100); Filter ExistTimeValuefilter = new ValueFilter( CompareFilter.CompareOp.LESS_OR_EQUAL, new BinaryComparator( Bytes.toBytes(new DateTime(last_timestamp).toString(rawformatter)))); GetEventsBetween.setFilter(ExistTimeValuefilter); ResultScanner Result_ExistingEvents = VTEvent_Table.getScanner(GetEventsBetween); List<Delete> deletes = new ArrayList<Delete>(); for (Result res : Result_ExistingEvents) { deletes.add(new Delete(res.getRow())); } Result_ExistingEvents.close(); return deletes; }
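The list returned above is presumably applied with Table.delete(List&lt;Delete&gt;), which sends the removals as one batch. A minimal usage sketch; the IMO string and the two timestamps below are placeholder values, not taken from the original code:

// Hypothetical caller: collect the deletes for a time window and apply them in one batch.
long firstTimestamp = 1420070400000L; // placeholder epoch millis
long lastTimestamp = 1420156800000L;  // placeholder epoch millis
List<Delete> deletes =
    GetDeleteEventsBetween(VTEvent_Table, "0000003162", firstTimestamp, lastTimestamp);
if (!deletes.isEmpty()) {
  // Table.delete(List<Delete>) batches the deletes instead of issuing them one by one.
  VTEvent_Table.delete(deletes);
}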
/**
 * Scans a range of rows.
 *
 * @param tableName table name
 * @param start_rowkey start row key
 * @param stop_rowkey stop row key (exclusive)
 * @return list of rows
 */
public ArrayList<HbaseRow> scanRows(String tableName, String start_rowkey, String stop_rowkey) {
  ResultScanner rowstmp = null;
  HTable table = null;
  ArrayList<HbaseRow> rows = null;
  try {
    Scan scan = new Scan();
    scan.setStartRow(Bytes.toBytes(start_rowkey));
    scan.setStopRow(Bytes.toBytes(stop_rowkey));
    table = new HTable(conf, Bytes.toBytes(tableName));
    rowstmp = table.getScanner(scan);
    rows = new ArrayList<>();
    for (Result rowtmp : rowstmp) {
      HbaseRow row = new HbaseRow();
      row.rowkey = Bytes.toString(rowtmp.getRow());
      for (Cell cell : rowtmp.listCells()) {
        HbaseColumn col = new HbaseColumn(cell);
        row.cols.add(col);
      }
      rows.add(row);
    }
  } catch (Exception e) {
    logger.error("scanRows failed", e);
  } finally {
    // Guard against a failure before the scanner or table was created.
    if (rowstmp != null) {
      rowstmp.close();
    }
    if (table != null) {
      try {
        table.close();
      } catch (IOException e) {
        logger.error("closing table failed", e);
      }
    }
  }
  return rows;
}
public String[] getTags(String objectType, String objectId) throws IOException {
  List<String> ret = new ArrayList<String>();
  String rowKey = objectType + "_" + objectId;
  // The stop row of a scan is exclusive, so append a zero byte to include rowKey itself.
  Scan s = new Scan(rowKey.getBytes(), Bytes.add(rowKey.getBytes(), new byte[] {0}));
  s.setMaxVersions(1);
  ResultScanner scanner = htable.getScanner(s);
  try {
    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
      NavigableMap<byte[], byte[]> map = rr.getFamilyMap("tags".getBytes());
      Iterator<Entry<byte[], byte[]>> it = map.entrySet().iterator();
      while (it.hasNext()) {
        Entry<byte[], byte[]> entry = it.next();
        String key = new String(entry.getKey());
        // Every qualifier other than the OBJECT* metadata columns is a tag flag.
        if (!key.startsWith("OBJECT")) {
          int val = Bytes.toInt(entry.getValue());
          if (val > 0) ret.add(key);
        }
      }
    }
  } finally {
    scanner.close();
  }
  return ret.toArray(new String[] {});
}
/* * Add to each of the regions in .META. a value. Key is the startrow of the * region (except its 'aaa' for first region). Actual value is the row name. * @param expected * @return * @throws IOException */ private static int addToEachStartKey(final int expected) throws IOException { HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); HTable meta = new HTable(TEST_UTIL.getConfiguration(), HConstants.META_TABLE_NAME); int rows = 0; Scan scan = new Scan(); scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); ResultScanner s = meta.getScanner(scan); for (Result r = null; (r = s.next()) != null; ) { byte[] b = r.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); if (b == null || b.length <= 0) break; HRegionInfo hri = Writables.getHRegionInfo(b); // If start key, add 'aaa'. byte[] row = getStartKey(hri); Put p = new Put(row); p.setWriteToWAL(false); p.add(getTestFamily(), getTestQualifier(), row); t.put(p); rows++; } s.close(); Assert.assertEquals(expected, rows); t.close(); meta.close(); return rows; }
/** * Looks at every value of the mapreduce output and verifies that indeed the values have been * reversed. * * @param table Table to scan. * @throws IOException * @throws NullPointerException if we failed to find a cell value */ private void verifyAttempt(final Table table) throws IOException, NullPointerException { Scan scan = new Scan(); scan.addFamily(INPUT_FAMILY); scan.addFamily(OUTPUT_FAMILY); ResultScanner scanner = table.getScanner(scan); try { Iterator<Result> itr = scanner.iterator(); assertTrue(itr.hasNext()); while (itr.hasNext()) { Result r = itr.next(); if (LOG.isDebugEnabled()) { if (r.size() > 2) { throw new IOException("Too many results, expected 2 got " + r.size()); } } byte[] firstValue = null; byte[] secondValue = null; int count = 0; for (Cell kv : r.listCells()) { if (count == 0) { firstValue = CellUtil.cloneValue(kv); } else if (count == 1) { secondValue = CellUtil.cloneValue(kv); } else if (count == 2) { break; } count++; } String first = ""; if (firstValue == null) { throw new NullPointerException(Bytes.toString(r.getRow()) + ": first value is null"); } first = Bytes.toString(firstValue); String second = ""; if (secondValue == null) { throw new NullPointerException(Bytes.toString(r.getRow()) + ": second value is null"); } byte[] secondReversed = new byte[secondValue.length]; for (int i = 0, j = secondValue.length - 1; j >= 0; j--, i++) { secondReversed[i] = secondValue[j]; } second = Bytes.toString(secondReversed); if (first.compareTo(second) != 0) { if (LOG.isDebugEnabled()) { LOG.debug( "second key is not the reverse of first. row=" + Bytes.toStringBinary(r.getRow()) + ", first value=" + first + ", second value=" + second); } fail(); } } } finally { scanner.close(); } }
public void close() throws IOException { if (null != rs) { rs.close(); } if (null != htable) { htable.close(); } }
private void verifyRowCount(Table table, int expectedRowCount) throws IOException {
  ResultScanner scanner = table.getScanner(new Scan());
  int rowCount = 0;
  try {
    while (scanner.next() != null) {
      rowCount++;
    }
  } finally {
    scanner.close();
  }
  assertEquals(expectedRowCount, rowCount);
}
// Get all events with exit at last location public static Map<Integer, VesselEvent> getAllEventsStartBeforeEndAfterBeforeLocation( Table VTEvent_Table, String IMO_str, VesselLocation location) throws IOException { Scan getAllEventsWithExistAtLastLocation = new Scan(); getAllEventsWithExistAtLastLocation .setStartRow( Bytes.toBytes( IMO_str + LpadNum(Long.MAX_VALUE - location.recordtime, 19) + "0000000000")) .setStopRow(Bytes.toBytes(IMO_str + LpadNum(Long.MAX_VALUE, 19) + "9999999999")) .addColumn(details, exittime); getAllEventsWithExistAtLastLocation.setCaching(100); Filter ExistTimeValuefilter = new ValueFilter( CompareFilter.CompareOp.GREATER_OR_EQUAL, new BinaryComparator( Bytes.toBytes(new DateTime(location.recordtime).toString(rawformatter)))); getAllEventsWithExistAtLastLocation.setFilter(ExistTimeValuefilter); ResultScanner Result_event = VTEvent_Table.getScanner(getAllEventsWithExistAtLastLocation); Map<Integer, VesselEvent> events = new HashMap<Integer, VesselEvent>(); for (Result res : Result_event) { Get get = new Get(res.getRow()); get.addColumn(details, entrytime); get.addColumn(details, entrycoordinates); Result result = VTEvent_Table.get(get); String rowkey = Bytes.toString(result.getRow()); String polygonid = rowkey.substring(26); VesselEvent VE = new VesselEvent(); VE.exittime = location.recordtime; VE.exitcoordinates = location.coordinates; VE.destination = location.destination; VE.polygonid = Integer.parseInt(polygonid); for (Cell cell : result.rawCells()) { String Qualifier = Bytes.toString(CellUtil.cloneQualifier(cell)); String Value = Bytes.toString(CellUtil.cloneValue(cell)); if (Qualifier.equals("entertime")) { VE.entrytime = DateTime.parse(Value, rawformatter).getMillis(); } else if (Qualifier.equals("entercoordinates")) { VE.entrycoordinates = Value; } } events.put(VE.polygonid, VE); } Result_event.close(); return events; }
public static void main(String[] args) throws IOException { Configuration conf = HBaseConfiguration.create(); HBaseHelper helper = HBaseHelper.getHelper(conf); helper.dropTable("testtable"); helper.createTable("testtable", "colfam1", "colfam2", "colfam3", "colfam4"); System.out.println("Adding rows to table..."); helper.fillTable("testtable", 1, 10, 2, "colfam1", "colfam2", "colfam3", "colfam4"); HTable table = new HTable(conf, "testtable"); // vv FamilyFilterExample Filter filter1 = new FamilyFilter( CompareFilter.CompareOp .LESS, // co FamilyFilterExample-1-Filter Create filter, while specifying the // comparison operator and comparator. new BinaryComparator(Bytes.toBytes("colfam3"))); Scan scan = new Scan(); scan.setFilter(filter1); ResultScanner scanner = table.getScanner( scan); // co FamilyFilterExample-2-Scan Scan over table while applying the filter. // ^^ FamilyFilterExample System.out.println("Scanning table... "); // vv FamilyFilterExample for (Result result : scanner) { System.out.println(result); } scanner.close(); Get get1 = new Get(Bytes.toBytes("row-5")); get1.setFilter(filter1); Result result1 = table.get(get1); // co FamilyFilterExample-3-Get Get a row while applying the same filter. System.out.println("Result of get(): " + result1); Filter filter2 = new FamilyFilter( CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("colfam3"))); Get get2 = new Get( Bytes.toBytes( "row-5")); // co FamilyFilterExample-4-Mismatch Create a filter on one column family // while trying to retrieve another. get2.addFamily(Bytes.toBytes("colfam1")); get2.setFilter(filter2); Result result2 = table.get( get2); // co FamilyFilterExample-5-Get2 Get the same row while applying the new filter, // this will return "NONE". System.out.println("Result of get(): " + result2); // ^^ FamilyFilterExample }
/* * @return Count of rows in TABLENAME * @throws IOException */ private static int count() throws IOException { HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); int rows = 0; Scan scan = new Scan(); ResultScanner s = t.getScanner(scan); for (Result r = null; (r = s.next()) != null; ) { rows++; } s.close(); LOG.info("Counted=" + rows); return rows; }
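The count above pulls every cell back to the client just to count rows. A lighter-weight variant, sketched here under the same TEST_UTIL and TABLENAME assumptions, combines FirstKeyOnlyFilter and KeyOnlyFilter from org.apache.hadoop.hbase.filter so each row comes back as a single empty-valued cell:

private static int countKeysOnly() throws IOException {
  HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
  Scan scan = new Scan();
  // Return just one empty-valued cell per row instead of the whole row.
  scan.setFilter(new FilterList(new FirstKeyOnlyFilter(), new KeyOnlyFilter()));
  ResultScanner s = t.getScanner(scan);
  int rows = 0;
  try {
    while (s.next() != null) {
      rows++;
    }
  } finally {
    s.close();
    t.close();
  }
  return rows;
}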
@Override public void tweet(User user, String tweetText) throws IOException {
  final long epoch = System.currentTimeMillis();
  // Transposed timestamp so newer tweets sort first in the row key.
  final long transposeEpoch = Long.MAX_VALUE - epoch;
  final byte[] epochBytes = Bytes.toBytes(epoch);
  final byte[] tweetBytes = Bytes.toBytes(tweetText);
  byte[] nameBytes = Bytes.toBytes(user.getName());
  /* put the tweet into the tweets table */
  Put tweetRowPut = new Put(generateTweetId(user));
  tweetRowPut.add(_DEFAULT, _NAME, nameBytes);
  tweetRowPut.add(_DEFAULT, _MAIL, Bytes.toBytes(user.getEmail()));
  tweetRowPut.add(_DEFAULT, _TWEET, tweetBytes);
  tweetRowPut.add(_DEFAULT, _TIME, epochBytes);
  tweetsTable.put(tweetRowPut);
  /* scan for this user's followers */
  Scan followerScan = new Scan();
  followerScan.setStartRow(Bytes.toBytes(user.getUserId() + "-"));
  followerScan.setStopRow(Bytes.toBytes((user.getUserId() + 1) + "-"));
  ResultScanner followerRS = followersTable.getScanner(followerScan);
  /* put the user's own tweet into her own tweetline */
  Put put =
      new Put(Bytes.toBytes(user.getUserId() + "-" + transposeEpoch + "-" + user.getUserId()));
  put.add(_DEFAULT, _NAME, nameBytes);
  put.add(_DEFAULT, _TWEET, tweetBytes);
  put.add(_DEFAULT, _TIME, epochBytes);
  List<Row> puts = new ArrayList<Row>();
  puts.add(put);
  /* fan the tweet out to every follower's tweetline */
  for (Result result : followerRS) {
    Long followerid = Bytes.toLong(result.getColumnLatest(_DEFAULT, _USERID).getValue());
    put = new Put(Bytes.toBytes(followerid + "-" + transposeEpoch + "-" + user.getUserId()));
    put.add(_DEFAULT, _NAME, nameBytes);
    put.add(_DEFAULT, _TWEET, tweetBytes);
    put.add(_DEFAULT, _TIME, epochBytes);
    puts.add(put);
  }
  followerRS.close();
  try {
    tweetlineTable.batch(puts);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    e.printStackTrace(); // @TODO log and handle properly.
  }
}
/**
 * Filters on column qualifiers.
 *
 * @throws IOException
 */
@Test
public void filterQualifier() throws IOException {
  Scan scan = new Scan();
  // Qualifier filter: qualifier equals "thumbUrl"; row2 should be printed.
  Filter filter1 =
      new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("thumbUrl")));
  scan.setFilter(filter1);
  ResultScanner rs1 = table.getScanner(scan);
  for (Result result : rs1) {
    System.out.println("filter1:" + result);
  }
  rs1.close();
  // Second filter: any qualifier containing the substring "a".
  Filter filter2 = new QualifierFilter(CompareOp.EQUAL, new SubstringComparator("a"));
  scan.setFilter(filter2);
  ResultScanner rs2 = table.getScanner(scan);
  for (Result result : rs2) {
    System.out.println("filter2:" + result);
  }
  rs2.close();
}
public static void main(String[] args) throws IOException { Configuration conf = HBaseConfiguration.create(); HBaseHelper helper = HBaseHelper.getHelper(conf); helper.dropTable("testtable"); helper.createTable("testtable", "colfam1", "colfam2"); System.out.println("Adding rows to table..."); helper.fillTable("testtable", 1, 10, 10, "colfam1", "colfam2"); Connection connection = ConnectionFactory.createConnection(conf); Table table = connection.getTable(TableName.valueOf("testtable")); // vv SingleColumnValueFilterExample SingleColumnValueFilter filter = new SingleColumnValueFilter( Bytes.toBytes("colfam1"), Bytes.toBytes("col-5"), CompareFilter.CompareOp.NOT_EQUAL, new SubstringComparator("val-5")); filter.setFilterIfMissing(true); Scan scan = new Scan(); scan.setFilter(filter); ResultScanner scanner = table.getScanner(scan); // ^^ SingleColumnValueFilterExample System.out.println("Results of scan:"); // vv SingleColumnValueFilterExample for (Result result : scanner) { for (Cell cell : result.rawCells()) { System.out.println( "Cell: " + cell + ", Value: " + Bytes.toString( cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); } } scanner.close(); Get get = new Get(Bytes.toBytes("row-6")); get.setFilter(filter); Result result = table.get(get); System.out.println("Result of get: "); for (Cell cell : result.rawCells()) { System.out.println( "Cell: " + cell + ", Value: " + Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); } // ^^ SingleColumnValueFilterExample }
@BeforeClass public static void setUpBeforeClass() throws Exception { // Start up our mini cluster on top of an 0.92 root.dir that has data from // a 0.92 hbase run -- it has a table with 100 rows in it -- and see if // we can migrate from 0.92 TEST_UTIL.startMiniZKCluster(); TEST_UTIL.startMiniDFSCluster(1); Path testdir = TEST_UTIL.getDataTestDir("TestMetaMigrationConvertToPB"); // Untar our test dir. File untar = untar(new File(testdir.toString())); // Now copy the untar up into hdfs so when we start hbase, we'll run from it. Configuration conf = TEST_UTIL.getConfiguration(); FsShell shell = new FsShell(conf); FileSystem fs = FileSystem.get(conf); // find where hbase will root itself, so we can copy filesystem there Path hbaseRootDir = TEST_UTIL.getDefaultRootDirPath(); if (!fs.isDirectory(hbaseRootDir.getParent())) { // mkdir at first fs.mkdirs(hbaseRootDir.getParent()); } doFsCommand(shell, new String[] {"-put", untar.toURI().toString(), hbaseRootDir.toString()}); // windows fix: tgz file has .META. directory renamed as -META- since the original is an illegal // name under windows. So we rename it back. See // src/test/data//TestMetaMigrationConvertingToPB.README and // https://issues.apache.org/jira/browse/HBASE-6821 doFsCommand( shell, new String[] { "-mv", new Path(hbaseRootDir, "-META-").toString(), new Path(hbaseRootDir, ".META.").toString() }); // See whats in minihdfs. doFsCommand(shell, new String[] {"-lsr", "/"}); TEST_UTIL.startMiniHBaseCluster(1, 1); // Assert we are running against the copied-up filesystem. The copied-up // rootdir should have had a table named 'TestTable' in it. Assert it // present. HTable t = new HTable(TEST_UTIL.getConfiguration(), TESTTABLE); ResultScanner scanner = t.getScanner(new Scan()); int count = 0; while (scanner.next() != null) { count++; } // Assert that we find all 100 rows that are in the data we loaded. If // so then we must have migrated it from 0.90 to 0.92. Assert.assertEquals(ROW_COUNT, count); scanner.close(); t.close(); }
public static VesselLocation getLocationBefore( Table VTLocation_Table, String IMO_str, long timestamp) throws IOException { Scan getLastLocation = new Scan(); getLastLocation.setStartRow( Bytes.toBytes(IMO_str + LpadNum(Long.MAX_VALUE - timestamp + 1, 19))); getLastLocation.setMaxResultSize(1); ResultScanner Result_LastLocation = VTLocation_Table.getScanner(getLastLocation); for (Result res : Result_LastLocation) { VesselLocation VL = new VesselLocation(); for (Cell cell : res.rawCells()) { String Qualifier = Bytes.toString(CellUtil.cloneQualifier(cell)); String Value = Bytes.toString(CellUtil.cloneValue(cell)); if (Qualifier.equals("coordinates")) { VL.coordinates = Value; } else if (Qualifier.equals("speed")) { VL.speed = Value; } else if (Qualifier.equals("destination")) { VL.destination = Value; } else if (Qualifier.equals("timestamp")) { VL.recordtime = DateTime.parse(Value, rawformatter).getMillis(); } else if (Qualifier.equals("previouslocation")) { VL.previouslocation = Value; } else if (Qualifier.equals("nextlocation")) { VL.nextlocation = Value; } } Result_LastLocation.close(); return VL; } Result_LastLocation.close(); return null; }
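Note that Scan.setMaxResultSize limits the size of a result batch in bytes rather than the number of rows, so in the method above it is the early return inside the loop that actually bounds the work. A PageFilter (or Scan.setLimit(1) on HBase 2.x clients) is the usual way to cap a scan at one row; a minimal sketch reusing the same row-key layout and variables as getLocationBefore:

// Sketch: cap the scan at a single row explicitly. Assumes the same
// row-key layout (IMO + descending timestamp) as getLocationBefore above.
Scan getLastLocation = new Scan();
getLastLocation.setStartRow(Bytes.toBytes(IMO_str + LpadNum(Long.MAX_VALUE - timestamp + 1, 19)));
// PageFilter(1) stops each region after one row; Scan.setLimit(1) does the
// same globally on HBase 2.x clients.
getLastLocation.setFilter(new PageFilter(1));
ResultScanner scanner = VTLocation_Table.getScanner(getLastLocation);
try {
  Result first = scanner.next();
  // first is null when no location exists before the given timestamp.
} finally {
  scanner.close();
}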
public static void main(String[] args) throws IOException { Configuration conf = HBaseClientHelper.loadDefaultConfiguration(); Connection connection = ConnectionFactory.createConnection(conf); try { Table table = connection.getTable(TableName.valueOf("testtable")); try { // 1 Put Put p = new Put(Bytes.toBytes("row1")); p.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), Bytes.toBytes("val1")); table.put(p); // 2 Get Get g = new Get(Bytes.toBytes("row1")); Result r = table.get(g); byte[] value = r.getValue(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1")); String valueStr = Bytes.toString(value); System.out.println("GET: " + valueStr); // 3 Scan Scan s = new Scan(); s.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1")); ResultScanner scanner = table.getScanner(s); try { for (Result rr = scanner.next(); rr != null; rr = scanner.next()) { System.out.println("Found row: " + rr); } // The other approach is to use a foreach loop. Scanners are // iterable! // for (Result rr : scanner) { // System.out.println("Found row: " + rr); // } } finally { scanner.close(); } // Close your table and cluster connection. } finally { if (table != null) table.close(); } } finally { connection.close(); } }
/** Return the number of rows in the given table. */ public static int countMobRows(final Table table) throws IOException { Scan scan = new Scan(); ResultScanner results = table.getScanner(scan); int count = 0; for (Result res : results) { count++; List<Cell> cells = res.listCells(); for (Cell cell : cells) { // Verify the value Assert.assertTrue(CellUtil.cloneValue(cell).length > 0); } } results.close(); return count; }
@Test // HBase-3583 public void testHBase3583() throws IOException { TableName tableName = TableName.valueOf("testHBase3583"); util.createTable(tableName, new byte[][] {A, B, C}); util.waitUntilAllRegionsAssigned(tableName); verifyMethodResult( SimpleRegionObserver.class, new String[] {"hadPreGet", "hadPostGet", "wasScannerNextCalled", "wasScannerCloseCalled"}, tableName, new Boolean[] {false, false, false, false}); HTable table = new HTable(util.getConfiguration(), tableName); Put put = new Put(ROW); put.add(A, A, A); table.put(put); Get get = new Get(ROW); get.addColumn(A, A); table.get(get); // verify that scannerNext and scannerClose upcalls won't be invoked // when we perform get(). verifyMethodResult( SimpleRegionObserver.class, new String[] {"hadPreGet", "hadPostGet", "wasScannerNextCalled", "wasScannerCloseCalled"}, tableName, new Boolean[] {true, true, false, false}); Scan s = new Scan(); ResultScanner scanner = table.getScanner(s); try { for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {} } finally { scanner.close(); } // now scanner hooks should be invoked. verifyMethodResult( SimpleRegionObserver.class, new String[] {"wasScannerNextCalled", "wasScannerCloseCalled"}, tableName, new Boolean[] {true, true}); util.deleteTable(tableName); table.close(); }
public static void updateExistingEventsToEndAtLastLocation(
    Table VTEvent_Table, long imo, VesselLocation lastlocation) throws IOException {
  // Find existing events that started before the last location and end after it,
  // and update them so they end at the last location.
  Scan getEventStartedBeforeAndEndAfter = new Scan();
  getEventStartedBeforeAndEndAfter
      .setStartRow(
          Bytes.toBytes(
              LpadNum(imo, 7)
                  + LpadNum(Long.MAX_VALUE - lastlocation.recordtime, 19)
                  + "0000000000"))
      .setStopRow(Bytes.toBytes(LpadNum(imo, 7) + LpadNum(Long.MAX_VALUE, 19) + "9999999999"))
      .addColumn(details, exittime);
  getEventStartedBeforeAndEndAfter.setCaching(100);
  Filter ExistTimeValuefilter =
      new ValueFilter(
          CompareFilter.CompareOp.GREATER,
          new BinaryComparator(
              Bytes.toBytes(new DateTime(lastlocation.recordtime).toString(rawformatter))));
  getEventStartedBeforeAndEndAfter.setFilter(ExistTimeValuefilter);
  ResultScanner Result_eventcross = VTEvent_Table.getScanner(getEventStartedBeforeAndEndAfter);
  List<Put> puts = new ArrayList<Put>();
  for (Result res : Result_eventcross) {
    // vessel event table
    // rowkey: imo(7)+timestamp(19 desc)+polygonid(8)
    // qualifier: entrytime, entrycoordinates, exittime, exitcoordinates, destination
    byte[] rowkey = res.getRow();
    Put updateevent = new Put(rowkey);
    updateevent.addColumn(
        details,
        exittime,
        Bytes.toBytes(new DateTime(lastlocation.recordtime).toString(rawformatter)));
    updateevent.addColumn(details, coordinates, Bytes.toBytes(lastlocation.coordinates));
    updateevent.addColumn(details, destination, Bytes.toBytes(lastlocation.destination));
    puts.add(updateevent);
  }
  Result_eventcross.close();
  VTEvent_Table.put(puts);
}
/*
 * Scans an HBase table and prints each cell.
 *
 * @param tableName table name
 */
public static void getResultScann(String tableName) throws IOException {
  Scan scan = new Scan();
  ResultScanner rs = null;
  HTableInterface table = conn.getTable(tableName);
  try {
    // A QualifierFilter could be set here to restrict the scan, e.g.:
    // scan.setFilter(new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("A"))));
    rs = table.getScanner(scan);
    int count = 0;
    for (Result r : rs) {
      count++;
      for (KeyValue kv : r.list()) {
        System.out.println("row:" + Bytes.toString(kv.getRow()));
        System.out.println("value:" + Bytes.toString(kv.getValue()));
      }
      // Stop after roughly 1000 rows to keep the output bounded.
      if (count > 1000) {
        return;
      }
    }
  } finally {
    if (rs != null) {
      rs.close();
    }
    table.close();
  }
}
public boolean hasNextComment() { if ((resultIterator == null) || (!resultIterator.hasNext())) { if (scanner != null) { scanner.close(); } return false; } else { Result row = resultIterator.next(); NavigableMap<byte[], byte[]> map = row.getFamilyMap(Bytes.toBytes("comment")); keyStr = Bytes.toString(row.getRow()); commentAuthor = Bytes.toString(map.get(Bytes.toBytes("comment_author"))); commentBody = Bytes.toString(map.get(Bytes.toBytes("comment_body"))); Long timeOfComment = Long.parseLong(keyStr.substring(keyStr.lastIndexOf(",") + 1)); dateOfComment = new Date(timeOfComment); return true; } }
private void loadIPs() {
  dns = new HashMap(100000000);
  unknownHosts = new HashMap(1000000);
  querying = new HashMap(100000);
  try {
    int statsCommit = 500000;
    HConnection connection = HConnectionManager.createConnection(HBaseConfiguration.create());
    HTableInterface fetchFailTable = connection.getTable("fetchFail");
    Scan scan = new Scan();
    scan.setCaching(statsCommit);
    List<Filter> filters = new ArrayList<Filter>();
    Filter filter = new ColumnPrefixFilter(Bytes.toBytes("ip"));
    filters.add(filter);
    FilterList filterList = new FilterList(filters);
    scan.setFilter(filterList);
    ResultScanner rs = fetchFailTable.getScanner(scan);
    long cnt = 0;
    for (Result r : rs) {
      NavigableMap<byte[], byte[]> map = r.getFamilyMap(Bytes.toBytes("cf"));
      String ip = Bytes.toString(map.get(Bytes.toBytes("ip")));
      String host = Bytes.toString(r.getRow()).split("��")[0];
      if (host != null && ip != null) {
        dns.put(host, ip);
      }
      if (++cnt % statsCommit == 0) {
        LOG.info("loadIPs url=" + Bytes.toString(r.getRow()) + " cnt=" + cnt);
      }
    }
    rs.close();
    fetchFailTable.close();
    LOG.info("load hostip cache=" + dns.size());
    connection.close();
  } catch (Exception e) {
    e.printStackTrace();
  }
}