private static Long latestVersion(Hashtable<String, String> config, dbutil db_util) throws Exception {
  if (!config.get("timestamp_stop").equals(Integer.toString(Integer.MAX_VALUE))) {
    return new Long(config.get("timestamp_stop"));
  }
  String rowName = config.get("file_id") + config.get("run_id") + "_";
  if (!config.get("task_id").isEmpty()) {
    try {
      rowName = rowName + String.format("%04d", new Integer(config.get("task_id")));
    } catch (NumberFormatException E) {
      rowName = rowName + config.get("task_id");
    }
  }
  Get timestampGet = new Get(rowName.getBytes());
  timestampGet.addColumn("d".getBytes(), "update".getBytes());
  Result timestampResult = db_util.doGet(config.get("db_name_updates"), timestampGet);
  KeyValue tsKv = timestampResult.getColumnLatest("d".getBytes(), "update".getBytes());
  if (tsKv == null) {
    // Fall back to the file-level update row when no task-specific row exists.
    rowName = config.get("file_id") + "_";
    timestampGet = new Get(rowName.getBytes());
    timestampGet.addColumn("d".getBytes(), "update".getBytes());
    timestampResult = db_util.doGet(config.get("db_name_updates"), timestampGet);
    tsKv = timestampResult.getColumnLatest("d".getBytes(), "update".getBytes());
  }
  if (tsKv == null) {
    return new Long(Integer.MAX_VALUE);
  }
  return new Long(tsKv.getTimestamp());
}
@Before
public void setUp() throws Exception {
  super.setUp();
  row1 = Bytes.toBytes("row1");
  row2 = Bytes.toBytes("row2");
  row3 = Bytes.toBytes("row3");
  fam1 = Bytes.toBytes("fam1");
  fam2 = Bytes.toBytes("fam2");
  col1 = Bytes.toBytes("col1");
  col2 = Bytes.toBytes("col2");
  col3 = Bytes.toBytes("col3");
  col4 = Bytes.toBytes("col4");
  col5 = Bytes.toBytes("col5");
  data = Bytes.toBytes("data");
  // Create Get
  get = new Get(row1);
  get.addFamily(fam1);
  get.addColumn(fam2, col2);
  get.addColumn(fam2, col4);
  get.addColumn(fam2, col5);
  this.scan = new Scan(get);
  rowComparator = CellComparator.COMPARATOR;
}
private static VesselTrackInfo getTrackInfo(Table TrackInfo_Table, String IMO_str) throws IOException {
  Get get = new Get(Bytes.toBytes(IMO_str));
  get.addColumn(details, lastlocation);
  get.addColumn(details, firstrecordtime);
  get.addColumn(details, lastrecordtime);

  Result result = TrackInfo_Table.get(get);
  byte[] last_location = result.getValue(details, lastlocation);
  byte[] first_recordtime = result.getValue(details, firstrecordtime);
  byte[] last_recordtime = result.getValue(details, lastrecordtime);

  VesselTrackInfo trackinfo = new VesselTrackInfo();
  trackinfo.LastLocation = last_location;
  if (first_recordtime != null) {
    trackinfo.FirstRecordTime = DateTime.parse(Bytes.toString(first_recordtime), rawformatter).getMillis();
  }
  if (last_recordtime != null) {
    trackinfo.LastRecordTime = DateTime.parse(Bytes.toString(last_recordtime), rawformatter).getMillis();
  }
  return trackinfo;
}
// Get all events with exit at last location
public static Map<Integer, VesselEvent> getAllEventsStartBeforeEndAfterBeforeLocation(
    Table VTEvent_Table, String IMO_str, VesselLocation location) throws IOException {
  Scan getAllEventsWithExistAtLastLocation = new Scan();
  getAllEventsWithExistAtLastLocation
      .setStartRow(Bytes.toBytes(IMO_str + LpadNum(Long.MAX_VALUE - location.recordtime, 19) + "0000000000"))
      .setStopRow(Bytes.toBytes(IMO_str + LpadNum(Long.MAX_VALUE, 19) + "9999999999"))
      .addColumn(details, exittime);
  getAllEventsWithExistAtLastLocation.setCaching(100);

  Filter ExistTimeValuefilter = new ValueFilter(
      CompareFilter.CompareOp.GREATER_OR_EQUAL,
      new BinaryComparator(Bytes.toBytes(new DateTime(location.recordtime).toString(rawformatter))));
  getAllEventsWithExistAtLastLocation.setFilter(ExistTimeValuefilter);

  ResultScanner Result_event = VTEvent_Table.getScanner(getAllEventsWithExistAtLastLocation);
  Map<Integer, VesselEvent> events = new HashMap<Integer, VesselEvent>();
  for (Result res : Result_event) {
    Get get = new Get(res.getRow());
    get.addColumn(details, entrytime);
    get.addColumn(details, entrycoordinates);

    Result result = VTEvent_Table.get(get);
    String rowkey = Bytes.toString(result.getRow());
    String polygonid = rowkey.substring(26);

    VesselEvent VE = new VesselEvent();
    VE.exittime = location.recordtime;
    VE.exitcoordinates = location.coordinates;
    VE.destination = location.destination;
    VE.polygonid = Integer.parseInt(polygonid);

    for (Cell cell : result.rawCells()) {
      String Qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
      String Value = Bytes.toString(CellUtil.cloneValue(cell));
      if (Qualifier.equals("entertime")) {
        VE.entrytime = DateTime.parse(Value, rawformatter).getMillis();
      } else if (Qualifier.equals("entercoordinates")) {
        VE.entrycoordinates = Value;
      }
    }
    events.put(VE.polygonid, VE);
  }
  Result_event.close();
  return events;
}
public boolean fetchReadAllFieldLine(List<Line> lines, LineSender sender) throws IOException {
  if (null == this.rs) {
    throw new IllegalStateException("HBase Client try to fetch data failed.");
  }
  for (Result result = rs.next(); result != null; result = rs.next()) {
    Get get = new Get(result.getRow());
    for (int i = 0; i < this.families.length; i++) {
      get.addColumn(this.families[i].getBytes(), this.columns[i].getBytes());
    }
    gets.add(get);
    if (gets.size() > this.BUFFER_LINE) {
      Result[] getResults = this.htable.get(gets);
      for (Result resu : getResults) {
        if (null != resu) {
          Line line = sender.createLine();
          for (int i = 0; i < this.families.length; i++) {
            byte[] value = resu.getValue(this.families[i].getBytes(), this.columns[i].getBytes());
            if (null == value) {
              line.addField(null);
            } else {
              line.addField(new String(value, encode));
            }
          }
          line.addField(new String(resu.getRow(), encode));
        }
      }
      return true;
    }
  }
  return false;
}
@TimeDepend
@Test
public void testGetWith_Ts() throws Exception {
  recreateTable();
  fillData();

  Get get = new Get(rowKey_ForTest);
  get.addColumn(ColumnFamilyNameBytes, QName1);
  get.setMaxVersions(3);

  get.setTimeStamp(3L);
  Result result = table.get(get);
  Assert.assertEquals(1, result.raw().length);

  get.setTimeStamp(2L);
  result = table.get(get);
  Assert.assertEquals(1, result.raw().length);

  get.setTimeStamp(1L);
  result = table.get(get);
  Assert.assertEquals(1, result.raw().length);

  get.setTimeStamp(0L);
  result = table.get(get);
  Assert.assertEquals(0, result.raw().length);

  get.setTimeRange(1, 4);
  result = table.get(get);
  Assert.assertEquals(3, result.raw().length);

  recreateTable();
}
@Override
public void setLogPosition(String queueId, String filename, long position) {
  try {
    byte[] rowKey = queueIdToRowKey(queueId);
    // Check that the log exists. addLog() must have been called before setLogPosition().
    Get checkLogExists = new Get(rowKey);
    checkLogExists.addColumn(CF_QUEUE, Bytes.toBytes(filename));
    if (!replicationTable.exists(checkLogExists)) {
      String errMsg = "Could not set position of non-existent log from queueId=" + queueId
          + ", filename=" + filename;
      abortable.abort(errMsg, new ReplicationException(errMsg));
      return;
    }
    // Update the log offset if it exists
    Put walAndOffset = new Put(rowKey);
    walAndOffset.addColumn(CF_QUEUE, Bytes.toBytes(filename), Bytes.toBytes(position));
    safeQueueUpdate(walAndOffset);
  } catch (IOException | ReplicationException e) {
    String errMsg = "Failed writing log position queueId=" + queueId + ", filename=" + filename
        + " position=" + position;
    abortable.abort(errMsg, e);
  }
}
public void verifyInvocationResults(Integer[] selectQualifiers, Integer[] expectedQualifiers)
    throws Exception {
  Get get = new Get(ROW_BYTES);
  for (int i = 0; i < selectQualifiers.length; i++) {
    get.addColumn(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + selectQualifiers[i]));
  }
  get.setFilter(new InvocationRecordFilter());

  List<KeyValue> expectedValues = new ArrayList<KeyValue>();
  for (int i = 0; i < expectedQualifiers.length; i++) {
    expectedValues.add(new KeyValue(ROW_BYTES, FAMILY_NAME_BYTES,
        Bytes.toBytes(QUALIFIER_PREFIX + expectedQualifiers[i]), expectedQualifiers[i],
        Bytes.toBytes(VALUE_PREFIX + expectedQualifiers[i])));
  }

  Scan scan = new Scan(get);
  List<Cell> actualValues = new ArrayList<Cell>();
  List<Cell> temp = new ArrayList<Cell>();
  InternalScanner scanner = this.region.getScanner(scan);
  while (scanner.next(temp)) {
    actualValues.addAll(temp);
    temp.clear();
  }
  actualValues.addAll(temp);
  Assert.assertTrue("Actual values " + actualValues + " differ from the expected values:"
      + expectedValues, expectedValues.equals(actualValues));
}
@Test
public void testSaveJsonFormat() throws AvroBaseException, IOException {
  AvroBase<User, byte[]> userHAB =
      AvroBaseFactory.createAvroBase(new HABModule(), HAB.class, AvroFormat.JSON);
  User saved = new User();
  saved.firstName = $("Sam");
  saved.lastName = $("Pullara");
  saved.birthday = $("1212");
  saved.gender = GenderType.MALE;
  saved.email = $("*****@*****.**");
  saved.description = $("CTO of RightTime, Inc. and one of the founders of BagCheck");
  saved.title = $("Engineer");
  saved.image = $("http://farm1.static.flickr.com/1/buddyicons/[email protected]");
  saved.location = $("Los Altos, CA");
  saved.mobile = $("4155551212");
  saved.password = ByteBuffer.wrap($("").getBytes());
  byte[] row = Bytes.toBytes("spullara");
  userHAB.put(row, saved);
  Row<User, byte[]> loaded = userHAB.get(row);
  assertEquals(saved, loaded.value);

  HTablePool pool = new HTablePool();
  HTableInterface table = pool.getTable(TABLE);
  try {
    Get get = new Get(row);
    byte[] DATA = Bytes.toBytes("d");
    get.addColumn(COLUMN_FAMILY, DATA);
    Result result = table.get(get);
    assertTrue(Bytes.toString(result.getValue(COLUMN_FAMILY, DATA)).startsWith("{"));
  } finally {
    pool.putTable(table);
  }
}
/* Build a list of Get objects from the given row keys. */
private static List<Get> getList(
    List<byte[]> rowList, byte[] cf, String[] selectArray, String tableName) {
  List<Get> list = new LinkedList<Get>();
  for (byte[] row : rowList) {
    Get get = new Get(row);
    for (String select : selectArray) {
      String[] temp = processField(select, Bytes.toString(cf));
      if ("ca_summary_optimize".equals(tableName) && compoundFieldMap.containsKey(temp[1])) {
        get.addColumn(Bytes.toBytes(temp[0]), Bytes.toBytes(compoundFieldMap.get(temp[1])));
      } else {
        get.addColumn(Bytes.toBytes(temp[0]), Bytes.toBytes(temp[1]));
      }
    }
    list.add(get);
  }
  return list;
}
/**
 * Read a record from the database. Each field/value pair from the result will be stored in a
 * HashMap.
 *
 * @param table The name of the table
 * @param key The record key of the record to read.
 * @param fields The list of fields to read, or null for all of them
 * @param result A HashMap of field/value pairs for the result
 * @return Status.OK on success, Status.NOT_FOUND if the key is absent, or Status.ERROR on error
 */
public Status read(
    String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) {
  // if this is a "new" table, init HTable object. Else, use existing one
  if (!tableName.equals(table)) {
    currentTable = null;
    try {
      getHTable(table);
      tableName = table;
    } catch (IOException e) {
      System.err.println("Error accessing HBase table: " + e);
      return Status.ERROR;
    }
  }

  Result r = null;
  try {
    if (debug) {
      System.out.println("Doing read from HBase columnfamily " + columnFamily);
      System.out.println("Doing read for key: " + key);
    }
    Get g = new Get(Bytes.toBytes(key));
    if (fields == null) {
      g.addFamily(columnFamilyBytes);
    } else {
      for (String field : fields) {
        g.addColumn(columnFamilyBytes, Bytes.toBytes(field));
      }
    }
    r = currentTable.get(g);
  } catch (IOException e) {
    if (debug) {
      System.err.println("Error doing get: " + e);
    }
    return Status.ERROR;
  } catch (ConcurrentModificationException e) {
    // do nothing for now...need to understand HBase concurrency model better
    return Status.ERROR;
  }

  if (r.isEmpty()) {
    return Status.NOT_FOUND;
  }

  while (r.advance()) {
    final Cell c = r.current();
    result.put(Bytes.toString(CellUtil.cloneQualifier(c)),
        new ByteArrayByteIterator(CellUtil.cloneValue(c)));
    if (debug) {
      System.out.println("Result for field: " + Bytes.toString(CellUtil.cloneQualifier(c))
          + " is: " + Bytes.toString(CellUtil.cloneValue(c)));
    }
  }
  return Status.OK;
}
private static String getVesselType(Table Vessel_Table, String IMO_str) throws IOException {
  Get get = new Get(Bytes.toBytes(IMO_str));
  get.addColumn(ves, TYPE);
  Result result = Vessel_Table.get(get);
  byte[] type = result.getValue(ves, TYPE);
  return Bytes.toString(type);
}
public static HBaseTuple getHBaseTuple(String row, String colFamily, String colName)
    throws IOException {
  Configuration conf = getConfiguration();
  HTable table1 = new HTable(conf, "table1");
  Get get = new Get(Bytes.toBytes(row));
  get.addFamily(Bytes.toBytes(colFamily));
  get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(colName));
  Result result = table1.get(get);
  return getHBaseTuple(result);
}
private void addFieldsToGet(Get get, List<FieldType> fields) {
  if (fields != null && (!fields.isEmpty())) {
    for (FieldType field : fields) {
      get.addColumn(RecordCf.DATA.bytes, ((FieldTypeImpl) field).getQualifier());
    }
    RecordDecoder.addSystemColumnsToGet(get);
  } else {
    // Retrieve everything
    get.addFamily(RecordCf.DATA.bytes);
  }
}
@Override
public Row get() {
  if (row == null) {
    if (searchRow != null) {
      Result r;
      try {
        Get get = new Get(HBaseUtils.toBytes(searchRow.getRowKey()));
        get.setTimeStamp(searchTimestamp);
        if (columns != null) {
          for (Column c : columns) {
            if (c.isRowKeyColumn()) {
              continue;
            } else if (c.getColumnFamilyName() != null) {
              get.addColumn(c.getColumnFamilyNameAsBytes(), c.getNameAsBytes());
            } else {
              get.addColumn(defaultColumnFamilyName, c.getNameAsBytes());
            }
          }
        }
        r = secondaryIndex.dataTable.get(get);
      } catch (IOException e) {
        throw DbException.convert(e);
      }
      if (r != null) {
        Value[] data = new Value[columns.size()];
        Value rowKey = ValueString.get(Bytes.toString(r.getRow()));
        if (columns != null) {
          int i = 0;
          for (Column c : columns) {
            i = c.getColumnId();
            if (c.isRowKeyColumn()) {
              data[i] = rowKey;
            } else {
              data[i] = HBaseUtils.toValue(
                  r.getValue(c.getColumnFamilyNameAsBytes(), c.getNameAsBytes()), c.getType());
            }
          }
        }
        row = new HBaseRow(null, rowKey, data, Row.MEMORY_CALCULATE, r);
      }
    }
  }
  return row;
}
private static void assertGet(byte[] row, byte[] family, byte[] qualifier, byte[] value)
    throws IOException {
  // run a get and see if the value matches
  Get get = new Get(row);
  get.addColumn(family, qualifier);
  Result result = region.get(get, null);
  assertEquals(1, result.size());
  KeyValue kv = result.raw()[0];
  byte[] r = kv.getValue();
  assertTrue(Bytes.compareTo(r, value) == 0);
}
/*
 * Fetch a single column from a table.
 *
 * @tableName table name
 *
 * @rowKey row key
 */
public static void getResultByColumn(
    String tableName, String rowKey, String familyName, String columnName) throws IOException {
  HTable table = new HTable(conf, Bytes.toBytes(tableName));
  Get get = new Get(Bytes.toBytes(rowKey));
  // Fetch the cell for the given column family and qualifier
  get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName));
  Result result = table.get(get);
  for (KeyValue kv : result.list()) {
    System.out.println("family:" + Bytes.toString(kv.getFamily()));
    System.out.println("qualifier:" + Bytes.toString(kv.getQualifier()));
    System.out.println("value:" + Bytes.toString(kv.getValue()));
    System.out.println("Timestamp:" + kv.getTimestamp());
    System.out.println("-------------------------------------------");
  }
}
/**
 * Read a record from the database. Each field/value pair from the result will be stored in a
 * HashMap.
 *
 * @param table The name of the table
 * @param key The record key of the record to read.
 * @param fields The list of fields to read, or null for all of them
 * @param result A HashMap of field/value pairs for the result
 * @return Zero on success, a non-zero error code on error
 */
public int read(
    String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) {
  // if this is a "new" table, init HTable object. Else, use existing one
  if (!_table.equals(table)) {
    _hTable = null;
    try {
      getHTable(table);
      _table = table;
    } catch (IOException e) {
      System.err.println("Error accessing HBase table: " + e);
      return ServerError;
    }
  }

  Result r = null;
  try {
    if (_debug) {
      System.out.println("Doing read from HBase columnfamily " + _columnFamily);
      System.out.println("Doing read for key: " + key);
    }
    Get g = new Get(Bytes.toBytes(key));
    if (fields == null) {
      g.addFamily(_columnFamilyBytes);
    } else {
      for (String field : fields) {
        g.addColumn(_columnFamilyBytes, Bytes.toBytes(field));
      }
    }
    r = _hTable.get(g);
  } catch (IOException e) {
    System.err.println("Error doing get: " + e);
    return ServerError;
  } catch (ConcurrentModificationException e) {
    // do nothing for now...need to understand HBase concurrency model better
    return ServerError;
  }

  for (KeyValue kv : r.raw()) {
    result.put(Bytes.toString(kv.getQualifier()), new ByteArrayByteIterator(kv.getValue()));
    if (_debug) {
      System.out.println("Result for field: " + Bytes.toString(kv.getQualifier()) + " is: "
          + Bytes.toString(kv.getValue()));
    }
  }
  return Ok;
}
@Override
public Get getGet(byte[] row) {
  Get get = new Get(row);
  for (Entry<byte[], NavigableSet<byte[]>> familyMapEntry : familyMap.entrySet()) {
    byte[] columnFamily = familyMapEntry.getKey();
    if (familyMapEntry.getValue() == null) {
      get.addFamily(columnFamily);
    } else {
      for (byte[] qualifier : familyMapEntry.getValue()) {
        get.addColumn(columnFamily, qualifier);
      }
    }
  }
  return get;
}
/**
 * Instantiate with {@code FieldDefinition}s and {@code DocumentExtractDefinition}s.
 *
 * @param indexerName name of the indexer, used to label the mapping-time metric
 * @param fieldDefinitions define fields to be indexed
 * @param documentExtractDefinitions additional document extraction definitions
 */
public DefaultResultToSolrMapper(
    String indexerName,
    List<FieldDefinition> fieldDefinitions,
    List<DocumentExtractDefinition> documentExtractDefinitions) {
  extractors = Lists.newArrayList();
  resultDocumentExtractors = Lists.newArrayList();
  for (FieldDefinition fieldDefinition : fieldDefinitions) {
    ByteArrayExtractor byteArrayExtractor = ByteArrayExtractors.getExtractor(
        fieldDefinition.getValueExpression(), fieldDefinition.getValueSource());
    ByteArrayValueMapper valueMapper =
        ByteArrayValueMappers.getMapper(fieldDefinition.getTypeName());
    ConfigureUtil.configure(valueMapper, ConfigureUtil.mapToJson(fieldDefinition.getParams()));
    resultDocumentExtractors.add(new HBaseSolrDocumentExtractor(
        fieldDefinition.getName(), byteArrayExtractor, valueMapper));
    extractors.add(byteArrayExtractor);
  }
  for (DocumentExtractDefinition extractDefinition : documentExtractDefinitions) {
    ByteArrayExtractor byteArrayExtractor = ByteArrayExtractors.getExtractor(
        extractDefinition.getValueExpression(), extractDefinition.getValueSource());
    extractors.add(byteArrayExtractor);
  }

  Get get = newGet();
  for (ByteArrayExtractor extractor : extractors) {
    byte[] columnFamily = extractor.getColumnFamily();
    byte[] columnQualifier = extractor.getColumnQualifier();
    if (columnFamily != null) {
      if (columnQualifier != null) {
        get.addColumn(columnFamily, columnQualifier);
      } else {
        get.addFamily(columnFamily);
      }
    }
  }
  familyMap = get.getFamilyMap();

  mappingTimer = Metrics.newTimer(
      metricName(getClass(), "HBase Result to Solr mapping time", indexerName),
      TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
}
@Test // HBase-3583
public void testHBase3583() throws IOException {
  TableName tableName = TableName.valueOf("testHBase3583");
  util.createTable(tableName, new byte[][] {A, B, C});
  util.waitUntilAllRegionsAssigned(tableName);

  verifyMethodResult(
      SimpleRegionObserver.class,
      new String[] {"hadPreGet", "hadPostGet", "wasScannerNextCalled", "wasScannerCloseCalled"},
      tableName,
      new Boolean[] {false, false, false, false});

  HTable table = new HTable(util.getConfiguration(), tableName);
  Put put = new Put(ROW);
  put.add(A, A, A);
  table.put(put);

  Get get = new Get(ROW);
  get.addColumn(A, A);
  table.get(get);

  // verify that scannerNext and scannerClose upcalls won't be invoked
  // when we perform get().
  verifyMethodResult(
      SimpleRegionObserver.class,
      new String[] {"hadPreGet", "hadPostGet", "wasScannerNextCalled", "wasScannerCloseCalled"},
      tableName,
      new Boolean[] {true, true, false, false});

  Scan s = new Scan();
  ResultScanner scanner = table.getScanner(s);
  try {
    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {}
  } finally {
    scanner.close();
  }

  // now scanner hooks should be invoked.
  verifyMethodResult(
      SimpleRegionObserver.class,
      new String[] {"wasScannerNextCalled", "wasScannerCloseCalled"},
      tableName,
      new Boolean[] {true, true});

  util.deleteTable(tableName);
  table.close();
}
/**
 * Transactional version of {@link HTable#get(Get)}
 *
 * @param transactionState Identifier of the transaction
 * @see HTable#get(Get)
 * @throws IOException
 */
public Result get(TransactionState transactionState, final Get get) throws IOException {
  final long readTimestamp = transactionState.getStartTimestamp();
  final Get tsget = new Get(get.getRow());
  TimeRange timeRange = get.getTimeRange();
  long startTime = timeRange.getMin();
  long endTime = Math.min(timeRange.getMax(), readTimestamp + 1);
  // int maxVersions = get.getMaxVersions();
  tsget.setTimeRange(startTime, endTime)
      .setMaxVersions((int) (versionsAvg + CACHE_VERSIONS_OVERHEAD));
  Map<byte[], NavigableSet<byte[]>> kvs = get.getFamilyMap();
  for (Map.Entry<byte[], NavigableSet<byte[]>> entry : kvs.entrySet()) {
    byte[] family = entry.getKey();
    NavigableSet<byte[]> qualifiers = entry.getValue();
    if (qualifiers == null || qualifiers.isEmpty()) {
      tsget.addFamily(family);
    } else {
      for (byte[] qualifier : qualifiers) {
        tsget.addColumn(family, qualifier);
      }
    }
  }
  // Result result;
  // Result filteredResult;
  // do {
  //   result = super.get(tsget);
  //   filteredResult = filter(super.get(tsget), readTimestamp, maxVersions);
  // } while (!result.isEmpty() && filteredResult == null);
  getsPerformed++;
  Result result = filter(transactionState, super.get(tsget), readTimestamp,
      (int) (versionsAvg + CACHE_VERSIONS_OVERHEAD));
  return result == null ? new Result() : result;
  // Scan scan = new Scan(get);
  // scan.setRetainDeletesInOutput(true);
  // ResultScanner rs = this.getScanner(transactionState, scan);
  // Result r = rs.next();
  // if (r == null) {
  //   r = new Result();
  // }
  // return r;
}
private void verifyData(Region newReg, int startRow, int numRows, byte[] qf, byte[]... families)
    throws IOException {
  for (int i = startRow; i < startRow + numRows; i++) {
    byte[] row = Bytes.toBytes("" + i);
    Get get = new Get(row);
    for (byte[] family : families) {
      get.addColumn(family, qf);
    }
    Result result = newReg.get(get);
    Cell[] raw = result.rawCells();
    assertEquals(families.length, result.size());
    for (int j = 0; j < families.length; j++) {
      assertTrue(CellUtil.matchingRow(raw[j], row));
      assertTrue(CellUtil.matchingFamily(raw[j], families[j]));
      assertTrue(CellUtil.matchingQualifier(raw[j], qf));
    }
  }
}
private void doGets(HRegion region) throws IOException {
  for (int i = 0; i < NUM_ROWS; ++i) {
    final byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
    for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
      final String qualStr = String.valueOf(j);
      if (VERBOSE) {
        System.err.println(
            "Reading row " + i + ", column " + j + " " + Bytes.toString(rowKey) + "/" + qualStr);
      }
      final byte[] qualBytes = Bytes.toBytes(qualStr);
      Get get = new Get(rowKey);
      get.addColumn(CF_BYTES, qualBytes);
      Result result = region.get(get);
      assertEquals(1, result.size());
      byte[] value = result.getValue(CF_BYTES, qualBytes);
      assertTrue(LoadTestKVGenerator.verify(value, rowKey, qualBytes));
    }
  }
}
/**
 * Fetch a single column.
 *
 * @param tableName table name
 * @param rowKey row key
 * @param familyName column family name
 * @param columnName column name
 * @return the column
 */
public HbaseColumn getColumn(
    String tableName, String rowKey, String familyName, String columnName) {
  HbaseColumn col = null;
  try {
    HTable table = new HTable(conf, Bytes.toBytes(tableName));
    Get get = new Get(Bytes.toBytes(rowKey));
    // Fetch the cell for the given column family and qualifier
    get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName));
    Result rowtmp = table.get(get);
    col = new HbaseColumn();
    for (Cell cell : rowtmp.listCells()) {
      col.CopyFromCell(cell);
    }
  } catch (Exception e) {
    logger.error("getColumn failed", e);
  }
  return col;
}
/*
 * Fetch multiple versions of a single column.
 *
 * @tableName table name
 *
 * @rowKey row key
 *
 * @familyName column family name
 *
 * @columnName column name
 */
public static void getResultByVersion(
    String tableName, String rowKey, String familyName, String columnName) throws IOException {
  HTable table = new HTable(conf, Bytes.toBytes(tableName));
  Get get = new Get(Bytes.toBytes(rowKey));
  get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName));
  get.setMaxVersions(5);
  Result result = table.get(get);
  for (KeyValue kv : result.list()) {
    System.out.println("family:" + Bytes.toString(kv.getFamily()));
    System.out.println("qualifier:" + Bytes.toString(kv.getQualifier()));
    System.out.println("value:" + Bytes.toString(kv.getValue()));
    System.out.println("Timestamp:" + kv.getTimestamp());
    System.out.println("-------------------------------------------");
  }
  /*
   * List<?> results = table.get(get).list();
   * Iterator<?> it = results.iterator();
   * while (it.hasNext()) {
   *   System.out.println(it.next().toString());
   * }
   */
}
@Override
public long getLogPosition(String queueId, String filename) throws ReplicationException {
  try {
    byte[] rowKey = queueIdToRowKey(queueId);
    Get getOffset = new Get(rowKey);
    getOffset.addColumn(CF_QUEUE, Bytes.toBytes(filename));
    Result result = getResultIfOwner(getOffset);
    if (result == null || !result.containsColumn(CF_QUEUE, Bytes.toBytes(filename))) {
      throw new ReplicationException("Could not read empty result while getting log position "
          + "queueId=" + queueId + ", filename=" + filename);
    }
    return Bytes.toLong(result.getValue(CF_QUEUE, Bytes.toBytes(filename)));
  } catch (IOException e) {
    throw new ReplicationException(
        "Could not get position in log for queueId=" + queueId + ", filename=" + filename);
  }
}
/**
 * Fetch multiple versions of a column.
 *
 * @param tableName table name
 * @param rowKey row key
 * @param familyName column family name
 * @param columnName column name
 * @param num number of versions
 * @return list of column versions
 */
public ArrayList<HbaseColumn> getColumnVersions(
    String tableName, String rowKey, String familyName, String columnName, int num) {
  ArrayList<HbaseColumn> cols = null;
  try {
    HTable table = new HTable(conf, Bytes.toBytes(tableName));
    Get get = new Get(Bytes.toBytes(rowKey));
    get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName));
    get.setMaxVersions(num);
    Result rowtmp = table.get(get);
    cols = new ArrayList<>();
    for (Cell cell : rowtmp.listCells()) {
      HbaseColumn col = new HbaseColumn(cell);
      cols.add(col);
    }
  } catch (Exception e) {
    logger.error("getColumnVersions failed", e);
  }
  return cols;
}
public void dump(String table, String[] rows, String[] fams, String[] quals) throws IOException {
  HTable tbl = new HTable(conf, table);
  List<Get> gets = new ArrayList<Get>();
  for (String row : rows) {
    Get get = new Get(Bytes.toBytes(row));
    get.setMaxVersions();
    if (fams != null) {
      for (String fam : fams) {
        for (String qual : quals) {
          get.addColumn(Bytes.toBytes(fam), Bytes.toBytes(qual));
        }
      }
    }
    gets.add(get);
  }
  Result[] results = tbl.get(gets);
  for (Result result : results) {
    for (KeyValue kv : result.raw()) {
      System.out.println("KV: " + kv + ", Value: " + Bytes.toString(kv.getValue()));
    }
  }
}
@Override
public InputStream getInputStream(byte[] blobKey) throws BlobException {
  Get get = new Get(blobKey);
  get.addColumn(BLOBS_COLUMN_FAMILY_BYTES, BLOB_COLUMN);
  Result result;
  try {
    result = table.get(get);
  } catch (IOException e) {
    throw new BlobException(
        "Failed to open an inputstream for blobkey '" + Hex.encodeHexString(blobKey)
            + "' on the HBASE blobstore", e);
  }
  byte[] value = result.getValue(BLOBS_COLUMN_FAMILY_BYTES, BLOB_COLUMN);
  if (value == null) {
    throw new BlobException(
        "Failed to open an inputstream for blobkey '" + Hex.encodeHexString(blobKey)
            + "' since no blob was found on the HBASE blobstore");
  }
  return new ByteArrayInputStream(value);
}
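All of the examples above share the same narrow-read idiom: build a Get for a single row, restrict it with addColumn (or addFamily), then pull the cell back out of the Result. The standalone sketch below shows just that idiom in isolation, using the current Connection/Table client API; the table, family, qualifier, and row names are placeholders chosen for illustration, not taken from any example above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class GetAddColumnSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Placeholder names: replace with an existing table, family, and qualifier.
    TableName tableName = TableName.valueOf("example_table");
    byte[] family = Bytes.toBytes("d");
    byte[] qualifier = Bytes.toBytes("update");

    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(tableName)) {
      // Restrict the read to a single cell of a single row.
      Get get = new Get(Bytes.toBytes("row1"));
      get.addColumn(family, qualifier);
      Result result = table.get(get);
      byte[] value = result.getValue(family, qualifier);
      System.out.println(value == null ? "no value" : Bytes.toString(value));
    }
  }
}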