private static VesselTrackInfo getTrackInfo(Table TrackInfo_Table, String IMO_str) throws IOException {
    Get get = new Get(Bytes.toBytes(IMO_str));
    get.addColumn(details, lastlocation);
    get.addColumn(details, firstrecordtime);
    get.addColumn(details, lastrecordtime);

    Result result = TrackInfo_Table.get(get);

    byte[] last_location = result.getValue(details, lastlocation);
    byte[] first_recordtime = result.getValue(details, firstrecordtime);
    byte[] last_recordtime = result.getValue(details, lastrecordtime);

    VesselTrackInfo trackinfo = new VesselTrackInfo();
    trackinfo.LastLocation = last_location;

    if (first_recordtime != null) {
        trackinfo.FirstRecordTime = DateTime.parse(Bytes.toString(first_recordtime), rawformatter).getMillis();
    }
    if (last_recordtime != null) {
        trackinfo.LastRecordTime = DateTime.parse(Bytes.toString(last_recordtime), rawformatter).getMillis();
    }
    return trackinfo;
}
/**
 * Test aborted transactional delete requests, that must be rolled back.
 *
 * @throws Exception
 */
@Test
public void testAbortedTransactionalDelete() throws Exception {
    transactionContext.start();
    Put put = new Put(TestBytes.row);
    put.add(TestBytes.family, TestBytes.qualifier, TestBytes.value);
    transactionAwareHTable.put(put);
    transactionContext.finish();

    transactionContext.start();
    Result result = transactionAwareHTable.get(new Get(TestBytes.row));
    transactionContext.finish();
    byte[] value = result.getValue(TestBytes.family, TestBytes.qualifier);
    assertArrayEquals(TestBytes.value, value);

    transactionContext.start();
    Delete delete = new Delete(TestBytes.row);
    transactionAwareHTable.delete(delete);
    transactionContext.abort();

    transactionContext.start();
    result = transactionAwareHTable.get(new Get(TestBytes.row));
    transactionContext.finish();
    value = result.getValue(TestBytes.family, TestBytes.qualifier);
    assertArrayEquals(TestBytes.value, value);
}
public String[] getTags(String objectType, String objectId) throws IOException {
    List<String> ret = new ArrayList<String>();
    String rowKey = objectType + "_" + objectId;

    Scan s = new Scan(rowKey.getBytes(), rowKey.getBytes());
    s.setMaxVersions(1);
    ResultScanner scanner = htable.getScanner(s);
    try {
        for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
            String localObjectType = new String(rr.getValue("tags".getBytes(), "OBJECTTYPE".getBytes()));
            String localObjectId = new String(rr.getValue("tags".getBytes(), "OBJECTID".getBytes()));
            NavigableMap<byte[], byte[]> map = rr.getFamilyMap("tags".getBytes());
            Iterator<Entry<byte[], byte[]>> it = map.entrySet().iterator();
            while (it.hasNext()) {
                Entry<byte[], byte[]> entry = it.next();
                String key = new String(entry.getKey());
                if (!key.startsWith("OBJECT")) {
                    int val = Bytes.toInt(entry.getValue());
                    if (val > 0)
                        ret.add(key);
                }
            }
        }
    } finally {
        scanner.close();
    }
    return ret.toArray(new String[] {});
}
public String[] getObjectIDs(String objectType, String... tags) throws IOException {
    List<String> ret = new ArrayList<String>();

    FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
            "tags".getBytes(), "OBJECTTYPE".getBytes(), CompareOp.EQUAL, Bytes.toBytes(objectType));
    list.addFilter(filter1);
    for (String tag : tags) {
        SingleColumnValueFilter filter2 = new SingleColumnValueFilter(
                "tags".getBytes(), tag.toUpperCase().getBytes(), CompareOp.EQUAL, Bytes.toBytes(1));
        filter2.setFilterIfMissing(true);
        list.addFilter(filter2);
    }

    Scan s = new Scan();
    s.setFilter(list);
    s.setMaxVersions(1);
    ResultScanner scanner = htable.getScanner(s);
    try {
        for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
            String localObjectType = new String(rr.getValue("tags".getBytes(), "OBJECTTYPE".getBytes()));
            String localObjectId = new String(rr.getValue("tags".getBytes(), "OBJECTID".getBytes()));
            ret.add(localObjectId);
        }
    } finally {
        scanner.close();
    }
    return ret.toArray(new String[] {});
}
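// Hedged usage sketch for the two index methods above (getTags / getObjectIDs). The "TagIndex"
// class name and its construction are assumptions for illustration only; they are not part of
// the original snippets.
public static void printTagIndexExample(TagIndex tagIndex) throws IOException {
    // Tags attached to one object (rows keyed as "<objectType>_<objectId>")
    String[] tags = tagIndex.getTags("document", "doc-42");
    // Objects of a type that carry every listed tag (tag columns hold a positive int counter)
    String[] ids = tagIndex.getObjectIDs("document", "urgent", "draft");
    System.out.println(Arrays.toString(tags) + " / " + Arrays.toString(ids));
}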
public Blog(String blogid) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    table = new HTable(conf, "blogs");

    // 1. Get the row whose row key is blogid from above
    Get g = new Get(Bytes.toBytes(blogid));
    Result r = table.get(g);

    // 2. Extract the rowkey, blog text (column "body") and blog title
    // (column "meta:title")
    key = r.getRow();
    keyStr = Bytes.toString(key);
    blogText = Bytes.toString(r.getValue(Bytes.toBytes("body"), Bytes.toBytes("")));
    blogTitle = Bytes.toString(r.getValue(Bytes.toBytes("meta"), Bytes.toBytes("title")));
    Long reverseTimestamp = Long.parseLong(keyStr.substring(4));
    Long epoch = Math.abs(reverseTimestamp - Long.MAX_VALUE);
    dateOfPost = new Date(epoch);

    // Get an iterator for the comments
    Scan s = new Scan();
    s.addFamily(Bytes.toBytes("comment"));
    // Use a PrefixFilter
    PrefixFilter filter = new PrefixFilter(key);
    s.setFilter(filter);
    scanner = table.getScanner(s);
    resultIterator = scanner.iterator();
}
/**
 * Tests that each transaction can see its own persisted writes, while not seeing writes from
 * other in-progress transactions.
 */
@Test
public void testReadYourWrites() throws Exception {
    // In-progress tx1: started before our main transaction
    HTable hTable1 = new HTable(testUtil.getConfiguration(), TestBytes.table);
    TransactionAwareHTable txHTable1 = new TransactionAwareHTable(hTable1);
    TransactionContext inprogressTxContext1 =
            new TransactionContext(new InMemoryTxSystemClient(txManager), txHTable1);

    // In-progress tx2: started while our main transaction is running
    HTable hTable2 = new HTable(testUtil.getConfiguration(), TestBytes.table);
    TransactionAwareHTable txHTable2 = new TransactionAwareHTable(hTable2);
    TransactionContext inprogressTxContext2 =
            new TransactionContext(new InMemoryTxSystemClient(txManager), txHTable2);

    // create an in-progress write that should be ignored
    byte[] col2 = Bytes.toBytes("col2");
    inprogressTxContext1.start();
    Put putCol2 = new Put(TestBytes.row);
    byte[] valueCol2 = Bytes.toBytes("writing in progress");
    putCol2.add(TestBytes.family, col2, valueCol2);
    txHTable1.put(putCol2);

    // start a tx and write a value to test reading in same tx
    transactionContext.start();
    Put put = new Put(TestBytes.row);
    byte[] value = Bytes.toBytes("writing");
    put.add(TestBytes.family, TestBytes.qualifier, value);
    transactionAwareHTable.put(put);

    // test that a write from a tx started after the first is not visible
    inprogressTxContext2.start();
    Put put2 = new Put(TestBytes.row);
    byte[] value2 = Bytes.toBytes("writing2");
    put2.add(TestBytes.family, TestBytes.qualifier, value2);
    txHTable2.put(put2);

    Get get = new Get(TestBytes.row);
    Result row = transactionAwareHTable.get(get);
    assertFalse(row.isEmpty());
    byte[] col1Value = row.getValue(TestBytes.family, TestBytes.qualifier);
    Assert.assertNotNull(col1Value);
    Assert.assertArrayEquals(value, col1Value);
    // write from in-progress transaction should not be visible
    byte[] col2Value = row.getValue(TestBytes.family, col2);
    assertNull(col2Value);

    // commit in-progress transaction, should still not be visible
    inprogressTxContext1.finish();
    get = new Get(TestBytes.row);
    row = transactionAwareHTable.get(get);
    assertFalse(row.isEmpty());
    col2Value = row.getValue(TestBytes.family, col2);
    assertNull(col2Value);

    transactionContext.finish();
    inprogressTxContext2.abort();
}
/**
 * Extract a {@link ServerName} from a catalog table {@link Result}.
 *
 * @param r Result to pull from
 * @return A ServerName instance, or null if the necessary fields are not found or empty.
 */
public static ServerName getServerNameFromCatalogResult(final Result r) {
    byte[] value = r.getValue(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
    if (value == null || value.length == 0) return null;
    String hostAndPort = Bytes.toString(value);
    value = r.getValue(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER);
    if (value == null || value.length == 0) return null;
    return new ServerName(hostAndPort, Bytes.toLong(value));
}
public static void main(String[] args) throws Exception {
    conf.set("hbase.zookeeper.quorum", "hadoop271.itversity.com");
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    Connection connection = ConnectionFactory.createConnection(conf);
    Table table = connection.getTable(TableName.valueOf("demo"));

    Scan scan1 = new Scan();
    ResultScanner scanner1 = table.getScanner(scan1);
    for (Result res : scanner1) {
        System.out.println(Bytes.toString(res.getRow()));
        System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
        System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column2".getBytes())));
    }
    scanner1.close();

    Put put = new Put("3".getBytes());
    put.addColumn("cf1".getBytes(), "column1".getBytes(), "value1".getBytes());
    put.addColumn("cf1".getBytes(), "column2".getBytes(), "value2".getBytes());
    table.put(put);

    Get get = new Get("3".getBytes());
    Result getResult = table.get(get);
    System.out.println("Printing columns for rowkey 3");
    System.out.println(Bytes.toString(getResult.getValue("cf1".getBytes(), "column1".getBytes())));
    System.out.println(Bytes.toString(getResult.getValue("cf1".getBytes(), "column2".getBytes())));

    scanner1 = table.getScanner(scan1);
    System.out.println("Before Delete");
    for (Result res : scanner1) {
        System.out.println(Bytes.toString(res.getRow()));
        System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
        System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column2".getBytes())));
    }
    scanner1.close();

    Delete del = new Delete("3".getBytes());
    table.delete(del);

    System.out.println("After Delete");
    scanner1 = table.getScanner(scan1);
    for (Result res : scanner1) {
        System.out.println(Bytes.toString(res.getRow()));
        System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column1".getBytes())));
        System.out.println(Bytes.toString(res.getValue("cf1".getBytes(), "column2".getBytes())));
    }
    scanner1.close();

    table.close();
    connection.close();
}
/**
 * Returns the daughter regions by reading from the corresponding columns of the .META. table
 * Result. If the region is not a split parent region, it returns PairOfSameType(null, null).
 */
public static PairOfSameType<HRegionInfo> getDaughterRegions(Result data) throws IOException {
    HRegionInfo splitA = Writables.getHRegionInfoOrNull(
            data.getValue(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER));
    HRegionInfo splitB = Writables.getHRegionInfoOrNull(
            data.getValue(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER));
    return new PairOfSameType<HRegionInfo>(splitA, splitB);
}
private MetaRecord getMetaRecord(Result r) {
    Value[] data = new Value[4];
    // id
    // data[0] = ValueInt.get(Bytes.toInt(r.getValue(FAMILY, ID)));
    data[0] = ValueInt.get(Bytes.toInt(r.getRow()));
    // head: unused
    // data[1] = null;
    // type
    data[2] = ValueInt.get(Bytes.toInt(r.getValue(FAMILY, OBJECT_TYPE)));
    // sql
    data[3] = ValueString.get(Bytes.toString(r.getValue(FAMILY, SQL)));
    return new MetaRecord(new SimpleRow(data));
}
public boolean fetchReadAllFieldLine(List<Line> lines, LineSender sender) throws IOException {
    if (null == this.rs) {
        throw new IllegalStateException("HBase client failed to fetch data: the result scanner is not initialized.");
    }
    for (Result result = rs.next(); result != null; result = rs.next()) {
        Get get = new Get(result.getRow());
        for (int i = 0; i < this.families.length; i++) {
            get.addColumn(this.families[i].getBytes(), this.columns[i].getBytes());
        }
        gets.add(get);
        if (gets.size() > this.BUFFER_LINE) {
            Result[] getResults = this.htable.get(gets);
            for (Result resu : getResults) {
                if (null != resu) {
                    Line line = sender.createLine();
                    for (int i = 0; i < this.families.length; i++) {
                        byte[] value = resu.getValue(this.families[i].getBytes(), this.columns[i].getBytes());
                        if (null == value) {
                            line.addField(null);
                        } else {
                            line.addField(new String(value, encode));
                        }
                    }
                    line.addField(new String(resu.getRow(), encode));
                }
            }
            return true;
        }
    }
    return false;
}
/*
 * Add to each of the regions in .META. a value. Key is the startrow of the
 * region (except it's 'aaa' for the first region). Actual value is the row name.
 * @param expected
 * @return
 * @throws IOException
 */
private static int addToEachStartKey(final int expected) throws IOException {
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    HTable meta = new HTable(TEST_UTIL.getConfiguration(), HConstants.META_TABLE_NAME);
    int rows = 0;
    Scan scan = new Scan();
    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    ResultScanner s = meta.getScanner(scan);
    for (Result r = null; (r = s.next()) != null; ) {
        byte[] b = r.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
        if (b == null || b.length <= 0) break;
        HRegionInfo hri = Writables.getHRegionInfo(b);
        // If start key, add 'aaa'.
        byte[] row = getStartKey(hri);
        Put p = new Put(row);
        p.setWriteToWAL(false);
        p.add(getTestFamily(), getTestQualifier(), row);
        t.put(p);
        rows++;
    }
    s.close();
    Assert.assertEquals(expected, rows);
    t.close();
    meta.close();
    return rows;
}
public static HRegionInfo getHRegionInfo(Result data) throws IOException {
    byte[] bytes = data.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    if (bytes == null) return null;
    HRegionInfo info = Writables.getHRegionInfo(bytes);
    LOG.info("Current INFO from scan results = " + info);
    return info;
}
public static String get(final MockHTable table, final byte[] key) throws IOException {
    Get get = new Get(key);
    Result result = table.get(get);
    Assert.assertEquals(1, result.size());
    return Bytes.toString(result.getValue(SYNC_COLUMN_FAMILY, SYNC_COLUMN_QUALIFIER));
}
public List<Query> getQueries(final String creator) throws IOException {
    if (null == creator) {
        return null;
    }
    List<Query> queries = new ArrayList<Query>();
    HTableInterface htable = null;
    try {
        htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
        Get get = new Get(Bytes.toBytes(creator));
        get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
        Result result = htable.get(get);
        Query[] query = querySerializer.deserialize(
                result.getValue(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN)));
        if (null != query) {
            queries.addAll(Arrays.asList(query));
        }
    } finally {
        IOUtils.closeQuietly(htable);
    }
    return queries;
}
@Override
public GCResult createResult(Result result) {
    if (result.getRow() != null) {
        GCResult gcResult = new GCResult(result.getRow());
        gcResult.setChromosome(result.getValue(Bytes.toBytes("chr"), Bytes.toBytes("name")));
        byte[] family = Bytes.toBytes("gc");
        gcResult.setMin(result.getValue(family, Bytes.toBytes("min")));
        gcResult.setMax(result.getValue(family, Bytes.toBytes("max")));
        gcResult.setTotalFragments(result.getValue(Bytes.toBytes("frag"), Bytes.toBytes("total")));
        return gcResult;
    }
    return null;
}
private long getTimestamp(Result r) {
    if (r == null) {
        return 0;
    } else {
        return Bytes.toLong(r.getValue(B_FAMILY, B_COLUMN_TS));
    }
}
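// A minimal write-side sketch to pair with getTimestamp(Result) above, assuming the same
// B_FAMILY / B_COLUMN_TS constants and a Table handle; the method itself is hypothetical and
// not part of the original snippet.
private void putTimestamp(Table table, byte[] row, long ts) throws IOException {
    Put put = new Put(row);
    // Store the long as its 8-byte encoding so Bytes.toLong() in getTimestamp() can decode it.
    put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
    table.put(put);
}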
private HRegionInfo nextRegion() throws IOException {
    try {
        Result results = getMetaRow();
        if (results == null) {
            return null;
        }
        byte[] regionInfoValue = results.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
        if (regionInfoValue == null || regionInfoValue.length == 0) {
            throw new NoSuchElementException(
                    "meta region entry missing " + Bytes.toString(HConstants.CATALOG_FAMILY)
                            + ":" + Bytes.toString(HConstants.REGIONINFO_QUALIFIER));
        }
        HRegionInfo region = Writables.getHRegionInfo(regionInfoValue);
        if (!Bytes.equals(region.getTableName(), this.tableName)) {
            return null;
        }
        return region;
    } catch (IOException e) {
        e = RemoteExceptionHandler.checkIOException(e);
        LOG.error("meta scanner error", e);
        metaScanner.close();
        throw e;
    }
}
@Test
public void testSaveJsonFormat() throws AvroBaseException, IOException {
    AvroBase<User, byte[]> userHAB = AvroBaseFactory.createAvroBase(new HABModule(), HAB.class, AvroFormat.JSON);
    User saved = new User();
    saved.firstName = $("Sam");
    saved.lastName = $("Pullara");
    saved.birthday = $("1212");
    saved.gender = GenderType.MALE;
    saved.email = $("*****@*****.**");
    saved.description = $("CTO of RightTime, Inc. and one of the founders of BagCheck");
    saved.title = $("Engineer");
    saved.image = $("http://farm1.static.flickr.com/1/buddyicons/[email protected]");
    saved.location = $("Los Altos, CA");
    saved.mobile = $("4155551212");
    saved.password = ByteBuffer.wrap($("").getBytes());
    byte[] row = Bytes.toBytes("spullara");
    userHAB.put(row, saved);
    Row<User, byte[]> loaded = userHAB.get(row);
    assertEquals(saved, loaded.value);

    HTablePool pool = new HTablePool();
    HTableInterface table = pool.getTable(TABLE);
    try {
        Get get = new Get(row);
        byte[] DATA = Bytes.toBytes("d");
        get.addColumn(COLUMN_FAMILY, DATA);
        Result result = table.get(get);
        assertTrue(Bytes.toString(result.getValue(COLUMN_FAMILY, DATA)).startsWith("{"));
    } finally {
        pool.putTable(table);
    }
}
public static void measureReadingHbase(HTable table, byte[] key, PrintWriter out) {
    try {
        long start1 = System.nanoTime();
        Get g = new Get(key);
        Result r = table.get(g);
        long start2 = System.nanoTime();
        byte[] value = r.getValue(Import.family, Import.qualifier);
        if (value != null) {
            long start3 = System.nanoTime();
            BufferedOutputStream outToNull = new BufferedOutputStream(new ByteArrayOutputStream());
            outToNull.write(value);
            outToNull.close();
            long stop = System.nanoTime();
            synchronized (out) {
                out.println("NAN, "
                        + (double) (stop - start1) / 1000000. + ", "
                        + (double) (start2 - start1) / 1000000. + ", "
                        + (double) (start3 - start2) / 1000000. + ", "
                        + (double) (stop - start3) / 1000000.);
            }
        } else {
            System.out.println("No image is extracted with name key " + new String(key));
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
private void testSimpleScanInternal(
        long origKeyPrefix, Scan scan, int numValues, int startWithValue,
        int seekIntervalMinValue, int seekIntervalMaxValue) throws IOException {
    int valuesCountInSeekInterval = writeTestData(
            origKeyPrefix, numValues, startWithValue, seekIntervalMinValue, seekIntervalMaxValue);

    // TODO: add some filters to the scan for better testing
    ResultScanner distributedScanner = DistributedScanner.create(hTable, scan, keyDistributor);
    Result previous = null;
    int countMatched = 0;
    for (Result current : distributedScanner) {
        countMatched++;
        if (previous != null) {
            byte[] currentRowOrigKey = keyDistributor.getOriginalKey(current.getRow());
            byte[] previousRowOrigKey = keyDistributor.getOriginalKey(previous.getRow());
            Assert.assertTrue(Bytes.compareTo(currentRowOrigKey, previousRowOrigKey) >= 0);
            int currentValue = Bytes.toInt(current.getValue(CF, QUAL));
            Assert.assertTrue(currentValue >= seekIntervalMinValue);
            Assert.assertTrue(currentValue <= seekIntervalMaxValue);
        }
        previous = current;
    }
    Assert.assertEquals(valuesCountInSeekInterval, countMatched);
}
public void map(ImmutableBytesWritable row, Result value, Context context)
        throws IOException, InterruptedException {
    String val = new String(value.getValue(CF, ATTR1));
    text.set(val);
    // we can only emit Writables...
    context.write(text, ONE);
}
/*
 * (non-Javadoc)
 *
 * @see com.hazelcast.core.MapLoader#load(java.lang.Object)
 */
@Override
public String load(String key) {
    String retval = null;
    if (allowLoad) {
        HTableInterface table = null;
        try {
            Get g = new Get(Bytes.toBytes(key));
            table = pool.getTable(tableName);
            Result r = table.get(g);
            byte[] value = r.getValue(family, qualifier);
            if (value != null) {
                if (outputFormatType == StoreFormatType.SMILE) {
                    retval = jsonSmileConverter.convertFromSmile(value);
                } else {
                    retval = new String(value);
                }
            }
        } catch (IOException e) {
            LOG.error("Value did not exist for row: " + key, e);
        } finally {
            if (pool != null && table != null) {
                pool.putTable(table);
            }
        }
    }
    return retval;
}
@Override
protected void map(ImmutableBytesWritable rowkey, Result result, Context context)
        throws IOException, InterruptedException {
    byte[] c = result.getValue(Bytes.toBytes("products"), Bytes.toBytes("product_category_id"));
    byte[] d = result.getValue(Bytes.toBytes("orders"), Bytes.toBytes("order_date"));
    String thg = "N";
    String[] dStr = Bytes.toString(d).split("\\W+");
    if (dStr[1].equals("11")) {
        int date = Integer.parseInt(dStr[2]);
        if (date >= 21) thg = "Y";
    }
    // Generate an output key/value pair.
    // context.write(outputKey, outputValue) -> (123:Y, 1), (124:N, 1)
    context.write(new Text(Bytes.toString(c) + ":" + thg), new IntWritable(1));
}
public Optional<byte[]> get(
        Optional<String> table, Optional<String> family,
        Optional<String> qualifier, Optional<String> key) {
    if (!valid) {
        Logger.error("CANNOT GET! NO VALID CONNECTION");
        return Optional.empty();
    }
    if (table.isPresent() && family.isPresent() && qualifier.isPresent()
            && key.isPresent() && !key.get().isEmpty()) {
        try {
            final Table htable = connection.getTable(TableName.valueOf(table.get()));
            Result result = htable.get(new Get(key.get().getBytes("UTF8")));
            return Optional.ofNullable(
                    result.getValue(family.get().getBytes("UTF8"), qualifier.get().getBytes("UTF8")));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return Optional.empty();
}
public Optional<Response> get(Optional<Request> request) {
    if (!valid) {
        Logger.error("CANNOT GET! NO VALID CONNECTION");
        return Optional.empty();
    }
    Response response = new Response();
    if (request.isPresent()) {
        Request r = request.get();
        response.key = r.key;
        response.table = r.table;
        try {
            final Table htable = connection.getTable(TableName.valueOf(r.table));
            Result result = htable.get(new Get(r.key));
            if (result == null || result.isEmpty()) {
                return Optional.empty();
            }
            r.columns.forEach(c -> response.columns.add(new Request.Column(
                    c.family, c.qualifier,
                    result.getValue(c.family.getBytes(), c.qualifier.getBytes()))));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return Optional.of(response);
}
private static Map<byte[], byte[]> packFamilyMap(
        String tableName, Result result, byte[] cf, String[] selectArray) {
    Map<byte[], byte[]> fmap = new HashMap<byte[], byte[]>();
    // Cache one compound-field value so its sub-fields can be parsed without re-reading it
    Map<String, Map<String, String>> compoundFieldValueMap = new HashMap<String, Map<String, String>>();
    for (String column : selectArray) {
        String[] temp = processField(column, Bytes.toString(cf));
        byte[] value;
        if ("ca_summary_optimize".equals(tableName) && compoundFieldMap.containsKey(temp[1])) {
            value = result.getValue(Bytes.toBytes(temp[0]), Bytes.toBytes(compoundFieldMap.get(temp[1])));
            if (compoundFieldValueMap.containsKey(compoundFieldMap.get(temp[1]))) {
                if (compoundFieldValueMap.get(compoundFieldMap.get(temp[1])).get(temp[1]) != null) {
                    value = Bytes.toBytes(
                            compoundFieldValueMap.get(compoundFieldMap.get(temp[1])).get(temp[1]));
                } else {
                    value = Bytes.toBytes("-");
                }
            } else {
                byte[] comValue = result.getValue(
                        Bytes.toBytes(temp[0]), Bytes.toBytes(compoundFieldMap.get(temp[1])));
                if (comValue != null) {
                    String str = Bytes.toString(comValue).replace("\n", "");
                    Map<String, String> map =
                            compoundFieldValueToMap(str, tableName, compoundFieldMap.get(temp[1]));
                    compoundFieldValueMap.put(compoundFieldMap.get(temp[1]), map);
                    if (map.get(temp[1]) != null) {
                        value = Bytes.toBytes(map.get(temp[1]));
                    } else {
                        value = Bytes.toBytes("-");
                    }
                }
            }
        } else {
            value = result.getValue(Bytes.toBytes(temp[0]), Bytes.toBytes(temp[1]));
        }
        if (value == null) {
            value = Bytes.toBytes(IndexConfig.getDefaultValue(tableName, column));
        }
        fmap.put(Bytes.toBytes(column), value);
    }
    return fmap;
}
/**
 * Get the field out of the row without checking whether parsing is needed. This is called by
 * both getField and getFieldsAsList.
 *
 * @param fieldID The id of the field starting from 0.
 * @return The value of the field
 */
private Object uncheckedGetField(int fieldID) {
    LazyObjectBase[] fields = getFields();
    boolean[] fieldsInited = getFieldInited();
    if (!fieldsInited[fieldID]) {
        fieldsInited[fieldID] = true;
        ColumnMapping colMap = columnsMapping[fieldID];

        if (!colMap.hbaseRowKey && !colMap.hbaseTimestamp && colMap.qualifierName == null) {
            // it is a column family
            // primitive type for Map<Key, Value> can be stored in binary format. Pass in the
            // qualifier prefix to cherry pick the qualifiers that match the prefix instead of
            // picking up everything
            ((LazyHBaseCellMap) fields[fieldID]).init(
                    result, colMap.familyNameBytes, colMap.binaryStorage,
                    colMap.qualifierPrefixBytes, colMap.isDoPrefixCut());
            return fields[fieldID].getObject();
        }

        if (colMap.hbaseTimestamp) {
            // Get the latest timestamp of all the cells as the row timestamp
            long timestamp = result.rawCells()[0].getTimestamp(); // from hbase-0.96.0
            for (int i = 1; i < result.rawCells().length; i++) {
                timestamp = Math.max(timestamp, result.rawCells()[i].getTimestamp());
            }
            LazyObjectBase lz = fields[fieldID];
            if (lz instanceof LazyTimestamp) {
                ((LazyTimestamp) lz).getWritableObject().setTime(timestamp);
            } else {
                ((LazyLong) lz).getWritableObject().set(timestamp);
            }
            return lz.getObject();
        }

        byte[] bytes;
        if (colMap.hbaseRowKey) {
            bytes = result.getRow();
        } else {
            // it is a column i.e. a column-family with column-qualifier
            bytes = result.getValue(colMap.familyNameBytes, colMap.qualifierNameBytes);
        }
        if (bytes == null || isNull(oi.getNullSequence(), bytes, 0, bytes.length)) {
            fields[fieldID].setNull();
        } else {
            ByteArrayRef ref = new ByteArrayRef();
            ref.setData(bytes);
            fields[fieldID].init(ref, 0, bytes.length);
        }
    }
    return fields[fieldID].getObject();
}
/** Test copy of table from sourceTable to targetTable, copying all rows from family a. */
@Test
public void testRenameFamily() throws Exception {
    TableName sourceTable = TableName.valueOf("sourceTable");
    TableName targetTable = TableName.valueOf("targetTable");

    byte[][] families = {FAMILY_A, FAMILY_B};

    Table t = TEST_UTIL.createTable(sourceTable, families);
    Table t2 = TEST_UTIL.createTable(targetTable, families);
    Put p = new Put(ROW1);
    p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data11"));
    p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Data12"));
    p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data13"));
    t.put(p);
    p = new Put(ROW2);
    p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Dat21"));
    p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data22"));
    p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Data23"));
    t.put(p);

    long currentTime = System.currentTimeMillis();
    String[] args = new String[] {
        "--new.name=" + targetTable,
        "--families=a:b",
        "--all.cells",
        "--starttime=" + (currentTime - 100000),
        "--endtime=" + (currentTime + 100000),
        "--versions=1",
        sourceTable.getNameAsString()
    };
    assertNull(t2.get(new Get(ROW1)).getRow());

    assertTrue(runCopy(args));

    assertNotNull(t2.get(new Get(ROW1)).getRow());
    Result res = t2.get(new Get(ROW1));
    byte[] b1 = res.getValue(FAMILY_B, QUALIFIER);
    assertEquals("Data13", new String(b1));

    assertNotNull(t2.get(new Get(ROW2)).getRow());
    res = t2.get(new Get(ROW2));
    b1 = res.getValue(FAMILY_A, QUALIFIER);
    // Data from the family of B is not copied
    assertNull(b1);
}
@Override
protected SizeResult createResult(Result result) {
    if (result.getRow() != null) {
        SizeResult size = new SizeResult(result.getRow());
        size.setProbability(result.getValue(Bytes.toBytes("bp"), Bytes.toBytes("prob")));
        return size;
    }
    return null;
}
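// The snippets above share one pattern: Result.getValue(family, qualifier) returns null when the
// cell is absent, so callers guard before decoding. Below is a minimal, self-contained sketch of
// that pattern; the connection, table name, row key, family, and qualifier are assumptions used
// only for illustration.
public static String readCell(Connection connection) throws IOException {
    try (Table table = connection.getTable(TableName.valueOf("example"))) {
        Result result = table.get(new Get(Bytes.toBytes("row-1")));
        byte[] raw = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"));
        // Null means the cell does not exist for this row.
        return (raw == null) ? null : Bytes.toString(raw);
    }
}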