@Override
public void process(long now, HRegion region, List<Mutation> mutations, WALEdit walEdit)
    throws IOException {
  // Scan current counter
  List<Cell> kvs = new ArrayList<Cell>();
  Scan scan = new Scan(row, row);
  scan.addColumn(FAM, COUNTER);
  doScan(region, scan, kvs);
  counter = kvs.size() == 0 ? 0 : Bytes.toInt(CellUtil.cloneValue(kvs.iterator().next()));

  // Assert counter value
  assertEquals(expectedCounter, counter);

  // Increment counter and send it to both memstore and wal edit
  counter += 1;
  expectedCounter += 1;
  Put p = new Put(row);
  KeyValue kv = new KeyValue(row, FAM, COUNTER, now, Bytes.toBytes(counter));
  p.add(kv);
  mutations.add(p);
  walEdit.add(kv);

  // We can also inject some metadata into the walEdit
  KeyValue metaKv =
      new KeyValue(
          row,
          WALEdit.METAFAMILY,
          Bytes.toBytes("I just increment counter"),
          Bytes.toBytes(counter));
  walEdit.add(metaKv);
}
private Cell toOffheapCell(
    ByteBuffer valAndTagsBuffer, int vOffset, int tagsLenSerializationSize) {
  ByteBuffer tagsBuf = HConstants.EMPTY_BYTE_BUFFER;
  int tOffset = 0;
  if (this.includeTags) {
    if (this.tagCompressionContext == null) {
      tagsBuf = valAndTagsBuffer;
      tOffset = vOffset + this.valueLength + tagsLenSerializationSize;
    } else {
      tagsBuf = ByteBuffer.wrap(Bytes.copy(tagsBuffer, 0, this.tagsLength));
      tOffset = 0;
    }
  }
  return new OffheapDecodedCell(
      ByteBuffer.wrap(Bytes.copy(keyBuffer, 0, this.keyLength)),
      currentKey.getRowLength(),
      currentKey.getFamilyOffset(),
      currentKey.getFamilyLength(),
      currentKey.getQualifierOffset(),
      currentKey.getQualifierLength(),
      currentKey.getTimestamp(),
      currentKey.getTypeByte(),
      valAndTagsBuffer,
      vOffset,
      this.valueLength,
      memstoreTS,
      tagsBuf,
      tOffset,
      this.tagsLength);
}
@Override
public void receive(Message msg) {
  try {
    LOG.debug(
        "Table "
            + Bytes.toString(idxCoprocessor.getTableName())
            + " received message: Table "
            + Bytes.toString((byte[]) msg.getObject())
            + " must update its index columns cache.");
    if (Arrays.equals((byte[]) msg.getObject(), idxCoprocessor.getTableName())) {
      LOG.debug(
          "Table "
              + Bytes.toString(idxCoprocessor.getTableName())
              + " is updating its indexed columns cache.");
      idxCoprocessor.initIndexedColumnsForTable();
    }
  } catch (IOException e1) {
    LOG.error("Failed to update from the master index table.", e1);
  } catch (Exception e) {
    LOG.error(
        "Failed to read contents of message; updating from master index table just in case.", e);
    try {
      idxCoprocessor.initIndexedColumnsForTable();
    } catch (IOException e1) {
      LOG.error("Failed to update from the master index table.", e1);
    }
  }
}
public String[] getObjectIDs(String objectType, String... tags) throws IOException {
  List<String> ret = new ArrayList<String>();
  FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  SingleColumnValueFilter filter1 =
      new SingleColumnValueFilter(
          "tags".getBytes(), "OBJECTTYPE".getBytes(), CompareOp.EQUAL, Bytes.toBytes(objectType));
  list.addFilter(filter1);
  for (String tag : tags) {
    SingleColumnValueFilter filter2 =
        new SingleColumnValueFilter(
            "tags".getBytes(), tag.toUpperCase().getBytes(), CompareOp.EQUAL, Bytes.toBytes(1));
    filter2.setFilterIfMissing(true);
    list.addFilter(filter2);
  }
  Scan s = new Scan();
  s.setFilter(list);
  s.setMaxVersions(1);
  ResultScanner scanner = htable.getScanner(s);
  try {
    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
      String localObjectType = new String(rr.getValue("tags".getBytes(), "OBJECTTYPE".getBytes()));
      String localObjectId = new String(rr.getValue("tags".getBytes(), "OBJECTID".getBytes()));
      ret.add(localObjectId);
    }
  } finally {
    scanner.close();
  }
  return ret.toArray(new String[] {});
}
@Override
public OutputStream getOutputStream(Blob blob) throws BlobException {
  UUID uuid = UUID.randomUUID();
  byte[] blobKey = Bytes.toBytes(uuid.getMostSignificantBits());
  blobKey = Bytes.add(blobKey, Bytes.toBytes(uuid.getLeastSignificantBits()));
  return new HBaseBlobOutputStream(table, blobKey, blob);
}
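Since the blob key above is just the UUID's two longs laid out back to back, a matching decoder is straightforward; this is a minimal hedged sketch (the helper name is an assumption, not part of the original store):

// Sketch, assuming the 16-byte key layout written above: [8 bytes MSB][8 bytes LSB].
private static UUID blobKeyToUuid(byte[] blobKey) {
  long msb = Bytes.toLong(blobKey, 0); // first 8 bytes
  long lsb = Bytes.toLong(blobKey, 8); // last 8 bytes
  return new UUID(msb, lsb);
}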
public static VesselLocation getLocation(Table VTLocation_Table, String RowKey)
    throws IOException {
  if (RowKey != null) {
    Get get = new Get(Bytes.toBytes(RowKey));
    Result result = VTLocation_Table.get(get);
    VesselLocation VL = new VesselLocation();
    for (Cell cell : result.rawCells()) {
      String Qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
      String Value = Bytes.toString(CellUtil.cloneValue(cell));
      if (Qualifier.equals("coordinates")) {
        VL.coordinates = Value;
      } else if (Qualifier.equals("speed")) {
        VL.speed = Value;
      } else if (Qualifier.equals("destination")) {
        VL.destination = Value;
      } else if (Qualifier.equals("timestamp")) {
        VL.recordtime = DateTime.parse(Value, rawformatter).getMillis();
      } else if (Qualifier.equals("previouslocation")) {
        VL.previouslocation = Value;
      } else if (Qualifier.equals("nextlocation")) {
        VL.nextlocation = Value;
      }
    }
    return VL;
  } else {
    return null;
  }
}
/** Ensure that expired delete family markers don't override valid puts */
public void testExpiredDeleteFamily() throws Exception {
  long now = System.currentTimeMillis();
  KeyValue[] kvs =
      new KeyValue[] {
        new KeyValue(
            Bytes.toBytes("R1"),
            Bytes.toBytes("cf"),
            null,
            now - 1000,
            KeyValue.Type.DeleteFamily),
        KeyValueTestUtil.create("R1", "cf", "a", now - 10, KeyValue.Type.Put, "dont-care"),
      };
  List<KeyValueScanner> scanners = scanFixture(kvs);
  Scan scan = new Scan();
  scan.setMaxVersions(1);
  // scanner with ttl equal to 500
  ScanInfo scanInfo = new ScanInfo(CF, 0, 1, 500, false, 0, KeyValue.COMPARATOR);
  ScanType scanType = ScanType.USER_SCAN;
  StoreScanner scanner = new StoreScanner(scan, scanInfo, scanType, null, scanners);

  List<KeyValue> results = new ArrayList<KeyValue>();
  assertEquals(true, scanner.next(results));
  assertEquals(1, results.size());
  assertEquals(kvs[1], results.get(0));
  results.clear();

  assertEquals(false, scanner.next(results));
}
@Test
public void testSingleByteFieldIndex() throws Exception {
  final String INDEX_NAME = "singleByteField";
  IndexManager indexManager = new IndexManager(TEST_UTIL.getConfiguration());

  IndexDefinition indexDef = new IndexDefinition(INDEX_NAME, INDEX_NAME);
  ByteIndexFieldDefinition fieldDef = indexDef.addByteField("field1");
  fieldDef.setLength(3);
  indexManager.createIndex(indexDef);

  Index index = indexManager.getIndex(INDEX_NAME, INDEX_NAME);

  // Create a few index entries, inserting them in non-sorted order
  byte[][] values = {Bytes.toBytes("aaa"), Bytes.toBytes("aab")};
  for (int i = 0; i < values.length; i++) {
    IndexEntry entry = new IndexEntry();
    entry.addField("field1", values[i]);
    index.addEntry(entry, Bytes.toBytes("key" + i));
  }

  Query query = new Query();
  query.setRangeCondition("field1", Bytes.toBytes("aaa"), Bytes.toBytes("aab"));
  QueryResult result = index.performQuery(query);

  assertResultIds(result, "key0", "key1");
}
@Test
public void testData() throws Exception {
  final String INDEX_NAME = "dataIndex";
  IndexManager indexManager = new IndexManager(TEST_UTIL.getConfiguration());

  IndexDefinition indexDef = new IndexDefinition(INDEX_NAME, INDEX_NAME);
  indexDef.addStringField("field1");
  indexManager.createIndex(indexDef);

  Index index = indexManager.getIndex(INDEX_NAME, INDEX_NAME);

  String[] values = new String[] {"foo", "bar"};
  for (String value : values) {
    IndexEntry entry = new IndexEntry();
    entry.addField("field1", value);
    entry.addData(Bytes.toBytes("originalValue"), Bytes.toBytes(value));
    index.addEntry(entry, Bytes.toBytes(value));
  }

  Query query = new Query();
  query.setRangeCondition("field1", Query.MIN_VALUE, Query.MAX_VALUE);
  QueryResult result = index.performQuery(query);

  assertNotNull(result.next());
  assertEquals("bar", result.getDataAsString("originalValue"));

  assertNotNull(result.next());
  assertEquals("foo", result.getDataAsString("originalValue"));
}
@Test
public void testSingleLongFieldIndex() throws Exception {
  final String INDEX_NAME = "singleLongField";
  IndexManager indexManager = new IndexManager(TEST_UTIL.getConfiguration());

  IndexDefinition indexDef = new IndexDefinition(INDEX_NAME, INDEX_NAME);
  indexDef.addLongField("field1");
  indexManager.createIndex(indexDef);

  Index index = indexManager.getIndex(INDEX_NAME, INDEX_NAME);

  long[] values = {Long.MIN_VALUE, -1, 0, 1, Long.MAX_VALUE};
  for (long value : values) {
    IndexEntry entry = new IndexEntry();
    entry.addField("field1", value);
    index.addEntry(entry, Bytes.toBytes("key" + value));
  }

  Query query = new Query();
  query.setRangeCondition("field1", Long.MIN_VALUE, Long.MAX_VALUE);
  QueryResult result = index.performQuery(query);

  for (long value : values) {
    assertEquals("key" + value, Bytes.toString(result.next()));
  }
  assertNull(result.next());
}
@Test
public void testDeleteFromIndex() throws Exception {
  final String INDEX_NAME = "deleteFromIndex";
  IndexManager indexManager = new IndexManager(TEST_UTIL.getConfiguration());

  IndexDefinition indexDef = new IndexDefinition(INDEX_NAME, INDEX_NAME);
  indexDef.addStringField("field1");
  indexManager.createIndex(indexDef);

  Index index = indexManager.getIndex(INDEX_NAME, INDEX_NAME);

  // Add the entry
  IndexEntry entry = new IndexEntry();
  entry.addField("field1", "foobar");
  index.addEntry(entry, Bytes.toBytes("key1"));

  // Test it is there
  Query query = new Query();
  query.addEqualsCondition("field1", "foobar");
  QueryResult result = index.performQuery(query);
  assertEquals("key1", Bytes.toString(result.next()));
  assertNull(result.next());

  // Delete the entry
  index.removeEntry(entry, Bytes.toBytes("key1"));

  // Test it is gone
  result = index.performQuery(query);
  assertNull(result.next());

  // Delete the entry again, this should not give an error
  index.removeEntry(entry, Bytes.toBytes("key1"));
}
/**
 * Write random values to the writer assuming a table created using {@link #FAMILIES} as column
 * family descriptors
 */
private void writeRandomKeyValues(
    RecordWriter<ImmutableBytesWritable, Cell> writer,
    TaskAttemptContext context,
    Set<byte[]> families,
    int numRows)
    throws IOException, InterruptedException {
  byte[] keyBytes = new byte[Bytes.SIZEOF_INT];
  int valLength = 10;
  byte[] valBytes = new byte[valLength];

  int taskId = context.getTaskAttemptID().getTaskID().getId();
  assert taskId < Byte.MAX_VALUE : "Unit tests don't support > 127 tasks!";
  final byte[] qualifier = Bytes.toBytes("data");
  Random random = new Random();
  for (int i = 0; i < numRows; i++) {
    Bytes.putInt(keyBytes, 0, i);
    random.nextBytes(valBytes);
    ImmutableBytesWritable key = new ImmutableBytesWritable(keyBytes);

    for (byte[] family : families) {
      Cell kv = new KeyValue(keyBytes, family, qualifier, valBytes);
      writer.write(key, kv);
    }
  }
}
@Test
public void bulkLoadHFileTest() throws Exception {
  String testName = TestRegionObserverInterface.class.getName() + ".bulkLoadHFileTest";
  TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".bulkLoadHFileTest");
  Configuration conf = util.getConfiguration();
  HTable table = util.createTable(tableName, new byte[][] {A, B, C});
  try {
    verifyMethodResult(
        SimpleRegionObserver.class,
        new String[] {"hadPreBulkLoadHFile", "hadPostBulkLoadHFile"},
        tableName,
        new Boolean[] {false, false});

    FileSystem fs = util.getTestFileSystem();
    final Path dir = util.getDataTestDirOnTestFS(testName).makeQualified(fs);
    Path familyDir = new Path(dir, Bytes.toString(A));

    createHFile(util.getConfiguration(), fs, new Path(familyDir, Bytes.toString(A)), A, A);

    // Bulk load
    new LoadIncrementalHFiles(conf).doBulkLoad(dir, new HTable(conf, tableName));

    verifyMethodResult(
        SimpleRegionObserver.class,
        new String[] {"hadPreBulkLoadHFile", "hadPostBulkLoadHFile"},
        tableName,
        new Boolean[] {true, true});
  } finally {
    util.deleteTable(tableName);
    table.close();
  }
}
@Test
public void testCheckAndDeleteHooks() throws IOException {
  TableName tableName =
      TableName.valueOf(TEST_TABLE.getNameAsString() + ".testCheckAndDeleteHooks");
  HTable table = util.createTable(tableName, new byte[][] {A, B, C});
  try {
    Put p = new Put(Bytes.toBytes(0));
    p.add(A, A, A);
    table.put(p);
    table.flushCommits();

    Delete d = new Delete(Bytes.toBytes(0));
    table.delete(d);

    verifyMethodResult(
        SimpleRegionObserver.class,
        new String[] {
          "hadPreCheckAndDelete", "hadPreCheckAndDeleteAfterRowLock", "hadPostCheckAndDelete"
        },
        tableName,
        new Boolean[] {false, false, false});

    table.checkAndDelete(Bytes.toBytes(0), A, A, A, d);

    verifyMethodResult(
        SimpleRegionObserver.class,
        new String[] {
          "hadPreCheckAndDelete", "hadPreCheckAndDeleteAfterRowLock", "hadPostCheckAndDelete"
        },
        tableName,
        new Boolean[] {true, true, true});
  } finally {
    util.deleteTable(tableName);
    table.close();
  }
}
private static VesselTrackInfo getTrackInfo(Table TrackInfo_Table, String IMO_str)
    throws IOException {
  Get get = new Get(Bytes.toBytes(IMO_str));
  get.addColumn(details, lastlocation);
  get.addColumn(details, firstrecordtime);
  get.addColumn(details, lastrecordtime);

  Result result = TrackInfo_Table.get(get);

  byte[] last_location = result.getValue(details, lastlocation);
  byte[] first_recordtime = result.getValue(details, firstrecordtime);
  byte[] last_recordtime = result.getValue(details, lastrecordtime);

  VesselTrackInfo trackinfo = new VesselTrackInfo();
  trackinfo.LastLocation = last_location;

  if (first_recordtime != null) {
    trackinfo.FirstRecordTime =
        DateTime.parse(Bytes.toString(first_recordtime), rawformatter).getMillis();
  }
  if (last_recordtime != null) {
    trackinfo.LastRecordTime =
        DateTime.parse(Bytes.toString(last_recordtime), rawformatter).getMillis();
  }
  return trackinfo;
}
@BeforeClass
public static void beforeClass() throws Exception {
  HBaseTestUtils.getMiniCluster();
  byte[] tableNameBytes = Bytes.toBytes(tableName);
  byte[][] cfNames = {Bytes.toBytes("meta"), Bytes.toBytes("conflict"), Bytes.toBytes("_s")};
  HBaseTestUtils.util.createTable(tableNameBytes, cfNames);
}
public static List<Delete> GetDeleteEventsBetween(
    Table VTEvent_Table, String imo_str, long first_timestamp, long last_timestamp)
    throws IOException {
  // scan 'cdb_vessel:vessel_event',{FILTER=>"(PrefixFilter('0000003162')"}
  Scan GetEventsBetween = new Scan();
  GetEventsBetween
      .setStartRow(
          Bytes.toBytes(imo_str + LpadNum(Long.MAX_VALUE - last_timestamp, 19) + "0000000000"))
      .setStopRow(
          Bytes.toBytes(
              imo_str + LpadNum(Long.MAX_VALUE - first_timestamp + 1, 19) + "9999999999"))
      .addColumn(details, exittime);
  GetEventsBetween.setCaching(100);

  Filter ExistTimeValuefilter =
      new ValueFilter(
          CompareFilter.CompareOp.LESS_OR_EQUAL,
          new BinaryComparator(
              Bytes.toBytes(new DateTime(last_timestamp).toString(rawformatter))));
  GetEventsBetween.setFilter(ExistTimeValuefilter);

  ResultScanner Result_ExistingEvents = VTEvent_Table.getScanner(GetEventsBetween);
  List<Delete> deletes = new ArrayList<Delete>();
  for (Result res : Result_ExistingEvents) {
    deletes.add(new Delete(res.getRow()));
  }
  Result_ExistingEvents.close();
  return deletes;
}
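GetDeleteEventsBetween only collects the deletes; a minimal hedged sketch of a caller applying them (the wrapper method is an assumption, Table.delete(List<Delete>) is the standard batch-delete call):

// Hedged usage sketch, not part of the original class: collect and apply the deletes.
public static void deleteEventsBetween(
    Table VTEvent_Table, String imo_str, long first_timestamp, long last_timestamp)
    throws IOException {
  List<Delete> deletes =
      GetDeleteEventsBetween(VTEvent_Table, imo_str, first_timestamp, last_timestamp);
  if (!deletes.isEmpty()) {
    VTEvent_Table.delete(deletes); // issues the whole batch of deletes against the table
  }
}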
/**
 * Creates a HBase {@link Increment} from a Storm {@link Tuple}
 *
 * @param tuple The {@link Tuple}
 * @param increment The amount to increment the counter by
 * @return {@link Increment}
 */
public Increment getIncrementFromTuple(final Tuple tuple, final long increment) {
  byte[] rowKey = Bytes.toBytes(tuple.getStringByField(tupleRowKeyField));

  Increment inc = new Increment(rowKey);
  inc.setWriteToWAL(writeToWAL);

  if (columnFamilies.size() > 0) {
    for (String cf : columnFamilies.keySet()) {
      byte[] cfBytes = Bytes.toBytes(cf);
      for (String cq : columnFamilies.get(cf)) {
        byte[] val;
        try {
          val = Bytes.toBytes(tuple.getStringByField(cq));
        } catch (IllegalArgumentException ex) {
          // if cq isn't a tuple field, use cq for counter instead of tuple value
          val = Bytes.toBytes(cq);
        }
        inc.addColumn(cfBytes, val, increment);
      }
    }
  }
  return inc;
}
protected static byte[] constructKey(int token_i, String udid_s) {
  byte[] udid = udid_s.getBytes();
  // Salt the key: the first UDID byte goes into the high-order byte and is OR'd with the
  // token, then the full UDID is appended, giving a row key of [4-byte salt][udid bytes].
  int salt = ((int) udid[0]) << 24 | (token_i);
  byte[] key = Bytes.add(Bytes.toBytes(salt), udid);
  return key;
}
/**
 * Creates a HBase {@link Put} from a Storm {@link Tuple}
 *
 * @param tuple The {@link Tuple}
 * @return {@link Put}
 */
public Put getPutFromTuple(final Tuple tuple) {
  byte[] rowKey = Bytes.toBytes(tuple.getStringByField(tupleRowKeyField));

  long ts = 0;
  if (!tupleTimestampField.equals("")) {
    ts = tuple.getLongByField(tupleTimestampField);
  }

  Put p = new Put(rowKey);
  p.setWriteToWAL(writeToWAL);

  if (columnFamilies.size() > 0) {
    for (String cf : columnFamilies.keySet()) {
      byte[] cfBytes = Bytes.toBytes(cf);
      for (String cq : columnFamilies.get(cf)) {
        byte[] cqBytes = Bytes.toBytes(cq);
        byte[] val = Bytes.toBytes(tuple.getStringByField(cq));

        if (ts > 0) {
          p.add(cfBytes, cqBytes, ts, val);
        } else {
          p.add(cfBytes, cqBytes, val);
        }
      }
    }
  }
  return p;
}
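A minimal hedged sketch of how the Put built above might be written out, assuming this helper sits in the same class as getPutFromTuple and the caller owns an HTable (the method name is an assumption, not part of the original mapper):

// Hedged sketch, assuming it lives alongside getPutFromTuple above.
public void writePutFromTuple(final Tuple tuple, final HTable table) throws IOException {
  Put put = getPutFromTuple(tuple);
  table.put(put);
  table.flushCommits(); // only needed when auto-flush has been disabled on the table
}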
private static byte[][] makeN(byte[] base, int n) {
  byte[][] ret = new byte[n][];
  for (int i = 0; i < n; i++) {
    ret[i] = Bytes.add(base, Bytes.toBytes(String.format("%04d", i)));
  }
  return ret;
}
public List<String> getStationsNearPoint_Schema2(double lat, double lon) throws IOException {
  Scan scan = new Scan();
  scan.addFamily(BixiConstant.SCHEMA2_CLUSTER_FAMILY_NAME.getBytes());
  InternalScanner scanner =
      ((RegionCoprocessorEnvironment) getEnvironment()).getRegion().getScanner(scan);

  boolean hasMoreResult = false;
  List<KeyValue> res = new ArrayList<KeyValue>();
  List<String> result = new ArrayList<String>();
  try {
    do {
      hasMoreResult = scanner.next(res);
      for (KeyValue kv : res) {
        String clusterId = Bytes.toString(kv.getRow());
        String[] parts = clusterId.split(":");
        double cLat = Double.parseDouble(parts[0]);
        double cLon = Double.parseDouble(parts[1]);
        double dx = Double.parseDouble(parts[2]);
        double dy = Double.parseDouble(parts[3]);
        double distx = lat - cLat;
        double disty = lon - cLon;
        if (distx >= 0 && distx <= dx && disty >= 0 && disty <= dy) {
          // get stations in cluster
          result.add(Bytes.toString(kv.getQualifier()));
        }
      }
      res.clear();
    } while (hasMoreResult);
  } finally {
    scanner.close();
  }
  return result;
}
public void verifyInvocationResults(Integer[] selectQualifiers, Integer[] expectedQualifiers)
    throws Exception {
  Get get = new Get(ROW_BYTES);
  for (int i = 0; i < selectQualifiers.length; i++) {
    get.addColumn(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + selectQualifiers[i]));
  }
  get.setFilter(new InvocationRecordFilter());

  List<KeyValue> expectedValues = new ArrayList<KeyValue>();
  for (int i = 0; i < expectedQualifiers.length; i++) {
    expectedValues.add(
        new KeyValue(
            ROW_BYTES,
            FAMILY_NAME_BYTES,
            Bytes.toBytes(QUALIFIER_PREFIX + expectedQualifiers[i]),
            expectedQualifiers[i],
            Bytes.toBytes(VALUE_PREFIX + expectedQualifiers[i])));
  }

  Scan scan = new Scan(get);
  List<Cell> actualValues = new ArrayList<Cell>();
  List<Cell> temp = new ArrayList<Cell>();
  InternalScanner scanner = this.region.getScanner(scan);
  while (scanner.next(temp)) {
    actualValues.addAll(temp);
    temp.clear();
  }
  actualValues.addAll(temp);
  Assert.assertTrue(
      "Actual values " + actualValues + " differ from the expected values:" + expectedValues,
      expectedValues.equals(actualValues));
}
/**
 * Test KeyValues with negative timestamp.
 *
 * @throws IOException On test failure.
 */
@Test
public void testNegativeTimestamps() throws IOException {
  List<KeyValue> kvList = new ArrayList<KeyValue>();
  byte[] row = new byte[0];
  byte[] family = new byte[0];
  byte[] qualifier = new byte[0];
  byte[] value = new byte[0];
  if (includesTags) {
    byte[] metaValue1 = Bytes.toBytes("metaValue1");
    byte[] metaValue2 = Bytes.toBytes("metaValue2");
    kvList.add(
        new KeyValue(
            row,
            family,
            qualifier,
            0L,
            value,
            new Tag[] {new ArrayBackedTag((byte) 1, metaValue1)}));
    kvList.add(
        new KeyValue(
            row,
            family,
            qualifier,
            0L,
            value,
            new Tag[] {new ArrayBackedTag((byte) 1, metaValue2)}));
  } else {
    kvList.add(new KeyValue(row, family, qualifier, -1L, Type.Put, value));
    kvList.add(new KeyValue(row, family, qualifier, -2L, Type.Put, value));
  }
  testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
}
/** @return string */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append("row=");
  sb.append(Bytes.toString(this.row));
  sb.append(", ts=");
  sb.append(this.ts);
  sb.append(", families={");
  boolean moreThanOne = false;
  for (Map.Entry<byte[], List<KeyValue>> entry : this.familyMap.entrySet()) {
    if (moreThanOne) {
      sb.append(", ");
    } else {
      moreThanOne = true;
    }
    sb.append("(family=");
    sb.append(Bytes.toString(entry.getKey()));
    sb.append(", keyvalues=(");
    boolean moreThanOneB = false;
    for (KeyValue kv : entry.getValue()) {
      if (moreThanOneB) {
        sb.append(", ");
      } else {
        moreThanOneB = true;
      }
      sb.append(kv.toString());
    }
    // close both the "keyvalues=(" and the "(family=" groups
    sb.append("))");
  }
  sb.append("}");
  return sb.toString();
}
private void testAlgorithm(
    byte[] encodedData, ByteBuffer unencodedDataBuf, DataBlockEncoder encoder) throws IOException {
  // decode
  ByteArrayInputStream bais =
      new ByteArrayInputStream(
          encodedData, ENCODED_DATA_OFFSET, encodedData.length - ENCODED_DATA_OFFSET);
  DataInputStream dis = new DataInputStream(bais);
  ByteBuffer actualDataset;
  HFileContext meta =
      new HFileContextBuilder()
          .withHBaseCheckSum(false)
          .withIncludesMvcc(includesMemstoreTS)
          .withIncludesTags(includesTags)
          .withCompression(Compression.Algorithm.NONE)
          .build();
  actualDataset = encoder.decodeKeyValues(dis, encoder.newDataBlockDecodingContext(meta));
  actualDataset.rewind();

  // this is because in case of prefix tree the decoded stream will not have the mvcc in it.
  assertEquals(
      "Encoding -> decoding gives different results for " + encoder,
      Bytes.toStringBinary(unencodedDataBuf),
      Bytes.toStringBinary(actualDataset));
}
@Override
public Event intercept(Event event) {
  Map<String, String> headers = event.getHeaders();
  String Filename = headers.get("file");
  String fileType = getFileType(new String(event.getBody()));
  Configuration conf = HBaseConfiguration.create();
  HTable table = null;
  try {
    table = new HTable(conf, "fs");
  } catch (IOException e) {
    e.printStackTrace();
  }
  // NOTE: if the HTable could not be created, table is still null here and the put/close
  // calls below will throw a NullPointerException.
  Put put = new Put(Bytes.toBytes(fileType + "_" + Filename));
  put.add(Bytes.toBytes("fn"), Bytes.toBytes("ST"), Bytes.toBytes("PICKED"));
  try {
    table.put(put);
  } catch (RetriesExhaustedWithDetailsException e) {
    e.printStackTrace();
  } catch (InterruptedIOException e) {
    e.printStackTrace();
  }
  try {
    table.close();
  } catch (IOException e) {
    e.printStackTrace();
  }
  return event;
}
private HashMap<String, String[][]> getObjectList(
    ResultScanner resultScan, byte[][] family, List<byte[]> rowKeyList) {
  String tableFamilies;
  String[][] data;
  String rowKey;
  HashMap<String, String[][]> tableDataList = new HashMap<String, String[][]>();
  long k = 0;
  for (Result rows : resultScan) {
    System.out.println("Rows passed : " + (++k));
    for (int i = 0; i < family.length; i++) {
      tableFamilies = Bytes.toString(family[i]);
      rowKey = Bytes.toString(rows.getRow());
      rowKeyList.add(rows.getRow());
      if (tblMngr.getDataFiller(rows, tableFamilies)) {
        data = tblMngr.getResultMap();
        tableFamilies = tableFamilies + "," + rowKey;
        tableDataList.put(tableFamilies, data);
      }
    }
  }
  return tableDataList;
}
@Override
public AlertContext deserialize(byte[] bytes) {
  AlertContext context = new AlertContext();
  Map<String, String> properties = new HashMap<String, String>();
  final int length = bytes.length;
  if (length < 4) {
    return context;
  }
  int size = Bytes.toInt(bytes, 0, 4);
  int offset = 4;
  for (int i = 0; i < size; i++) {
    int keySize = Bytes.toInt(bytes, offset, 4);
    offset += 4;
    int valueSize = Bytes.toInt(bytes, offset, 4);
    offset += 4;
    String key = Bytes.toString(bytes, offset, keySize);
    offset += keySize;
    String value = Bytes.toString(bytes, offset, valueSize);
    offset += valueSize;
    properties.put(key, value);
  }
  context.addAll(properties);
  return context;
}
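The byte layout the reader expects is [entry count][key length][value length][key bytes][value bytes] per entry; a hedged sketch of a matching writer, derived only from the reader above (the accessor on AlertContext is an assumption):

// Hedged sketch of a serializer matching deserialize() above; context.getProperties()
// is an assumed accessor, not confirmed by the original source.
public byte[] serialize(AlertContext context) {
  Map<String, String> properties = context.getProperties();
  byte[] result = Bytes.toBytes(properties.size());
  for (Map.Entry<String, String> entry : properties.entrySet()) {
    byte[] key = Bytes.toBytes(entry.getKey());
    byte[] value = Bytes.toBytes(entry.getValue());
    result = Bytes.add(result, Bytes.toBytes(key.length), Bytes.toBytes(value.length));
    result = Bytes.add(result, key, value);
  }
  return result;
}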
public Put call(String v) throws Exception {
  String[] cells = v.split(",");
  Put put = new Put(Bytes.toBytes(cells[0]));
  put.add(Bytes.toBytes(cells[1]), Bytes.toBytes(cells[2]), Bytes.toBytes(cells[3]));
  return put;
}