public void readFieldsFromPb(
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey walKey,
    WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
  // Region and table names may be dictionary-compressed; decode them
  // through the uncompressor when a compression context is present.
  if (this.compressionContext != null) {
    this.encodedRegionName = uncompressor.uncompress(
        walKey.getEncodedRegionName(), compressionContext.regionDict);
    byte[] tablenameBytes = uncompressor.uncompress(
        walKey.getTableName(), compressionContext.tableDict);
    this.tablename = TableName.valueOf(tablenameBytes);
  } else {
    this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
    this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
  }
  clusterIds.clear();
  if (walKey.hasClusterId()) {
    // When we are reading an older log (0.95.1 release),
    // this is definitely the originating cluster.
    clusterIds.add(new UUID(
        walKey.getClusterId().getMostSigBits(),
        walKey.getClusterId().getLeastSigBits()));
  }
  // Newer logs carry the full list of cluster UUIDs the entry has
  // been through (used for replication loop detection).
  for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
  }
  // Nonce group and nonce are optional fields.
  if (walKey.hasNonceGroup()) {
    this.nonceGroup = walKey.getNonceGroup();
  }
  if (walKey.hasNonce()) {
    this.nonce = walKey.getNonce();
  }
  // Rebuild the per-family replication scopes, decompressing family
  // names when a compression context is present.
  this.scopes = null;
  if (walKey.getScopesCount() > 0) {
    this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    for (FamilyScope scope : walKey.getScopesList()) {
      byte[] family = (compressionContext == null)
          ? scope.getFamily().toByteArray()
          : uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
      this.scopes.put(family, scope.getScopeType().getNumber());
    }
  }
  this.logSeqNum = walKey.getLogSequenceNumber();
  this.writeTime = walKey.getWriteTime();
  if (walKey.hasOrigSequenceNumber()) {
    this.origLogSeqNum = walKey.getOrigSequenceNumber();
  }
}