/**
 * Seeks to the requested sub-block column using the row's column index.
 *
 * @return the sub-block value length (the file is left positioned at the start of the value),
 *         or null if the column is not present in this row
 */
private Integer seekToSubColumn(
        CFMetaData metadata, FileDataInput file, ByteBuffer sblockId, List<IndexHelper.IndexInfo> indexList)
        throws IOException
{
    file.readInt(); // column count

    /* get the various column ranges we have to read */
    AbstractType comparator = metadata.comparator;

    int index = IndexHelper.indexFor(sblockId, indexList, comparator, false);
    if (index == indexList.size())
        return null;

    IndexHelper.IndexInfo indexInfo = indexList.get(index);
    if (comparator.compare(sblockId, indexInfo.firstName) < 0)
        return null;

    FileMark mark = file.mark();
    FileUtils.skipBytesFully(file, indexInfo.offset);

    while (file.bytesPastMark(mark) < indexInfo.offset + indexInfo.width)
    {
        Integer dataLength = isSubBlockFound(metadata, file, sblockId);

        if (dataLength == null)
            return null;

        if (dataLength < 0)
            continue;

        return dataLength;
    }

    return null;
}
/**
 * Checks if the current column is the one we are looking for.
 *
 * @param metadata
 * @param file
 * @param sblockId
 * @return if > 0, the value length to read from the current file offset; -1 if this column is
 *         not the one we want (it has been skipped); null if we have read past where the
 *         requested column could be
 */
private Integer isSubBlockFound(CFMetaData metadata, FileDataInput file, ByteBuffer sblockId)
        throws IOException
{
    // On-disk column layout, as read below:
    // <short name length><name><flags byte><timestamp long><value length int><value bytes>
    ByteBuffer name = ByteBufferUtil.readWithShortLength(file);

    // Stop if we've gone too far (return null)
    if (metadata.comparator.compare(name, sblockId) > 0)
        return null;

    // read the column flags so we can reject deleted/expiring columns below
    int b = file.readUnsignedByte();

    // skip the timestamp (block ids are unique, so we don't need it)
    file.readLong();

    int sblockLength = file.readInt();

    if (!name.equals(sblockId)
            || (b & ColumnSerializer.DELETION_MASK) != 0
            || (b & ColumnSerializer.EXPIRATION_MASK) != 0)
    {
        FileUtils.skipBytesFully(file, sblockLength);
        return -1;
    }

    return sblockLength;
}
/**
 * Retrieves a local subBlock.
 *
 * @param subBlockCFName SubBlock column family name
 * @param blockId row key
 * @param sblockId SubBlock column name
 * @param offset offset inside the sblock
 * @return a local sub-block location, or null if no local sstable contains it
 * @throws TException
 */
private LocalBlock getLocalSubBlock(
        String subBlockCFName, ByteBuffer blockId, ByteBuffer sblockId, int offset) throws TException
{
    DecoratedKey<Token<?>> decoratedKey =
            new DecoratedKey<Token<?>>(StorageService.getPartitioner().getToken(blockId), blockId);

    Table table = Table.open(cfsKeyspace);
    ColumnFamilyStore sblockStore = table.getColumnFamilyStore(subBlockCFName);

    Collection<SSTableReader> sstables = sblockStore.getSSTables();

    for (SSTableReader sstable : sstables)
    {
        long position = sstable.getPosition(decoratedKey, Operator.EQ);

        if (position == -1)
            continue;

        String filename = sstable.descriptor.filenameFor(Component.DATA);
        RandomAccessFile raf = null;
        int mappedLength = -1;
        MappedByteBuffer mappedData = null;
        MappedFileDataInput file = null;

        try
        {
            raf = new RandomAccessFile(filename, "r");
            assert position < raf.length();

            mappedLength =
                    (raf.length() - position) < Integer.MAX_VALUE
                            ? (int) (raf.length() - position)
                            : Integer.MAX_VALUE;

            mappedData = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, position, mappedLength);

            file = new MappedFileDataInput(mappedData, filename, 0);

            // Verify the key we read from the data file is the one we looked up
            DecoratedKey keyInDisk =
                    SSTableReader.decodeKey(
                            sstable.partitioner, sstable.descriptor, ByteBufferUtil.readWithShortLength(file));
            assert keyInDisk.equals(decoratedKey)
                    : String.format("%s != %s in %s", keyInDisk, decoratedKey, file.getPath());

            long rowSize = SSTableReader.readRowSize(file, sstable.descriptor);
            assert rowSize > 0;
            assert rowSize < mappedLength;

            Filter bf = IndexHelper.defreezeBloomFilter(file, sstable.descriptor.usesOldBloomFilter);

            // verify this column is in this version of the row
            if (!bf.isPresent(sblockId))
                continue;

            List<IndexHelper.IndexInfo> indexList = IndexHelper.deserializeIndex(file);

            // we can stop early if the bloom filter says the column doesn't exist -- but we
            // can't skip deserializing the cf header, in case there's a relevant tombstone
            ColumnFamilySerializer serializer = ColumnFamily.serializer();
            try
            {
                ColumnFamily cf =
                        serializer.deserializeFromSSTableNoColumns(ColumnFamily.create(sstable.metadata), file);

                if (cf.isMarkedForDelete())
                    continue;
            }
            catch (Exception e)
            {
                e.printStackTrace();

                throw new IOException(
                        serializer
                                + " failed to deserialize "
                                + sstable.getColumnFamilyName()
                                + " with "
                                + sstable.metadata
                                + " from "
                                + file,
                        e);
            }

            Integer sblockLength = null;

            if (indexList == null)
                sblockLength = seekToSubColumn(sstable.metadata, file, sblockId);
            else
                sblockLength = seekToSubColumn(sstable.metadata, file, sblockId, indexList);

            if (sblockLength == null || sblockLength < 0)
                continue;

            int bytesReadFromStart = mappedLength - (int) file.bytesRemaining();

            if (logger.isDebugEnabled())
                logger.debug("BlockLength = " + sblockLength + " Available " + file.bytesRemaining());

            assert offset <= sblockLength : String.format("%d > %d", offset, sblockLength);

            long dataOffset = position + bytesReadFromStart;

            if (file.bytesRemaining() == 0 || sblockLength == 0)
                continue;

            return new LocalBlock(file.getPath(), dataOffset + offset, sblockLength - offset);
        }
        catch (IOException e)
        {
            throw new TException(e);
        }
        finally
        {
            FileUtils.closeQuietly(raf);
        }
    }

    return null;
}
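/*
 * Caller-side sketch (not part of the original source): the LocalBlock returned above
 * describes a byte range inside an sstable data file, so a caller can read the sub-block
 * straight from disk. The accessors getFile(), getOffset(), and getLength() are assumed
 * names for the values LocalBlock was constructed with and may differ in the real class.
 */
private byte[] readLocalSubBlock(LocalBlock block) throws IOException
{
    RandomAccessFile raf = new RandomAccessFile(block.getFile(), "r");
    try
    {
        // read exactly the sub-block's byte range from the data file
        byte[] data = new byte[(int) block.getLength()];
        raf.seek(block.getOffset());
        raf.readFully(data);
        return data;
    }
    finally
    {
        FileUtils.closeQuietly(raf);
    }
}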