/** Advances the scanner to the next cell. Returns false when the block is exhausted. */
@Override
public boolean next() {
  boolean hasMore = currentBuffer.hasRemaining();
  if (hasMore) {
    decodeNext();
    current.setKey(current.keyBuffer, current.memstoreTS);
    previous.invalidate();
  }
  return hasMore;
}
/** Repositions the scanner back at the first cell of the block. */
@Override
public void rewind() {
  currentBuffer.rewind();
  // Any dictionary built while scanning forward is stale after a rewind.
  if (tagCompressionContext != null) {
    tagCompressionContext.clear();
  }
  decodeFirst();
  current.setKey(current.keyBuffer, current.memstoreTS);
  previous.invalidate();
}
/** Points the scanner at a new block buffer and decodes its first cell. */
@Override
public void setCurrentBuffer(ByteBuff buffer) {
  // Single null-check replaces the original pair: the field cannot change
  // between the clear and the hand-off to the current state.
  if (tagCompressionContext != null) {
    tagCompressionContext.clear();
    current.tagCompressionContext = tagCompressionContext;
  }
  currentBuffer = buffer;
  current.currentBuffer = buffer;
  decodeFirst();
  current.setKey(current.keyBuffer, current.memstoreTS);
  previous.invalidate();
}
/**
 * Finalizes the current block once all of its column chunks have been written,
 * records its row count, and appends it to the block list.
 *
 * @throws IOException if the stream position cannot be read
 */
public void endBlock() throws IOException {
  state = state.endBlock();
  if (DEBUG) {
    LOG.debug(out.getPos() + ": end block");
  }
  currentBlock.setRowCount(currentRecordCount);
  blocks.add(currentBlock);
  currentBlock = null;
}
/**
 * writes a single page
 *
 * @param valueCount count of values
 * @param uncompressedPageSize the size of the data once uncompressed
 * @param bytes the compressed data for the page without header
 * @param statistics the statistics of the page, merged into the current column's statistics
 * @param rlEncoding encoding of the repetition level
 * @param dlEncoding encoding of the definition level
 * @param valuesEncoding encoding of values
 * @throws IOException if the header or page content cannot be written
 */
public void writeDataPage(
    int valueCount,
    int uncompressedPageSize,
    BytesInput bytes,
    Statistics statistics,
    parquet.column.Encoding rlEncoding,
    parquet.column.Encoding dlEncoding,
    parquet.column.Encoding valuesEncoding)
    throws IOException {
  state = state.write();
  long beforeHeader = out.getPos();
  if (DEBUG) LOG.debug(beforeHeader + ": write data page: " + valueCount + " values");
  int compressedPageSize = (int) bytes.size();
  // Header is written straight to the stream; its size is measured by position delta below.
  metadataConverter.writeDataPageHeader(
      uncompressedPageSize,
      compressedPageSize,
      valueCount,
      statistics,
      rlEncoding,
      dlEncoding,
      valuesEncoding,
      out);
  long headerSize = out.getPos() - beforeHeader;
  // Header bytes count toward both totals (they are never compressed).
  this.uncompressedLength += uncompressedPageSize + headerSize;
  this.compressedLength += compressedPageSize + headerSize;
  if (DEBUG) LOG.debug(out.getPos() + ": write data page content " + compressedPageSize);
  bytes.writeAllTo(out);
  currentStatistics.mergeStatistics(statistics);
  currentEncodings.add(rlEncoding);
  currentEncodings.add(dlEncoding);
  currentEncodings.add(valuesEncoding);
}
/** * start a block * * @param recordCount the record count in this block * @throws IOException */ public void startBlock(long recordCount) throws IOException { state = state.startBlock(); if (DEBUG) LOG.debug(out.getPos() + ": start block"); // out.write(MAGIC); // TODO: add a magic delimiter currentBlock = new BlockMetaData(); currentRecordCount = recordCount; }
/**
 * Finishes the file once all blocks have been written: serializes the footer
 * (schema, block metadata and extra key/values) and closes the stream.
 *
 * @param extraMetaData the extra meta data to write in the footer
 * @throws IOException if the footer cannot be written or the stream closed
 */
public void end(Map<String, String> extraMetaData) throws IOException {
  state = state.end();
  if (DEBUG) {
    LOG.debug(out.getPos() + ": end");
  }
  FileMetaData fileMetaData = new FileMetaData(schema, extraMetaData, Version.FULL_VERSION);
  ParquetMetadata footer = new ParquetMetadata(fileMetaData, blocks);
  serializeFooter(footer, out);
  out.close();
}
public void disconnect() { // Block double disconnection if (State.equals(STATE.DISCONNECTED)) { return; } // Wipe state State = STATE.DISCONNECTED; SessionId = null; disconnect1(); }
/*
 * (non-Javadoc)
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {
  // Objects.hash implements exactly the same prime-31 accumulation with null -> 0
  // as the previous hand-rolled version, over the same three fields used by equals(),
  // so the produced values are unchanged.
  return java.util.Objects.hash(mDuplicatorResponses, mSpoilerDestinationState, mSummarySource);
}
/**
 * Steps the scanner back to the previously decoded cell by swapping the
 * current/previous state objects. Only a single step back is supported;
 * callers must not invoke this twice in a row or on the first key of the block.
 *
 * @throws IllegalStateException if there is no valid previous state
 */
private void moveToPrevious() {
  if (!previous.isValid()) {
    throw new IllegalStateException(
        "Can move back only once and not in first key in the block.");
  }
  // Swap the two STATE holders instead of copying their contents.
  STATE tmp = previous;
  previous = current;
  current = tmp;
  // move after last key value
  currentBuffer.position(current.nextKvOffset);
  // Already decoded the tag bytes. We cache this tags into current state and also the total
  // compressed length of the tags bytes. For the next time decodeNext() we don't need to decode
  // the tags again. This might pollute the Data Dictionary what we use for the compression.
  // When current.uncompressTags is false, we will just reuse the current.tagsBuffer and skip
  // 'tagsCompressedLength' bytes of source stream.
  // See in decodeTags()
  current.tagsBuffer = previous.tagsBuffer;
  current.tagsCompressedLength = previous.tagsCompressedLength;
  current.uncompressTags = false;
  // The current key has to be reset with the previous Cell
  current.setKey(current.keyBuffer, current.memstoreTS);
  previous.invalidate();
}
/**
 * Decodes the tags section of the current cell from {@code currentBuffer} into
 * {@code current}, honouring tag compression when a compression context is set.
 */
protected void decodeTags() {
  current.tagsLength = ByteBuff.readCompressedInt(currentBuffer);
  if (tagCompressionContext != null) {
    if (current.uncompressTags) {
      // Tag compression is been used. uncompress it into tagsBuffer
      current.ensureSpaceForTags();
      try {
        current.tagsCompressedLength = tagCompressionContext.uncompressTags(
            currentBuffer, current.tagsBuffer, 0, current.tagsLength);
      } catch (IOException e) {
        throw new RuntimeException("Exception while uncompressing tags", e);
      }
    } else {
      // Tags for this position were cached by moveToPrevious(): just skip the
      // compressed bytes in the stream and re-arm decoding for the next cell.
      currentBuffer.skip(current.tagsCompressedLength);
      current.uncompressTags = true; // Reset this.
    }
    // -1 signals that the tags live in tagsBuffer rather than at an offset of currentBuffer.
    current.tagsOffset = -1;
  } else {
    // When tag compress is not used, let us not do copying of tags bytes into tagsBuffer.
    // Just mark the tags Offset so as to create the KV buffer later in getKeyValueBuffer()
    current.tagsOffset = currentBuffer.position();
    currentBuffer.skip(current.tagsLength);
  }
}
/**
 * Dispatches one tick of work based on the current state.
 * Fixes from review: the CUTTING branch used an empty if-body that silently
 * discarded the boolean (the call is made purely for its side effect), and the
 * DONE case fell through to default without an explicit break.
 */
public void run() {
  STATE cur = getState();
  current = cur.name();
  switch (cur) {
    case CUTTING:
      // Result intentionally unused; cutLogs() is invoked for its side effect.
      cutLogs();
      break;
    case LIGHTING:
      if (lightLogs()) {
        needsInvo = false;
      }
      break;
    case IDLE:
      break;
    case TALKING:
      talk();
      break;
    case WALKING:
      walk();
      break;
    case DONE:
      done = true;
      break; // previously fell through to default, which only broke anyway
    default:
      break;
  }
}
private void eventDelegateLoaded(String sessionId) { if (!State.equals(STATE.CONNECTING)) { exception("BadServerMessageException", "Multiple loaded events have occured."); return; } // Note that we're connected State = STATE.CONNECTED; SessionId = sessionId; // Call connected event Handler.onStartupSuccess(); pushlishQueueLength(); flushBuffer(); }
/**
 * writes a number of pages at once
 *
 * @param bytes bytes to be written including page headers
 * @param uncompressedTotalPageSize total uncompressed size (without page headers)
 * @param compressedTotalPageSize total compressed size (without page headers)
 * @param totalStats statistics covering all of the pages being written
 * @param encodings encodings used across the pages
 * @throws IOException if the pages cannot be written
 */
void writeDataPages(
    BytesInput bytes,
    long uncompressedTotalPageSize,
    long compressedTotalPageSize,
    Statistics totalStats,
    List<parquet.column.Encoding> encodings)
    throws IOException {
  state = state.write();
  if (DEBUG) {
    LOG.debug(out.getPos() + ": write data pages");
  }
  // The byte payload includes the page headers; recover their total size.
  long headerBytes = bytes.size() - compressedTotalPageSize;
  this.uncompressedLength += uncompressedTotalPageSize + headerBytes;
  this.compressedLength += compressedTotalPageSize + headerBytes;
  if (DEBUG) {
    LOG.debug(out.getPos() + ": write data pages content");
  }
  bytes.writeAllTo(out);
  currentEncodings.addAll(encodings);
  currentStatistics = totalStats;
}
/**
 * start a column inside a block
 *
 * @param descriptor the column descriptor
 * @param valueCount the value count in this column
 * @param compressionCodecName the codec used to compress this column's pages
 * @throws IOException if the current stream position cannot be read
 */
public void startColumn(
    ColumnDescriptor descriptor, long valueCount, CompressionCodecName compressionCodecName)
    throws IOException {
  state = state.startColumn();
  if (DEBUG) LOG.debug(out.getPos() + ": start column: " + descriptor + " count=" + valueCount);
  currentEncodings = new HashSet<parquet.column.Encoding>();
  currentChunkPath = ColumnPath.get(descriptor.getPath());
  currentChunkType = descriptor.getType();
  currentChunkCodec = compressionCodecName;
  currentChunkValueCount = valueCount;
  // The chunk's first data page starts at the current stream position.
  currentChunkFirstDataPage = out.getPos();
  compressedLength = 0;
  uncompressedLength = 0;
  // need to know what type of stats to initialize to
  // better way to do this?
  currentStatistics = Statistics.getStatsBasedOnType(currentChunkType);
}
public void call(Call call) { // Calculate callId if (NextCallSeqId > 9223372036854775806L) { NextCallSeqId = 1; } int callSeqId = NextCallSeqId++; // Add receiver to pending OutstandingCalls.put(new Integer(callSeqId), call); pushlishQueueLength(); // Add the call to out-bound queue queueTx(callSeqId, call.getTx()); if (State.equals(STATE.CONNECTED)) { flushBuffer(); } }
/*
 * (non-Javadoc)
 * @see java.lang.Object#equals(java.lang.Object)
 */
@Override
public boolean equals(final Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || getClass() != obj.getClass()) {
    return false;
  }
  final GameCallReturnSummary other = (GameCallReturnSummary) obj;
  // Objects.equals collapses each null-guarded field comparison; the fields and
  // their comparison semantics are unchanged.
  return java.util.Objects.equals(mDuplicatorResponses, other.mDuplicatorResponses)
      && java.util.Objects.equals(mSpoilerDestinationState, other.mSpoilerDestinationState)
      && java.util.Objects.equals(mSummarySource, other.mSummarySource);
}
/**
 * end a column (once all rep, def and data have been written)
 *
 * @throws IOException if the stream position cannot be read
 */
public void endColumn() throws IOException {
  state = state.endColumn();
  if (DEBUG) LOG.debug(out.getPos() + ": end column");
  currentBlock.addColumn(
      ColumnChunkMetaData.get(
          currentChunkPath,
          currentChunkType,
          currentChunkCodec,
          currentEncodings,
          currentStatistics,
          currentChunkFirstDataPage,
          currentChunkDictionaryPageOffset,
          currentChunkValueCount,
          compressedLength,
          uncompressedLength));
  // Fixed typo ("chumk" -> "chunk") and switched LOG.info to LOG.debug to match
  // the DEBUG-guarded debug logging used by the other writer methods.
  if (DEBUG) LOG.debug("ended Column chunk: " + currentColumn);
  currentColumn = null;
  this.currentBlock.setTotalByteSize(currentBlock.getTotalByteSize() + uncompressedLength);
  this.uncompressedLength = 0;
  this.compressedLength = 0;
}
/**
 * writes a dictionary page
 *
 * @param dictionaryPage the dictionary page
 * @throws IOException if the header or page content cannot be written
 */
public void writeDictionaryPage(DictionaryPage dictionaryPage) throws IOException {
  state = state.write();
  if (DEBUG)
    LOG.debug(
        out.getPos() + ": write dictionary page: " + dictionaryPage.getDictionarySize() + " values");
  // Remember where the dictionary page starts; also used to size the header below.
  currentChunkDictionaryPageOffset = out.getPos();
  int uncompressedSize = dictionaryPage.getUncompressedSize();
  int compressedPageSize = (int) dictionaryPage.getBytes().size(); // TODO: fix casts
  metadataConverter.writeDictionaryPageHeader(
      uncompressedSize,
      compressedPageSize,
      dictionaryPage.getDictionarySize(),
      dictionaryPage.getEncoding(),
      out);
  long headerSize = out.getPos() - currentChunkDictionaryPageOffset;
  // Header bytes count toward both totals (they are never compressed).
  this.uncompressedLength += uncompressedSize + headerSize;
  this.compressedLength += compressedPageSize + headerSize;
  if (DEBUG) LOG.debug(out.getPos() + ": write dictionary page content " + compressedPageSize);
  dictionaryPage.getBytes().writeAllTo(out);
  currentEncodings.add(dictionaryPage.getEncoding());
}
/**
 * Positions the scanner at (or just before) {@code seekCell} within the current
 * block by decoding forward cell-by-cell, reusing common-prefix lengths between
 * consecutive keys to skip redundant byte comparisons.
 *
 * @param seekCell the key to seek to
 * @param seekBefore when true, position on the cell immediately before an exact match
 * @return 0 on exact match, 1 when positioned before/at the nearest cell,
 *         or HConstants.INDEX_KEY_MAGIC when the seek key precedes the first cell
 * @throws IllegalStateException if seekBefore is requested at the first key of the block
 */
@Override
public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {
  int rowCommonPrefix = 0;
  int familyCommonPrefix = 0;
  int qualCommonPrefix = 0;
  previous.invalidate();
  do {
    int comp;
    keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);
    if (current.lastCommonPrefix != 0) {
      // The KV format has row key length also in the byte array. The
      // common prefix
      // includes it. So we need to subtract to find out the common prefix
      // in the
      // row part alone
      rowCommonPrefix = Math.min(rowCommonPrefix, current.lastCommonPrefix - 2);
    }
    if (current.lastCommonPrefix <= 2) {
      rowCommonPrefix = 0;
    }
    rowCommonPrefix += findCommonPrefixInRowPart(seekCell, keyOnlyKV, rowCommonPrefix);
    comp = compareCommonRowPrefix(seekCell, keyOnlyKV, rowCommonPrefix);
    if (comp == 0) {
      comp = compareTypeBytes(seekCell, keyOnlyKV);
      if (comp == 0) {
        // Subtract the fixed row key length and the family key fixed length
        familyCommonPrefix =
            Math.max(
                0,
                Math.min(
                    familyCommonPrefix,
                    current.lastCommonPrefix - (3 + keyOnlyKV.getRowLength())));
        familyCommonPrefix +=
            findCommonPrefixInFamilyPart(seekCell, keyOnlyKV, familyCommonPrefix);
        comp = compareCommonFamilyPrefix(seekCell, keyOnlyKV, familyCommonPrefix);
        if (comp == 0) {
          // subtract the rowkey fixed length and the family key fixed
          // length
          qualCommonPrefix =
              Math.max(
                  0,
                  Math.min(
                      qualCommonPrefix,
                      current.lastCommonPrefix
                          - (3 + keyOnlyKV.getRowLength() + keyOnlyKV.getFamilyLength())));
          qualCommonPrefix +=
              findCommonPrefixInQualifierPart(seekCell, keyOnlyKV, qualCommonPrefix);
          comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);
          if (comp == 0) {
            comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV);
            if (comp == 0) {
              // Compare types. Let the delete types sort ahead of puts;
              // i.e. types
              // of higher numbers sort before those of lesser numbers.
              // Maximum
              // (255)
              // appears ahead of everything, and minimum (0) appears
              // after
              // everything.
              comp = (0xff & keyOnlyKV.getTypeByte()) - (0xff & seekCell.getTypeByte());
            }
          }
        }
      }
    }
    if (comp == 0) { // exact match
      if (seekBefore) {
        if (!previous.isValid()) {
          // The caller (seekBefore) has to ensure that we are not at the
          // first key in the block.
          throw new IllegalStateException(
              "Cannot seekBefore if "
                  + "positioned at the first key in the block: key="
                  + Bytes.toStringBinary(seekCell.getRowArray()));
        }
        moveToPrevious();
        return 1;
      }
      return 0;
    }
    if (comp < 0) { // already too large, check previous
      if (previous.isValid()) {
        moveToPrevious();
      } else {
        return HConstants.INDEX_KEY_MAGIC; // using optimized index key
      }
      return 1;
    }
    // move to next, if more data is available
    if (currentBuffer.hasRemaining()) {
      previous.copyFromNext(current);
      decodeNext();
      current.setKey(current.keyBuffer, current.memstoreTS);
    } else {
      break;
    }
  } while (true);
  // we hit the end of the block, not an exact match
  return 1;
}
/**
 * Opens the file by emitting the leading magic bytes.
 *
 * @throws IOException if the magic cannot be written
 */
public void start() throws IOException {
  state = state.start();
  if (DEBUG) {
    LOG.debug(out.getPos() + ": start");
  }
  out.write(MAGIC);
}
/** Returns the cell the scanner is currently positioned on. */
@Override
public Cell getCell() {
  Cell positioned = current.toCell();
  return positioned;
}
/**
 * Renders a state for diagnostics: delegates to the configured formatter when
 * one is set, otherwise falls back to "null" / the state's own toString().
 */
protected String stateToString(Nfa<STATE> nfa, STATE state) {
  if (stateFormatter == null) {
    return state == null ? "null" : state.toString();
  }
  return stateFormatter.apply(state);
}
/** Transitions to the state whose enum constant is named {@code nextState}. */
public void setState(String nextState) {
  STATE parsed = STATE.valueOf(nextState);
  setState(parsed);
}
/**
 * Null-safe hash over the single {@code current} field. Objects.hashCode
 * returns 0 for null and the object's own hashCode otherwise — exactly the
 * values the previous hand-rolled null check produced.
 */
@Override
public int hashCode() {
  return java.util.Objects.hashCode(current);
}