@Override
public void initBlock() {
  // reset the uncompressed buffers
  nodLenBuffer.offset = nodLenBuffer.length = 0;
  nodBuffer.offset = nodBuffer.length = 0;
  termFreqBuffer.offset = termFreqBuffer.length = 0;
  this.resetCurrentNode();

  // mark all streams as not yet decoded
  nodLenReadPending = true;
  nodReadPending = true;
  termFreqReadPending = true;

  // reset the compressed buffer lengths
  nodLenCompressedBufferLength = 0;
  nodCompressedBufferLength = 0;
  termFreqCompressedBufferLength = 0;
}
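// The pending flags reset above drive lazy per-block decoding: each stream is
// decompressed only on first access after initBlock(). A minimal sketch of that
// pattern, with illustrative names that are not part of the real reader:
final class LazyBlockSketch {
  private boolean lenReadPending = true;
  private int[] lengths;

  void initBlock() {
    lenReadPending = true; // invalidate the cached decode for the new block
  }

  int[] lengths() {
    if (lenReadPending) {
      lengths = decodeLengths(); // decode at most once per block
      lenReadPending = false;
    }
    return lengths;
  }

  private int[] decodeLengths() {
    return new int[] {1, 2, 3}; // stand-in for the real decompression
  }
}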
/** Create a random JSON document with random values */
public String getRandomJson(int nbNodes) {
  // init
  sb.setLength(0);
  sb.append("{");
  states.clear();
  states.add(OBJECT_ATT);
  images.clear();
  nodes.clear();
  incr.clear();
  datatypes.clear();
  types.clear();
  curNodePath.length = 1;
  curNodePath.offset = 0;
  Arrays.fill(curNodePath.ints, -1);
  shouldFail = false;
  nestedObjs = 0;

  /*
   * The generated JSON might be incomplete if states is not empty and the
   * maximum number of nodes has been reached. The <= bound ensures that the
   * JSON is still valid when nbNodes == 1.
   */
  for (int i = 0; i <= nbNodes && !states.empty(); i++) {
    sb.append(this.getWhitespace()).append(this.getNextNode()).append(this.getWhitespace());
  }
  shouldFail = shouldFail || !states.empty();
  return sb.toString();
}
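// A self-contained sketch (not part of the generator above) of the stop
// condition just described: generation ends when the state stack empties (the
// document is complete) or when the node budget is exhausted (the document may
// be left incomplete, so parsing it should fail). Arrays are used instead of
// objects to keep separator handling trivial; all names are illustrative.
final class StopConditionSketch {
  public static void main(String[] args) {
    final java.util.Random random = new java.util.Random(42);
    final java.util.ArrayDeque<Character> states = new java.util.ArrayDeque<>();
    final StringBuilder sb = new StringBuilder("[");
    states.push(']');
    final int nbNodes = 10;
    for (int i = 0; i <= nbNodes && !states.isEmpty(); i++) {
      final int action = random.nextInt(3); // 0 = leaf, 1 = open, 2 = close
      if (action == 2) {
        sb.append(states.pop()); // close the innermost array
      } else {
        if (sb.charAt(sb.length() - 1) != '[') {
          sb.append(','); // separate sibling values
        }
        if (action == 1 && states.size() < 4) {
          sb.append('['); // open a nested array
          states.push(']');
        } else {
          sb.append(i); // emit a leaf value
        }
      }
    }
    // budget exhausted with scopes still open: the JSON is incomplete
    final boolean shouldFail = !states.isEmpty();
    System.out.println(sb + "  shouldFail=" + shouldFail);
  }
}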
private InputOutput<T> setResult() {
  if (upto == 0) {
    return null;
  } else {
    current.length = upto - 1;
    result.output = output[upto];
    return result;
  }
}
private void decodeTermFreqs() throws IOException {
  // logger.debug("Decode Term Freq in Node: {}", this.hashCode());
  // logger.debug("Decode Term Freq in Node at {}", in.getFilePointer());
  in.readBytes(termFreqCompressedBuffer.bytes, 0, termFreqCompressedBufferLength);
  termFreqCompressedBuffer.offset = 0;
  termFreqCompressedBuffer.length = termFreqCompressedBufferLength;
  nodDecompressor.decompress(termFreqCompressedBuffer, termFreqBuffer);
  // Set the length limit based on the block size, as decompressors with a
  // large window size (e.g., AFor) can set it larger than the block size.
  termFreqBuffer.length = termFreqBlockSize;
  termFreqReadPending = false;
}
private void decodeNodeLengths() throws IOException {
  // logger.debug("Decode Nodes Length: {}", this.hashCode());
  // logger.debug("Decode Nodes Length at {}", in.getFilePointer());
  in.readBytes(nodLenCompressedBuffer.bytes, 0, nodLenCompressedBufferLength);
  nodLenCompressedBuffer.offset = 0;
  nodLenCompressedBuffer.length = nodLenCompressedBufferLength;
  nodDecompressor.decompress(nodLenCompressedBuffer, nodLenBuffer);
  // Set the length limit based on the block size, as decompressors with a
  // large window size (e.g., AFor) can set it larger than the block size.
  nodLenBuffer.length = nodLenBlockSize;
  nodLenReadPending = false;
}
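// A sketch of why the length clamp above is needed. Frame-based decoders such
// as AFor decode whole frames of integers at a time, so a decompress call can
// legitimately leave the output length rounded up past the logical block size;
// the caller then clamps it back. This toy decoder assumes nothing about
// SIREn's real Decompressor interface; FRAME and decodeFrames are illustrative.
final class FrameDecoderSketch {
  static final int FRAME = 32;

  /** Decodes ceil(n / FRAME) * FRAME values into dst and returns that count. */
  static int decodeFrames(final int[] src, final int n, final int[] dst) {
    final int padded = ((n + FRAME - 1) / FRAME) * FRAME;
    for (int i = 0; i < padded; i++) {
      dst[i] = i < n ? src[i] : 0; // frame padding decodes as zeros
    }
    return padded;
  }

  public static void main(String[] args) {
    final int blockSize = 40;             // logical number of values
    final int[] out = new int[2 * FRAME]; // room for the rounded-up frames
    int length = decodeFrames(new int[blockSize], blockSize, out);
    System.out.println(length);           // 64: two full frames were decoded
    length = Math.min(length, blockSize); // the clamp performed above
    System.out.println(length);           // 40: only the real values remain
  }
}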
/**
 * Decode the delta of the node.
 *
 * <p>If a new doc has been read (currentNode.length == 0), then only update
 * the currentNode offset and length. Otherwise, perform delta decoding.
 *
 * <p>Delta decoding is performed while the current node id and the previous
 * node id are equal.
 */
private final void deltaDecoding() {
  final int[] nodBufferInts = nodBuffer.ints;
  // increment the length by one
  final int nodLength = nodLenBuffer.ints[nodLenBuffer.offset++] + 1;
  final int nodOffset = nodBuffer.offset;
  final int nodEnd = nodOffset + nodLength;

  final int currentNodeOffset = currentNode.offset;
  final int currentNodeEnd = currentNodeOffset + currentNode.length;

  for (int i = nodOffset, j = currentNodeOffset;
       i < nodEnd && j < currentNodeEnd; i++, j++) {
    nodBufferInts[i] += nodBufferInts[j];
    // if the node ids are different, then stop decoding
    if (nodBufferInts[i] != nodBufferInts[j]) {
      break;
    }
  }

  // increment the node buffer offset
  nodBuffer.offset += nodLength;
  // update the last node offset and length
  currentNode.offset = nodOffset;
  currentNode.length = nodLength;
}
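// A worked example of the delta decoding above, lifted out of the enclosing
// reader. Node ids are paths of ints; each new path is stored as a delta
// against the previous path, component by component, and decoding stops at the
// first component that ends up different (the remaining values are already
// absolute). The buffer contents here are illustrative.
final class DeltaDecodingSketch {
  public static void main(String[] args) {
    // previous node id [2, 5, 3], followed by the stored deltas [0, 0, 4, 1]
    final int[] buffer = {2, 5, 3, 0, 0, 4, 1};
    final int prevOffset = 0, prevLength = 3;
    final int curOffset = 3, curLength = 4;
    for (int i = curOffset, j = prevOffset;
         i < curOffset + curLength && j < prevOffset + prevLength; i++, j++) {
      buffer[i] += buffer[j];
      if (buffer[i] != buffer[j]) {
        break; // the paths diverged at this component
      }
    }
    // prints [2, 5, 7, 1]: the decoded node id
    System.out.println(java.util.Arrays.toString(
        java.util.Arrays.copyOfRange(buffer, curOffset, curOffset + curLength)));
  }
}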
public void resetCurrentNode() {
  currentNode.offset = currentNode.length = 0;
}