@Override
public void serialize(IndexInput input, JsonGenerator jg, SerializerProvider sp) throws IOException {
    jg.writeStartObject();
    // Riak bug ... explicitly specifying "default" as the type breaks things
    if (!input.getNamespace().getBucketTypeAsString().equals(Namespace.DEFAULT_BUCKET_TYPE)) {
        // non-default bucket type: emit {"bucket": ["type", "name"], ...}
        jg.writeArrayFieldStart("bucket");
        jg.writeString(input.getNamespace().getBucketTypeAsString());
        jg.writeString(input.getNamespace().getBucketNameAsString());
        jg.writeEndArray();
    } else {
        // default bucket type: emit {"bucket": "name", ...}
        jg.writeStringField("bucket", input.getNamespace().getBucketNameAsString());
    }
    jg.writeStringField("index", input.getIndex());
    IndexInput.IndexCriteria criteria = input.getCriteria();
    if (criteria instanceof IndexInput.MatchCriteria) {
        IndexInput.MatchCriteria<?> match = (IndexInput.MatchCriteria<?>) criteria;
        jg.writeObjectField("key", match.getValue());
    } else if (criteria instanceof IndexInput.RangeCriteria) {
        IndexInput.RangeCriteria range = (IndexInput.RangeCriteria) criteria;
        jg.writeObjectField("start", range.getBegin());
        jg.writeObjectField("end", range.getEnd());
    }
    jg.writeEndObject();
}
static Map<String, String> readChecksums(Directory[] dirs, Map<String, String> defaultValue) throws IOException {
    long lastFound = -1;
    Directory lastDir = null;
    // find the checksums file with the highest generation across all directories
    for (Directory dir : dirs) {
        for (String name : dir.listAll()) {
            if (!isChecksum(name)) {
                continue;
            }
            long current = Long.parseLong(name.substring(CHECKSUMS_PREFIX.length()));
            if (current > lastFound) {
                lastFound = current;
                lastDir = dir;
            }
        }
    }
    if (lastFound == -1) {
        return defaultValue;
    }
    IndexInput indexInput = lastDir.openInput(CHECKSUMS_PREFIX + lastFound, IOContext.READONCE);
    try {
        indexInput.readInt(); // version
        return indexInput.readStringStringMap();
    } catch (Exception e) {
        // failed to load checksums, ignore and return the default map
        return defaultValue;
    } finally {
        indexInput.close();
    }
}
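// A minimal sketch of the isChecksum helper assumed above (hypothetical; the real
// implementation may differ): a checksums file is any file carrying the well-known prefix
// that readChecksums strips off to recover the generation number.
private static boolean isChecksum(String name) {
    return name.startsWith(CHECKSUMS_PREFIX);
}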
public boolean next() throws IOException {
    pointer++;
    if (pointer < docFreq) {
        in.seek(postingMaps[pointer].offset);
        freq = in.readVInt();
        position = 0;
        return true;
    }
    return false;
}
// LUCENE-1196
public void testIllegalEOF() throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput o = dir.createOutput("out");
    byte[] b = new byte[1024];
    o.writeBytes(b, 0, 1024);
    o.close();
    IndexInput i = dir.openInput("out");
    i.seek(1024); // seeking to the exact end of the file must not throw
    i.close();
    dir.close();
}
private void corruptFile(Directory dir, String fileIn, String fileOut) throws IOException {
    IndexInput input = dir.openInput(fileIn, IOContext.READONCE);
    IndexOutput output = dir.createOutput(fileOut, IOContext.DEFAULT);
    long len = input.length();
    byte[] b = new byte[1024];
    long broken = randomInt((int) len); // position of the byte to corrupt
    long pos = 0;
    while (pos < len) {
        int min = (int) Math.min(input.length() - pos, b.length);
        input.readBytes(b, 0, min);
        if (broken >= pos && broken < pos + min) {
            // flip one byte
            int flipPos = (int) (broken - pos);
            b[flipPos] = (byte) (b[flipPos] ^ 42);
        }
        output.writeBytes(b, min);
        pos += min;
    }
    IOUtils.close(input, output);
}
@Test
public void testVerifyingIndexOutput() throws IOException {
    Directory dir = newDirectory();
    IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT);
    int iters = scaledRandomIntBetween(10, 100);
    for (int i = 0; i < iters; i++) {
        BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
        output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
    }
    CodecUtil.writeFooter(output);
    output.close();

    IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT);
    String checksum = Store.digestToString(CodecUtil.retrieveChecksum(indexInput));
    indexInput.seek(0);
    BytesRef ref = new BytesRef(scaledRandomIntBetween(1, 1024));
    long length = indexInput.length();
    IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(
        new StoreFileMetaData("foo1.bar", length, checksum),
        dir.createOutput("foo1.bar", IOContext.DEFAULT));
    while (length > 0) {
        if (random().nextInt(10) == 0) {
            verifyingOutput.writeByte(indexInput.readByte());
            length--;
        } else {
            int min = (int) Math.min(length, ref.bytes.length);
            indexInput.readBytes(ref.bytes, ref.offset, min);
            verifyingOutput.writeBytes(ref.bytes, ref.offset, min);
            length -= min;
        }
    }
    Store.verify(verifyingOutput);
    verifyingOutput.writeByte((byte) 0x0);
    try {
        Store.verify(verifyingOutput);
        fail("should be a corrupted index");
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
        // ok
    }
    IOUtils.close(indexInput, verifyingOutput, dir);
}
@Test
public void testVerifyingIndexInput() throws IOException {
    Directory dir = newDirectory();
    IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT);
    int iters = scaledRandomIntBetween(10, 100);
    for (int i = 0; i < iters; i++) {
        BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
        output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
    }
    CodecUtil.writeFooter(output);
    output.close();

    // Check file
    IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT);
    long checksum = CodecUtil.retrieveChecksum(indexInput);
    indexInput.seek(0);
    IndexInput verifyingIndexInput = new Store.VerifyingIndexInput(dir.openInput("foo.bar", IOContext.DEFAULT));
    readIndexInputFullyWithRandomSeeks(verifyingIndexInput);
    Store.verify(verifyingIndexInput);
    assertThat(checksum, equalTo(((ChecksumIndexInput) verifyingIndexInput).getChecksum()));
    IOUtils.close(indexInput, verifyingIndexInput);

    // Corrupt file and check again
    corruptFile(dir, "foo.bar", "foo1.bar");
    verifyingIndexInput = new Store.VerifyingIndexInput(dir.openInput("foo1.bar", IOContext.DEFAULT));
    readIndexInputFullyWithRandomSeeks(verifyingIndexInput);
    try {
        Store.verify(verifyingIndexInput);
        fail("should be a corrupted index");
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
        // ok
    }
    IOUtils.close(verifyingIndexInput);
    IOUtils.close(dir);
}
// LUCENE-2852
public void testSeekToEOFThenBack() throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput o = dir.createOutput("out");
    byte[] bytes = new byte[3 * RAMInputStream.BUFFER_SIZE];
    o.writeBytes(bytes, 0, bytes.length);
    o.close();
    IndexInput i = dir.openInput("out");
    i.seek(2 * RAMInputStream.BUFFER_SIZE - 1);
    i.seek(3 * RAMInputStream.BUFFER_SIZE);
    i.seek(RAMInputStream.BUFFER_SIZE);
    i.readBytes(bytes, 0, 2 * RAMInputStream.BUFFER_SIZE);
    i.close();
    dir.close();
}
private void readIndexInputFullyWithRandomSeeks(IndexInput indexInput) throws IOException {
    BytesRef ref = new BytesRef(scaledRandomIntBetween(1, 1024));
    long pos = 0;
    while (pos < indexInput.length()) {
        assertEquals(pos, indexInput.getFilePointer());
        int op = random().nextInt(5);
        if (op == 0) {
            // random seek of up to 100 bytes forward or backward, clamped to the file bounds
            int shift = 100 - randomIntBetween(0, 200);
            pos = Math.min(indexInput.length() - 1, Math.max(0, pos + shift));
            indexInput.seek(pos);
        } else if (op == 1) {
            indexInput.readByte();
            pos++;
        } else {
            int min = (int) Math.min(indexInput.length() - pos, ref.bytes.length);
            indexInput.readBytes(ref.bytes, ref.offset, min);
            pos += min;
        }
    }
}
@Override
public void close() throws IOException {
    main.close();
}

@Override
public byte readByte() throws IOException {
    final byte b = main.readByte();
    digest.update(b);
    return b;
}

@Override
public void readBytes(byte[] b, int offset, int len) throws IOException {
    main.readBytes(b, offset, len);
    digest.update(b, offset, len);
}
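// The delegating overrides above read like fragments of a checksumming IndexInput wrapper:
// every byte read from the wrapped input is also fed into a digest. A minimal sketch of how
// such a wrapper hangs together (hypothetical class, constructor, and field names; a real
// Lucene IndexInput subclass would also have to implement seek, slice, and clone):
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

class ChecksummingInputSketch {
    private final IndexInput main;            // wrapped input, assumed from the fragments
    private final Checksum digest = new CRC32();

    ChecksummingInputSketch(IndexInput main) {
        this.main = main;
    }

    byte readByte() throws IOException {
        final byte b = main.readByte();
        digest.update(b);                     // Checksum.update(int): the byte widens to int
        return b;
    }

    long checksumSoFar() {
        return digest.getValue();
    }
}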
public int nextPosition() throws IOException {
    // positions are delta-encoded as VInts; add the increment to the running position
    int positionIncrement = in.readVInt();
    position += positionIncrement;
    return position;
}
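// A hypothetical driver showing how next() and nextPosition() above pair up: next() seeks
// to a posting and reads its freq, after which nextPosition() is called exactly freq times.
// The helper name is an assumption; the fields it touches come from the fragments above.
long sumAllPositions() throws IOException {
    long total = 0;
    while (next()) {
        for (int i = 0; i < freq; i++) {
            total += nextPosition();
        }
    }
    return total;
}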
private void verifyData(ByteBufferDirectory dir) throws IOException {
    byte[] test = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
    assertThat(dir.fileExists("value1"), equalTo(true));
    assertThat(dir.fileLength("value1"), equalTo(38L));

    IndexInput indexInput = dir.openInput("value1", IOContext.DEFAULT);
    indexInput.readBytes(test, 0, 5);
    assertThat(test[0], equalTo((byte) 8));
    assertThat(indexInput.readInt(), equalTo(-1));
    assertThat(indexInput.readLong(), equalTo((long) 10));
    assertThat(indexInput.readInt(), equalTo(0));
    assertThat(indexInput.readInt(), equalTo(0));
    indexInput.readBytes(test, 0, 8);
    assertThat(test[0], equalTo((byte) 1));
    assertThat(test[7], equalTo((byte) 8));
    indexInput.readBytes(test, 0, 5);
    assertThat(test[0], equalTo((byte) 1));
    assertThat(test[4], equalTo((byte) 5));
    indexInput.seek(28);
    assertThat(indexInput.readByte(), equalTo((byte) 4));
    indexInput.seek(30);
    assertThat(indexInput.readByte(), equalTo((byte) 6));
    indexInput.seek(0);
    indexInput.readBytes(test, 0, 5);
    assertThat(test[0], equalTo((byte) 8));
    indexInput.close();

    indexInput = dir.openInput("value1", IOContext.DEFAULT);
    // iterate over all the data
    for (int i = 0; i < 38; i++) {
        indexInput.readByte();
    }
    indexInput.close();
}
// IF THIS TEST FAILS ON UPGRADE GO LOOK AT THE
// OldSIMockingCodec!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@Test
public void testWriteLegacyChecksums() throws IOException {
    final ShardId shardId = new ShardId(new Index("index"), 1);
    DirectoryService directoryService = new LuceneManagedDirectoryService(random());
    Store store = new Store(shardId, ImmutableSettings.EMPTY, directoryService,
        randomDistributor(directoryService), new DummyShardLock(shardId));
    // set default codec - all segments need checksums
    final boolean usesOldCodec = randomBoolean();
    IndexWriter writer = new IndexWriter(store.directory(),
        newIndexWriterConfig(random(), new MockAnalyzer(random()))
            .setCodec(usesOldCodec ? new OldSIMockingCodec() : actualDefaultCodec()));
    int docs = 1 + random().nextInt(100);
    for (int i = 0; i < docs; i++) {
        Document doc = new Document();
        doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()),
            random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
        doc.add(new SortedDocValuesField("dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
        writer.addDocument(doc);
    }
    if (random().nextBoolean()) {
        for (int i = 0; i < docs; i++) {
            if (random().nextBoolean()) {
                Document doc = new Document();
                doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
                doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()),
                    random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
                writer.updateDocument(new Term("id", "" + i), doc);
            }
        }
    }
    if (random().nextBoolean()) {
        DirectoryReader.open(writer, random().nextBoolean()).close(); // flush
    }
    Store.MetadataSnapshot metadata;
    // check before we committed
    try {
        store.getMetadata();
        fail("no index present - expected exception");
    } catch (IndexNotFoundException ex) {
        // expected
    }
    assertThat(store.getMetadataOrEmpty(), is(Store.MetadataSnapshot.EMPTY)); // nothing committed
    writer.close();

    Store.LegacyChecksums checksums = new Store.LegacyChecksums();
    Map<String, StoreFileMetaData> legacyMeta = new HashMap<>();
    for (String file : store.directory().listAll()) {
        if (file.equals("write.lock") || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
            continue;
        }
        BytesRef hash = new BytesRef();
        if (file.startsWith("segments")) {
            hash = Store.MetadataSnapshot.hashFile(store.directory(), file);
        }
        StoreFileMetaData storeFileMetaData = new StoreFileMetaData(file,
            store.directory().fileLength(file), file + "checksum", null, hash);
        legacyMeta.put(file, storeFileMetaData);
        checksums.add(storeFileMetaData);
    }
    checksums.write(store);

    metadata = store.getMetadata();
    Map<String, StoreFileMetaData> stringStoreFileMetaDataMap = metadata.asMap();
    assertThat(legacyMeta.size(), equalTo(stringStoreFileMetaDataMap.size()));
    if (usesOldCodec) {
        for (StoreFileMetaData meta : legacyMeta.values()) {
            assertTrue(meta.toString(), stringStoreFileMetaDataMap.containsKey(meta.name()));
            assertEquals(meta.name() + "checksum", meta.checksum());
            assertTrue(meta + " vs. " + stringStoreFileMetaDataMap.get(meta.name()),
                stringStoreFileMetaDataMap.get(meta.name()).isSame(meta));
        }
    } else {
        // even if we have a legacy checksum - if we use a new codec we should reuse
        for (StoreFileMetaData meta : legacyMeta.values()) {
            assertTrue(meta.toString(), stringStoreFileMetaDataMap.containsKey(meta.name()));
            assertFalse(meta + " vs. " + stringStoreFileMetaDataMap.get(meta.name()),
                stringStoreFileMetaDataMap.get(meta.name()).isSame(meta));
            StoreFileMetaData storeFileMetaData = metadata.get(meta.name());
            try (IndexInput input = store.openVerifyingInput(meta.name(), IOContext.DEFAULT, storeFileMetaData)) {
                assertTrue(storeFileMetaData.toString(), input instanceof Store.VerifyingIndexInput);
                input.seek(meta.length());
                Store.verify(input);
            }
        }
    }
    assertDeleteContent(store, directoryService);
    IOUtils.close(store);
}
public void testCheckIntegrity() throws IOException {
    Directory dir = newDirectory();
    long luceneFileLength = 0;
    try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            luceneFileLength += bytesRef.length;
        }
        CodecUtil.writeFooter(output);
        luceneFileLength += CodecUtil.footerLength();
    }

    final Adler32 adler32 = new Adler32();
    long legacyFileLength = 0;
    try (IndexOutput output = dir.createOutput("legacy.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            adler32.update(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            legacyFileLength += bytesRef.length;
        }
    }

    final long luceneChecksum;
    final long adler32LegacyChecksum = adler32.getValue();
    try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
        assertEquals(luceneFileLength, indexInput.length());
        luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
    }

    { // positive check
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength,
            Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength,
            Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertTrue(Store.checkIntegrityNoException(lucene, dir));
        assertTrue(Store.checkIntegrityNoException(legacy, dir));
    }

    { // negative check - wrong checksum
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength,
            Store.digestToString(luceneChecksum + 1), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength,
            Store.digestToString(adler32LegacyChecksum + 1));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrityNoException(lucene, dir));
        assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }

    { // negative check - wrong length
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1,
            Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength + 1,
            Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrityNoException(lucene, dir));
        assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }

    { // negative check - wrong file
        StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength,
            Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("lucene_checksum.bin", legacyFileLength,
            Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrityNoException(lucene, dir));
        assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }
    dir.close();
}
@Override
public long getFilePointer() {
    return main.getFilePointer();
}

@Override
public long length() {
    return main.length();
}
public long length() {
    return main.length();
}