Example #1
 @Test
 public void testVerifyingIndexOutput() throws IOException {
   Directory dir = newDirectory();
   IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT);
   int iters = scaledRandomIntBetween(10, 100);
   for (int i = 0; i < iters; i++) {
     BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
     output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
   }
   CodecUtil.writeFooter(output);
   output.close();
   IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT);
   String checksum = Store.digestToString(CodecUtil.retrieveChecksum(indexInput));
   indexInput.seek(0);
   BytesRef ref = new BytesRef(scaledRandomIntBetween(1, 1024));
   long length = indexInput.length();
   IndexOutput verifyingOutput =
       new Store.LuceneVerifyingIndexOutput(
           new StoreFileMetaData("foo1.bar", length, checksum),
           dir.createOutput("foo1.bar", IOContext.DEFAULT));
   while (length > 0) {
     if (random().nextInt(10) == 0) {
       verifyingOutput.writeByte(indexInput.readByte());
       length--;
     } else {
       int min = (int) Math.min(length, ref.bytes.length);
       indexInput.readBytes(ref.bytes, ref.offset, min);
       verifyingOutput.writeBytes(ref.bytes, ref.offset, min);
       length -= min;
     }
   }
   Store.verify(verifyingOutput);
   verifyingOutput.writeByte((byte) 0x0);
   try {
     Store.verify(verifyingOutput);
     fail("should be a corrupted index");
   } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
     // ok
   }
   IOUtils.close(indexInput, verifyingOutput, dir);
 }
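A minimal sketch of the footer round-trip this test exercises, assuming a recent Lucene where ByteBuffersDirectory exists (any Directory implementation works):

  // Write a few bytes, append the checksum footer, then read the checksum back.
  try (Directory dir = new ByteBuffersDirectory()) {
    try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
      out.writeBytes(new byte[] {1, 2, 3}, 3);
      CodecUtil.writeFooter(out); // FOOTER_MAGIC + algorithm ID + CRC32
    }
    try (IndexInput in = dir.openInput("demo.bin", IOContext.DEFAULT)) {
      long crc32 = CodecUtil.retrieveChecksum(in); // validates footer structure only
    }
  }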
Example #2
  @Test
  public void testVerifyingIndexInput() throws IOException {
    Directory dir = newDirectory();
    IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT);
    int iters = scaledRandomIntBetween(10, 100);
    for (int i = 0; i < iters; i++) {
      BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
      output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
    }
    CodecUtil.writeFooter(output);
    output.close();

    // Check file
    IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT);
    long checksum = CodecUtil.retrieveChecksum(indexInput);
    indexInput.seek(0);
    IndexInput verifyingIndexInput =
        new Store.VerifyingIndexInput(dir.openInput("foo.bar", IOContext.DEFAULT));
    readIndexInputFullyWithRandomSeeks(verifyingIndexInput);
    Store.verify(verifyingIndexInput);
    assertThat(checksum, equalTo(((ChecksumIndexInput) verifyingIndexInput).getChecksum()));
    IOUtils.close(indexInput, verifyingIndexInput);

    // Corrupt file and check again
    corruptFile(dir, "foo.bar", "foo1.bar");
    verifyingIndexInput =
        new Store.VerifyingIndexInput(dir.openInput("foo1.bar", IOContext.DEFAULT));
    readIndexInputFullyWithRandomSeeks(verifyingIndexInput);
    try {
      Store.verify(verifyingIndexInput);
      fail("should be a corrupted index");
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
      // ok
    }
    IOUtils.close(verifyingIndexInput);
    IOUtils.close(dir);
  }
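When a test only needs a pass/fail verdict rather than interleaved reads and seeks, CodecUtil can verify the whole file in one call; a sketch, reusing dir and the file written above:

  try (IndexInput in = dir.openInput("foo.bar", IOContext.DEFAULT)) {
    // Reads every byte and throws CorruptIndexException if the accumulated
    // CRC32 does not match the stored footer; returns the checksum on success.
    long checksum = CodecUtil.checksumEntireFile(in);
  }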
Example #3
  /** Sole constructor. */
  public BlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState state)
      throws IOException {
    boolean success = false;
    IndexInput indexIn = null;

    this.postingsReader = postingsReader;
    this.segment = state.segmentInfo.name;

    String termsName =
        IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_EXTENSION);
    try {
      termsIn = state.directory.openInput(termsName, state.context);
      version =
          CodecUtil.checkIndexHeader(
              termsIn,
              TERMS_CODEC_NAME,
              VERSION_START,
              VERSION_CURRENT,
              state.segmentInfo.getId(),
              state.segmentSuffix);

      String indexName =
          IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_INDEX_EXTENSION);
      indexIn = state.directory.openInput(indexName, state.context);
      CodecUtil.checkIndexHeader(
          indexIn,
          TERMS_INDEX_CODEC_NAME,
          version,
          version,
          state.segmentInfo.getId(),
          state.segmentSuffix);
      CodecUtil.checksumEntireFile(indexIn);

      // Have PostingsReader init itself
      postingsReader.init(termsIn, state);

      // NOTE: data file is too costly to verify checksum against all the bytes on open,
      // but for now we at least verify proper structure of the checksum footer: which looks
      // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
      // such as file truncation.
      CodecUtil.retrieveChecksum(termsIn);

      // Read per-field details
      seekDir(termsIn, dirOffset);
      seekDir(indexIn, indexDirOffset);

      final int numFields = termsIn.readVInt();
      if (numFields < 0) {
        throw new CorruptIndexException("invalid numFields: " + numFields, termsIn);
      }

      for (int i = 0; i < numFields; ++i) {
        final int field = termsIn.readVInt();
        final long numTerms = termsIn.readVLong();
        if (numTerms <= 0) {
          throw new CorruptIndexException("Illegal numTerms for field number: " + field, termsIn);
        }
        final int numBytes = termsIn.readVInt();
        if (numBytes < 0) {
          throw new CorruptIndexException(
              "invalid rootCode for field number: " + field + ", numBytes=" + numBytes, termsIn);
        }
        final BytesRef rootCode = new BytesRef(new byte[numBytes]);
        termsIn.readBytes(rootCode.bytes, 0, numBytes);
        rootCode.length = numBytes;
        final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
        if (fieldInfo == null) {
          throw new CorruptIndexException("invalid field number: " + field, termsIn);
        }
        final long sumTotalTermFreq =
            fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : termsIn.readVLong();
        final long sumDocFreq = termsIn.readVLong();
        final int docCount = termsIn.readVInt();
        final int longsSize = termsIn.readVInt();
        if (longsSize < 0) {
          throw new CorruptIndexException(
              "invalid longsSize for field: " + fieldInfo.name + ", longsSize=" + longsSize,
              termsIn);
        }
        BytesRef minTerm = readBytesRef(termsIn);
        BytesRef maxTerm = readBytesRef(termsIn);
        if (docCount < 0
            || docCount > state.segmentInfo.getDocCount()) { // #docs with field must be <= #docs
          throw new CorruptIndexException(
              "invalid docCount: " + docCount + " maxDoc: " + state.segmentInfo.getDocCount(),
              termsIn);
        }
        if (sumDocFreq < docCount) { // #postings must be >= #docs with field
          throw new CorruptIndexException(
              "invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount, termsIn);
        }
        if (sumTotalTermFreq != -1
            && sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings
          throw new CorruptIndexException(
              "invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq,
              termsIn);
        }
        final long indexStartFP = indexIn.readVLong();
        FieldReader previous =
            fields.put(
                fieldInfo.name,
                new FieldReader(
                    this,
                    fieldInfo,
                    numTerms,
                    rootCode,
                    sumTotalTermFreq,
                    sumDocFreq,
                    docCount,
                    indexStartFP,
                    longsSize,
                    indexIn,
                    minTerm,
                    maxTerm));
        if (previous != null) {
          throw new CorruptIndexException("duplicate field: " + fieldInfo.name, termsIn);
        }
      }

      indexIn.close();
      success = true;
    } finally {
      if (!success) {
        // this.close() will close termsIn:
        IOUtils.closeWhileHandlingException(indexIn, this);
      }
    }
  }
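The constructor above follows Lucene's usual open-or-release idiom: nothing leaks if any check throws. Distilled into a minimal sketch (codec name, versions, and segment IDs assumed to be in scope):

  boolean success = false;
  IndexInput in = dir.openInput(fileName, IOContext.DEFAULT);
  try {
    CodecUtil.checkIndexHeader(in, codecName, minVersion, maxVersion, segmentId, segmentSuffix);
    CodecUtil.retrieveChecksum(in); // cheap structural footer check, no full read
    success = true;
  } finally {
    if (!success) {
      // Close without masking the exception that aborted the open.
      IOUtils.closeWhileHandlingException(in);
    }
  }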
Example #4
  public void testCheckIntegrity() throws IOException {
    Directory dir = newDirectory();
    long luceneFileLength = 0;

    try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) {
      int iters = scaledRandomIntBetween(10, 100);
      for (int i = 0; i < iters; i++) {
        BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
        output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        luceneFileLength += bytesRef.length;
      }
      CodecUtil.writeFooter(output);
      luceneFileLength += CodecUtil.footerLength();
    }

    final Adler32 adler32 = new Adler32();
    long legacyFileLength = 0;
    try (IndexOutput output = dir.createOutput("legacy.bin", IOContext.DEFAULT)) {
      int iters = scaledRandomIntBetween(10, 100);
      for (int i = 0; i < iters; i++) {
        BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
        output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        adler32.update(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        legacyFileLength += bytesRef.length;
      }
    }
    final long luceneChecksum;
    final long adler32LegacyChecksum = adler32.getValue();
    try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
      assertEquals(luceneFileLength, indexInput.length());
      luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
    }

    { // positive check
      StoreFileMetaData lucene =
          new StoreFileMetaData(
              "lucene_checksum.bin",
              luceneFileLength,
              Store.digestToString(luceneChecksum),
              Version.LUCENE_4_8_0);
      StoreFileMetaData legacy =
          new StoreFileMetaData(
              "legacy.bin", legacyFileLength, Store.digestToString(adler32LegacyChecksum));
      assertTrue(legacy.hasLegacyChecksum());
      assertFalse(lucene.hasLegacyChecksum());
      assertTrue(Store.checkIntegrityNoException(lucene, dir));
      assertTrue(Store.checkIntegrityNoException(legacy, dir));
    }

    { // negative check - wrong checksum
      StoreFileMetaData lucene =
          new StoreFileMetaData(
              "lucene_checksum.bin",
              luceneFileLength,
              Store.digestToString(luceneChecksum + 1),
              Version.LUCENE_4_8_0);
      StoreFileMetaData legacy =
          new StoreFileMetaData(
              "legacy.bin", legacyFileLength, Store.digestToString(adler32LegacyChecksum + 1));
      assertTrue(legacy.hasLegacyChecksum());
      assertFalse(lucene.hasLegacyChecksum());
      assertFalse(Store.checkIntegrityNoException(lucene, dir));
      assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }

    { // negative check - wrong length
      StoreFileMetaData lucene =
          new StoreFileMetaData(
              "lucene_checksum.bin",
              luceneFileLength + 1,
              Store.digestToString(luceneChecksum),
              Version.LUCENE_4_8_0);
      StoreFileMetaData legacy =
          new StoreFileMetaData(
              "legacy.bin", legacyFileLength + 1, Store.digestToString(adler32LegacyChecksum));
      assertTrue(legacy.hasLegacyChecksum());
      assertFalse(lucene.hasLegacyChecksum());
      assertFalse(Store.checkIntegrityNoException(lucene, dir));
      assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }

    { // negative check - wrong file
      StoreFileMetaData lucene =
          new StoreFileMetaData(
              "legacy.bin",
              luceneFileLength,
              Store.digestToString(luceneChecksum),
              Version.LUCENE_4_8_0);
      StoreFileMetaData legacy =
          new StoreFileMetaData(
              "lucene_checksum.bin", legacyFileLength, Store.digestToString(adler32LegacyChecksum));
      assertTrue(legacy.hasLegacyChecksum());
      assertFalse(lucene.hasLegacyChecksum());
      assertFalse(Store.checkIntegrityNoException(lucene, dir));
      assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }
    dir.close();
  }
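The legacy checksum string compared above is just an Adler32 value rendered as text. A sketch of the derivation, assuming Store.digestToString is Long.toString(digest, Character.MAX_RADIX) (treat that rendering as an assumption):

  Adler32 adler32 = new Adler32();
  adler32.update(bytes, 0, bytes.length); // same bytes that were written to the file
  String legacyDigest = Long.toString(adler32.getValue(), Character.MAX_RADIX);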
Example #5
  @Test
  public void testMixedChecksums() throws IOException {
    final ShardId shardId = new ShardId(new Index("index"), 1);
    DirectoryService directoryService = new LuceneManagedDirectoryService(random());
    Store store =
        new Store(
            shardId,
            ImmutableSettings.EMPTY,
            directoryService,
            randomDistributor(directoryService),
            new DummyShardLock(shardId));
    // this time, a random codec...
    IndexWriter writer =
        new IndexWriter(
            store.directory(),
            newIndexWriterConfig(random(), new MockAnalyzer(random()))
                .setCodec(actualDefaultCodec()));
    int docs = 1 + random().nextInt(100);

    for (int i = 0; i < docs; i++) {
      Document doc = new Document();
      doc.add(
          new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
      doc.add(
          new TextField(
              "body",
              TestUtil.randomRealisticUnicodeString(random()),
              random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
      doc.add(
          new SortedDocValuesField(
              "dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
      writer.addDocument(doc);
    }
    if (random().nextBoolean()) {
      for (int i = 0; i < docs; i++) {
        if (random().nextBoolean()) {
          Document doc = new Document();
          doc.add(
              new TextField(
                  "id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
          doc.add(
              new TextField(
                  "body",
                  TestUtil.randomRealisticUnicodeString(random()),
                  random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
          writer.updateDocument(new Term("id", "" + i), doc);
        }
      }
    }
    if (random().nextBoolean()) {
      DirectoryReader.open(writer, random().nextBoolean()).close(); // flush
    }
    Store.MetadataSnapshot metadata;
    // check before we committed
    try {
      store.getMetadata();
      fail("no index present - expected exception");
    } catch (IndexNotFoundException ex) {
      // expected
    }
    assertThat(store.getMetadataOrEmpty(), is(Store.MetadataSnapshot.EMPTY)); // nothing committed
    writer.commit();
    writer.close();
    Store.LegacyChecksums checksums = new Store.LegacyChecksums();
    metadata = store.getMetadata();
    assertThat(metadata.asMap().isEmpty(), is(false));
    for (StoreFileMetaData meta : metadata) {
      try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) {
        if (meta.checksum() == null) {
          String checksum = null;
          try {
            CodecUtil.retrieveChecksum(input);
            fail("expected a corrupt index - posting format has not checksums");
          } catch (CorruptIndexException
              | IndexFormatTooOldException
              | IndexFormatTooNewException ex) {
            try (ChecksumIndexInput checksumIndexInput =
                store.directory().openChecksumInput(meta.name(), IOContext.DEFAULT)) {
              checksumIndexInput.seek(meta.length());
              checksum = Store.digestToString(checksumIndexInput.getChecksum());
            }
            // fine - it's a postings format without checksums
            checksums.add(new StoreFileMetaData(meta.name(), meta.length(), checksum, null));
          }
        } else {
          String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input));
          assertThat(
              "File: " + meta.name() + " has a different checksum",
              meta.checksum(),
              equalTo(checksum));
          assertThat(meta.hasLegacyChecksum(), equalTo(false));
          assertThat(meta.writtenBy(), equalTo(Version.LATEST));
        }
      }
    }
    assertConsistent(store, metadata);
    checksums.write(store);
    metadata = store.getMetadata();
    assertThat(metadata.asMap().isEmpty(), is(false));
    for (StoreFileMetaData meta : metadata) {
      assertThat(
          "file: " + meta.name() + " has a null checksum", meta.checksum(), not(nullValue()));
      if (meta.hasLegacyChecksum()) {
        try (ChecksumIndexInput checksumIndexInput =
            store.directory().openChecksumInput(meta.name(), IOContext.DEFAULT)) {
          checksumIndexInput.seek(meta.length());
          assertThat(
              meta.checksum(), equalTo(Store.digestToString(checksumIndexInput.getChecksum())));
        }
      } else {
        try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) {
          String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input));
          assertThat(
              "File: " + meta.name() + " has a different checksum",
              meta.checksum(),
              equalTo(checksum));
          assertThat(meta.hasLegacyChecksum(), equalTo(false));
          assertThat(meta.writtenBy(), equalTo(Version.LATEST));
        }
      }
    }
    assertConsistent(store, metadata);
    TestUtil.checkIndex(store.directory());
    assertDeleteContent(store, directoryService);
    IOUtils.close(store);
  }
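The fallback in the first loop above, in miniature: a file from a pre-checksum postings format has no footer, so retrieveChecksum throws and the digest is computed by reading the file through a ChecksumIndexInput instead (file name assumed):

  String checksum;
  try (IndexInput in = store.directory().openInput(name, IOContext.DEFAULT)) {
    checksum = Store.digestToString(CodecUtil.retrieveChecksum(in));
  } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
    try (ChecksumIndexInput in = store.directory().openChecksumInput(name, IOContext.DEFAULT)) {
      in.seek(in.length()); // forward-only seek: reads (and digests) every byte
      checksum = Store.digestToString(in.getChecksum());
    }
  }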
Example #6
  @Test
  public void testNewChecksums() throws IOException {
    final ShardId shardId = new ShardId(new Index("index"), 1);
    DirectoryService directoryService = new LuceneManagedDirectoryService(random());
    Store store =
        new Store(
            shardId,
            ImmutableSettings.EMPTY,
            directoryService,
            randomDistributor(directoryService),
            new DummyShardLock(shardId));
    // set default codec - all segments need checksums
    IndexWriter writer =
        new IndexWriter(
            store.directory(),
            newIndexWriterConfig(random(), new MockAnalyzer(random()))
                .setCodec(actualDefaultCodec()));
    int docs = 1 + random().nextInt(100);

    for (int i = 0; i < docs; i++) {
      Document doc = new Document();
      doc.add(
          new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
      doc.add(
          new TextField(
              "body",
              TestUtil.randomRealisticUnicodeString(random()),
              random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
      doc.add(
          new SortedDocValuesField(
              "dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random()))));
      writer.addDocument(doc);
    }
    if (random().nextBoolean()) {
      for (int i = 0; i < docs; i++) {
        if (random().nextBoolean()) {
          Document doc = new Document();
          doc.add(
              new TextField(
                  "id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
          doc.add(
              new TextField(
                  "body",
                  TestUtil.randomRealisticUnicodeString(random()),
                  random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
          writer.updateDocument(new Term("id", "" + i), doc);
        }
      }
    }
    if (random().nextBoolean()) {
      DirectoryReader.open(writer, random().nextBoolean()).close(); // flush
    }
    Store.MetadataSnapshot metadata;
    // check before we committed
    try {
      store.getMetadata();
      fail("no index present - expected exception");
    } catch (IndexNotFoundException ex) {
      // expected
    }
    assertThat(store.getMetadataOrEmpty(), is(Store.MetadataSnapshot.EMPTY)); // nothing committed
    writer.commit();
    writer.close();
    metadata = store.getMetadata();
    assertThat(metadata.asMap().isEmpty(), is(false));
    for (StoreFileMetaData meta : metadata) {
      try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) {
        String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input));
        assertThat(
            "File: " + meta.name() + " has a different checksum",
            meta.checksum(),
            equalTo(checksum));
        assertThat(meta.hasLegacyChecksum(), equalTo(false));
        assertThat(meta.writtenBy(), equalTo(Version.LATEST));
        if (meta.name().endsWith(".si") || meta.name().startsWith("segments_")) {
          assertThat(meta.hash().length, greaterThan(0));
        }
      }
    }
    assertConsistent(store, metadata);

    TestUtil.checkIndex(store.directory());
    assertDeleteContent(store, directoryService);
    IOUtils.close(store);
  }
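The hash assertions for .si and segments_ files hold because the metadata snapshot stores those small files whole. A hedged sketch of such a helper (hypothetical name, not necessarily Store's exact implementation):

  // Read a small metadata file completely and keep its raw bytes as the "hash".
  static BytesRef hashFile(Directory directory, String file) throws IOException {
    try (IndexInput in = directory.openInput(file, IOContext.READONCE)) {
      byte[] bytes = new byte[(int) in.length()];
      in.readBytes(bytes, 0, bytes.length);
      return new BytesRef(bytes);
    }
  }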
Example #7
  private boolean doRestore() throws Exception {

    Path backupPath = Paths.get(backupLocation).resolve(backupName);
    SimpleDateFormat dateFormat = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT);
    String restoreIndexName = "restore." + dateFormat.format(new Date());
    String restoreIndexPath = core.getDataDir() + restoreIndexName;

    Directory restoreIndexDir = null;
    Directory indexDir = null;
    try (Directory backupDir = FSDirectory.open(backupPath)) {

      final Version version =
          IndexFetcher.checkOldestVersion(SegmentInfos.readLatestCommit(backupDir));

      restoreIndexDir =
          core.getDirectoryFactory()
              .get(
                  restoreIndexPath,
                  DirectoryFactory.DirContext.DEFAULT,
                  core.getSolrConfig().indexConfig.lockType);

      // Prefer local copy.
      indexDir =
          core.getDirectoryFactory()
              .get(
                  core.getIndexDir(),
                  DirectoryFactory.DirContext.DEFAULT,
                  core.getSolrConfig().indexConfig.lockType);

      // Move all files from backupDir to restoreIndexDir
      for (String filename : backupDir.listAll()) {
        checkInterrupted();
        log.info("Copying file {} to restore directory ", filename);
        try (IndexInput indexInput = backupDir.openInput(filename, IOContext.READONCE)) {
          Long checksum = null;
          try {
            checksum = CodecUtil.retrieveChecksum(indexInput);
          } catch (Exception e) {
            log.warn("Could not read checksum from index file: " + filename, e);
          }
          long length = indexInput.length();
          IndexFetcher.CompareResult compareResult =
              IndexFetcher.compareFile(indexDir, version, filename, length, checksum);
          if (!compareResult.equal
              || (!compareResult.checkSummed
                  && (filename.endsWith(".si")
                      || filename.endsWith(".liv")
                      || filename.startsWith("segments_")))) {
            restoreIndexDir.copyFrom(backupDir, filename, filename, IOContext.READONCE);
          } else {
            // prefer local copy
            restoreIndexDir.copyFrom(indexDir, filename, filename, IOContext.READONCE);
          }
        } catch (Exception e) {
          throw new SolrException(
              SolrException.ErrorCode.UNKNOWN, "Exception while restoring the backup index", e);
        }
      }
      log.debug("Switching directories");
      IndexFetcher.modifyIndexProps(core, restoreIndexName);

      boolean success;
      try {
        core.getUpdateHandler().newIndexWriter(false);
        openNewSearcher();
        success = true;
        log.info("Successfully restored to the backup index");
      } catch (Exception e) {
        // Rollback to the old index directory. Delete the restore index directory and mark the
        // restore as failed.
        log.warn("Could not switch to restored index. Rolling back to the current index");
        Directory dir = null;
        try {
          dir =
              core.getDirectoryFactory()
                  .get(
                      core.getDataDir(),
                      DirectoryFactory.DirContext.META_DATA,
                      core.getSolrConfig().indexConfig.lockType);
          dir.deleteFile(IndexFetcher.INDEX_PROPERTIES);
        } finally {
          if (dir != null) {
            core.getDirectoryFactory().release(dir);
          }
        }

        core.getDirectoryFactory().doneWithDirectory(restoreIndexDir);
        core.getDirectoryFactory().remove(restoreIndexDir);
        core.getUpdateHandler().newIndexWriter(false);
        openNewSearcher();
        throw new SolrException(
            SolrException.ErrorCode.UNKNOWN, "Exception while restoring the backup index", e);
      }
      if (success) {
        core.getDirectoryFactory().doneWithDirectory(indexDir);
        core.getDirectoryFactory().remove(indexDir);
      }

      return true;
    } finally {
      if (restoreIndexDir != null) {
        core.getDirectoryFactory().release(restoreIndexDir);
      }
      if (indexDir != null) {
        core.getDirectoryFactory().release(indexDir);
      }
    }
  }
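Before trusting the backup, doRestore inspects its newest commit point. A sketch of that inspection with plain Lucene APIs (getMinSegmentLuceneVersion stands in for IndexFetcher.checkOldestVersion and is an assumption):

  SegmentInfos infos = SegmentInfos.readLatestCommit(backupDir);
  // Oldest Lucene version among the committed segments; may be null for an empty index.
  Version oldest = infos.getMinSegmentLuceneVersion();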
Example #8
  Lucene42DocValuesProducer(
      SegmentReadState state,
      String dataCodec,
      String dataExtension,
      String metaCodec,
      String metaExtension)
      throws IOException {
    maxDoc = state.segmentInfo.maxDoc();
    merging = false;
    String metaName =
        IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    // read in the entries from the metadata file.
    ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context);
    boolean success = false;
    ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOfInstance(getClass()));
    try {
      version = CodecUtil.checkHeader(in, metaCodec, VERSION_START, VERSION_CURRENT);
      numerics = new HashMap<>();
      binaries = new HashMap<>();
      fsts = new HashMap<>();
      numEntries = readFields(in, state.fieldInfos);

      if (version >= VERSION_CHECKSUM) {
        CodecUtil.checkFooter(in);
      } else {
        CodecUtil.checkEOF(in);
      }

      success = true;
    } finally {
      if (success) {
        IOUtils.close(in);
      } else {
        IOUtils.closeWhileHandlingException(in);
      }
    }

    String dataName =
        IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    this.data = state.directory.openInput(dataName, state.context);
    success = false;
    try {
      final int version2 = CodecUtil.checkHeader(data, dataCodec, VERSION_START, VERSION_CURRENT);
      if (version != version2) {
        throw new CorruptIndexException(
            "Format versions mismatch: meta=" + version + ", data=" + version2, data);
      }

      if (version >= VERSION_CHECKSUM) {
        // NOTE: data file is too costly to verify checksum against all the bytes on open,
        // but for now we at least verify proper structure of the checksum footer: which looks
        // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
        // such as file truncation.
        CodecUtil.retrieveChecksum(data);
      }

      success = true;
    } finally {
      if (!success) {
        IOUtils.closeWhileHandlingException(this.data);
      }
    }
  }
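The meta/data handshake above, distilled: both files must carry the same format version or the segment is treated as corrupt (stream and codec names assumed):

  int metaVersion = CodecUtil.checkHeader(metaIn, metaCodec, VERSION_START, VERSION_CURRENT);
  int dataVersion = CodecUtil.checkHeader(dataIn, dataCodec, VERSION_START, VERSION_CURRENT);
  if (metaVersion != dataVersion) {
    throw new CorruptIndexException(
        "Format versions mismatch: meta=" + metaVersion + ", data=" + dataVersion, dataIn);
  }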
Example #9
  public BlockTermsReader(
      TermsIndexReaderBase indexReader, PostingsReaderBase postingsReader, SegmentReadState state)
      throws IOException {

    this.postingsReader = postingsReader;

    String filename =
        IndexFileNames.segmentFileName(
            state.segmentInfo.name, state.segmentSuffix, BlockTermsWriter.TERMS_EXTENSION);
    in = state.directory.openInput(filename, state.context);

    boolean success = false;
    try {
      CodecUtil.checkIndexHeader(
          in,
          BlockTermsWriter.CODEC_NAME,
          BlockTermsWriter.VERSION_START,
          BlockTermsWriter.VERSION_CURRENT,
          state.segmentInfo.getId(),
          state.segmentSuffix);

      // Have PostingsReader init itself
      postingsReader.init(in, state);

      // NOTE: data file is too costly to verify checksum against all the bytes on open,
      // but for now we at least verify proper structure of the checksum footer: which looks
      // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
      // such as file truncation.
      CodecUtil.retrieveChecksum(in);

      // Read per-field details
      seekDir(in);

      final int numFields = in.readVInt();
      if (numFields < 0) {
        throw new CorruptIndexException("invalid number of fields: " + numFields, in);
      }
      for (int i = 0; i < numFields; i++) {
        final int field = in.readVInt();
        final long numTerms = in.readVLong();
        assert numTerms >= 0;
        final long termsStartPointer = in.readVLong();
        final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
        final long sumTotalTermFreq =
            fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : in.readVLong();
        final long sumDocFreq = in.readVLong();
        final int docCount = in.readVInt();
        final int longsSize = in.readVInt();
        if (docCount < 0
            || docCount > state.segmentInfo.maxDoc()) { // #docs with field must be <= #docs
          throw new CorruptIndexException(
              "invalid docCount: " + docCount + " maxDoc: " + state.segmentInfo.maxDoc(), in);
        }
        if (sumDocFreq < docCount) { // #postings must be >= #docs with field
          throw new CorruptIndexException(
              "invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount, in);
        }
        if (sumTotalTermFreq != -1
            && sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings
          throw new CorruptIndexException(
              "invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq, in);
        }
        FieldReader previous =
            fields.put(
                fieldInfo.name,
                new FieldReader(
                    fieldInfo,
                    numTerms,
                    termsStartPointer,
                    sumTotalTermFreq,
                    sumDocFreq,
                    docCount,
                    longsSize));
        if (previous != null) {
          throw new CorruptIndexException("duplicate fields: " + fieldInfo.name, in);
        }
      }
      success = true;
    } finally {
      if (!success) {
        in.close();
      }
    }

    this.indexReader = indexReader;
  }
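seekDir is not shown above; a typical implementation reads a directory offset stored just before the footer and jumps to it (a sketch under that assumption, not necessarily this class's exact code):

  private void seekDir(IndexInput input) throws IOException {
    // The directory offset is a fixed 8-byte long written right before the footer.
    input.seek(input.length() - CodecUtil.footerLength() - 8);
    long dirOffset = input.readLong();
    input.seek(dirOffset);
  }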
Example #10
  Lucene70NormsProducer(
      SegmentReadState state,
      String dataCodec,
      String dataExtension,
      String metaCodec,
      String metaExtension)
      throws IOException {
    maxDoc = state.segmentInfo.maxDoc();
    String metaName =
        IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    int version = -1;

    // read in the entries from the metadata file.
    try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) {
      Throwable priorE = null;
      try {
        version =
            CodecUtil.checkIndexHeader(
                in,
                metaCodec,
                VERSION_START,
                VERSION_CURRENT,
                state.segmentInfo.getId(),
                state.segmentSuffix);
        readFields(in, state.fieldInfos);
      } catch (Throwable exception) {
        priorE = exception;
      } finally {
        CodecUtil.checkFooter(in, priorE);
      }
    }

    String dataName =
        IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.openInput(dataName, state.context);
    boolean success = false;
    try {
      final int version2 =
          CodecUtil.checkIndexHeader(
              data,
              dataCodec,
              VERSION_START,
              VERSION_CURRENT,
              state.segmentInfo.getId(),
              state.segmentSuffix);
      if (version != version2) {
        throw new CorruptIndexException(
            "Format versions mismatch: meta=" + version + ",data=" + version2, data);
      }

      // NOTE: data file is too costly to verify checksum against all the bytes on open,
      // but for now we at least verify proper structure of the checksum footer: which looks
      // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
      // such as file truncation.
      CodecUtil.retrieveChecksum(data);

      success = true;
    } finally {
      if (!success) {
        IOUtils.closeWhileHandlingException(this.data);
      }
    }
  }
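The priorE idiom above is the part worth copying: even when reading the metadata throws, checkFooter still runs, so a checksum mismatch can be reported as the likely root cause. Distilled (readEverything is a hypothetical stand-in for the header and field reads):

  try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) {
    Throwable priorE = null;
    try {
      readEverything(in);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      // Verifies the CRC32 footer; if priorE is non-null, it is rethrown with
      // checksum information attached as suppressed detail.
      CodecUtil.checkFooter(in, priorE);
    }
  }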