  /** Test attributes map */
  public void testAttributes() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte[] id = StringHelper.randomId();
    Map<String, String> attributes = new HashMap<>();
    attributes.put("key1", "value1");
    attributes.put("key2", "value2");
    SegmentInfo info =
        new SegmentInfo(
            dir,
            getVersions()[0],
            "_123",
            1,
            false,
            codec,
            Collections.emptyMap(),
            id,
            attributes,
            null);
    info.setFiles(Collections.emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(attributes, info2.getAttributes());

    // attributes map should be immutable
    expectThrows(
        UnsupportedOperationException.class,
        () -> {
          info2.getAttributes().put("bogus", "bogus");
        });

    dir.close();
  }

  /**
   * Sets some otherwise hard-to-test properties (random segment name, ID, document count, etc.)
   * and round-trips them through the format.
   */
  public void testRandom() throws Exception {
    Codec codec = getCodec();
    Version[] versions = getVersions();
    for (int i = 0; i < 10; i++) {
      Directory dir = newDirectory();
      Version version = versions[random().nextInt(versions.length)];
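      // segment names are an underscore followed by a base-36 integer, e.g. "_0" or "_4f"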
      String name =
          "_" + Integer.toString(random().nextInt(Integer.MAX_VALUE), Character.MAX_RADIX);
      int docCount = TestUtil.nextInt(random(), 1, IndexWriter.MAX_DOCS);
      boolean isCompoundFile = random().nextBoolean();
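      // create the (empty) files the segment will reference, so they exist in the directory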
      Set<String> files = new HashSet<>();
      int numFiles = random().nextInt(10);
      for (int j = 0; j < numFiles; j++) {
        String file = IndexFileNames.segmentFileName(name, "", Integer.toString(j));
        files.add(file);
        dir.createOutput(file, IOContext.DEFAULT).close();
      }
      Map<String, String> diagnostics = new HashMap<>();
      int numDiags = random().nextInt(10);
      for (int j = 0; j < numDiags; j++) {
        diagnostics.put(
            TestUtil.randomUnicodeString(random()), TestUtil.randomUnicodeString(random()));
      }
      byte[] id = new byte[StringHelper.ID_LENGTH];
      random().nextBytes(id);

      Map<String, String> attributes = new HashMap<>();
      int numAttributes = random().nextInt(10);
      for (int j = 0; j < numAttributes; j++) {
        attributes.put(
            TestUtil.randomUnicodeString(random()), TestUtil.randomUnicodeString(random()));
      }

      SegmentInfo info =
          new SegmentInfo(
              dir,
              version,
              name,
              docCount,
              isCompoundFile,
              codec,
              diagnostics,
              id,
              attributes,
              null);
      info.setFiles(files);
      codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
      SegmentInfo info2 = codec.segmentInfoFormat().read(dir, name, id, IOContext.DEFAULT);
      assertEquals(info, info2);

      dir.close();
    }
  }

  /**
   * Tests a segment infos read that hits an exception on close; makes sure we get our exception
   * back, with no file handle leaks, etc.
   */
  public void testExceptionOnCloseInput() throws Exception {
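    // with doFail set, throw a FakeIOException whenever a close() frame is on the call stack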
    Failure fail =
        new Failure() {
          @Override
          public void eval(MockDirectoryWrapper dir) throws IOException {
            for (StackTraceElement e : Thread.currentThread().getStackTrace()) {
              if (doFail && "close".equals(e.getMethodName())) {
                throw new FakeIOException();
              }
            }
          }
        };

    MockDirectoryWrapper dir = newMockDirectory();
    dir.failOn(fail);
    Codec codec = getCodec();
    byte[] id = StringHelper.randomId();
    SegmentInfo info =
        new SegmentInfo(
            dir,
            getVersions()[0],
            "_123",
            1,
            false,
            codec,
            Collections.emptyMap(),
            id,
            new HashMap<>(),
            null);
    info.setFiles(Collections.emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);

    fail.setDoFail();
    expectThrows(
        FakeIOException.class,
        () -> {
          codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
        });
    fail.clearDoFail();

    dir.close();
  }

  /** Test files map */
  public void testFiles() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte[] id = StringHelper.randomId();
    SegmentInfo info =
        new SegmentInfo(
            dir,
            getVersions()[0],
            "_123",
            1,
            false,
            codec,
            Collections.emptyMap(),
            id,
            new HashMap<>(),
            null);
    info.setFiles(Collections.emptySet());
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(info.files(), info2.files());
    dir.close();
  }

  /** Test sort */
  public void testSort() throws IOException {
    assumeTrue("test requires a codec that can read/write index sort", supportsIndexSort());

    final int iters = atLeast(5);
    for (int i = 0; i < iters; ++i) {
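      // first iteration exercises the no-index-sort case; later iterations use 1-3 random fields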
      Sort sort;
      if (i == 0) {
        sort = null;
      } else {
        final int numSortFields = TestUtil.nextInt(random(), 1, 3);
        SortField[] sortFields = new SortField[numSortFields];
        for (int j = 0; j < numSortFields; ++j) {
          sortFields[j] = randomIndexSortField();
        }
        sort = new Sort(sortFields);
      }

      Directory dir = newDirectory();
      Codec codec = getCodec();
      byte[] id = StringHelper.randomId();
      SegmentInfo info =
          new SegmentInfo(
              dir,
              getVersions()[0],
              "_123",
              1,
              false,
              codec,
              Collections.emptyMap(),
              id,
              new HashMap<>(),
              sort);
      info.setFiles(Collections.emptySet());
      codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
      SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
      assertEquals(sort, info2.getIndexSort());
      dir.close();
    }
  }

  /** Tests that the SI writer adds itself to files(). */
  public void testAddsSelfToFiles() throws Exception {
    Directory dir = newDirectory();
    Codec codec = getCodec();
    byte[] id = StringHelper.randomId();
    SegmentInfo info =
        new SegmentInfo(
            dir,
            getVersions()[0],
            "_123",
            1,
            false,
            codec,
            Collections.emptyMap(),
            id,
            new HashMap<>(),
            null);
    Set<String> originalFiles = Collections.singleton("_123.a");
    info.setFiles(originalFiles);
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);

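    // the segment info writer must add the file(s) it wrote to the info's file set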
    Set<String> modifiedFiles = info.files();
    assertTrue(modifiedFiles.containsAll(originalFiles));
    assertTrue(
        "did you forget to add yourself to files()", modifiedFiles.size() > originalFiles.size());

    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
    assertEquals(info.files(), info2.files());

    // files set should be immutable
    expectThrows(
        UnsupportedOperationException.class,
        () -> {
          info2.files().add("bogus");
        });

    dir.close();
  }

  /**
   * Reads a particular segmentFileName. Note that this may throw an IOException if a commit is in
   * progress.
   *
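   * <p>A minimal usage sketch (the concrete {@code segments_N} file name is hypothetical):
   *
   * <pre>
   * SegmentInfos infos = SegmentInfos.readCommit(directory, "segments_4");
   * </pre>
   *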
   * @param directory directory containing the segments file
   * @param segmentFileName segment file to load
   * @throws CorruptIndexException if the index is corrupt
   * @throws IOException if there is a low-level IO error
   */
  public static final SegmentInfos readCommit(Directory directory, String segmentFileName)
      throws IOException {

    long generation = generationFromSegmentsFileName(segmentFileName);
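    // the commit generation (the N in segments_N) is encoded in the file name itself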
    try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ)) {
      // NOTE: as long as we want to throw IndexFormatTooOldException (vs CorruptIndexException),
      // we need to read the magic ourselves.
      int magic = input.readInt();
      if (magic != CodecUtil.CODEC_MAGIC) {
        throw new IndexFormatTooOldException(
            input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
      }
      // 4.0+
      int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_40, VERSION_CURRENT);
      // 5.0+
      byte[] id = null;
      if (format >= VERSION_50) {
        id = new byte[StringHelper.ID_LENGTH];
        input.readBytes(id, 0, id.length);
        CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
      }

      SegmentInfos infos = new SegmentInfos();
      infos.id = id;
      infos.generation = generation;
      infos.lastGeneration = generation;
      if (format >= VERSION_53) {
        // TODO: in the future (7.0? sigh) we can use this to throw IndexFormatTooOldException ...
        // or just rely on the minSegmentLuceneVersion check instead:
        infos.luceneVersion =
            Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
      } else {
        // else compute the min version down below in the for loop
      }

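      // version changes each time the index is modified; counter is used to name new segments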
      infos.version = input.readLong();
      infos.counter = input.readInt();
      int numSegments = input.readInt();
      if (numSegments < 0) {
        throw new CorruptIndexException("invalid segment count: " + numSegments, input);
      }

      if (format >= VERSION_53) {
        if (numSegments > 0) {
          infos.minSegmentLuceneVersion =
              Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
          if (infos.minSegmentLuceneVersion.onOrAfter(Version.LUCENE_4_0_0_ALPHA) == false) {
            throw new IndexFormatTooOldException(
                input,
                "this index contains a too-old segment (version: "
                    + infos.minSegmentLuceneVersion
                    + ")");
          }
        } else {
          // else leave as null: no segments
        }
      } else {
        // else we recompute it below as we visit segments; it can't be used for throwing
        // IndexFormatTooOldException, but consumers of SegmentInfos can maybe still use it for
        // other reasons
      }

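      // running total of per-segment maxDoc; bounds-checked below (LUCENE-6299)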
      long totalDocs = 0;
      for (int seg = 0; seg < numSegments; seg++) {
        String segName = input.readString();
        final byte[] segmentID;
        if (format >= VERSION_50) {
          byte hasID = input.readByte();
          if (hasID == 1) {
            segmentID = new byte[StringHelper.ID_LENGTH];
            input.readBytes(segmentID, 0, segmentID.length);
          } else if (hasID == 0) {
            segmentID = null; // 4.x segment, doesn't have an ID
          } else {
            throw new CorruptIndexException("invalid hasID byte, got: " + hasID, input);
          }
        } else {
          segmentID = null;
        }
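        // each segment records the name of the codec that wrote it; the flag marks pre-5.3
        // formats, which may still name codecs that are no longer supported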
        Codec codec = readCodec(input, format < VERSION_53);
        SegmentInfo info =
            codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
        info.setCodec(codec);
        totalDocs += info.maxDoc();
        long delGen = input.readLong();
        int delCount = input.readInt();
        if (delCount < 0 || delCount > info.maxDoc()) {
          throw new CorruptIndexException(
              "invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
        }
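        // generation stamps for field-infos and doc-values updates; -1 means none exist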
        long fieldInfosGen = -1;
        if (format >= VERSION_46) {
          fieldInfosGen = input.readLong();
        }
        final long dvGen;
        if (format >= VERSION_49) {
          dvGen = input.readLong();
        } else {
          dvGen = fieldInfosGen;
        }
        SegmentCommitInfo siPerCommit =
            new SegmentCommitInfo(info, delCount, delGen, fieldInfosGen, dvGen);
        if (format >= VERSION_46) {
          if (format < VERSION_49) {
            // Recorded per-generation files, which were buggy (see
            // LUCENE-5636). We need to read and keep them so we continue to
            // reference those files. Unfortunately it means that the files will
            // be referenced even if the fields are updated again, until the
            // segment is merged.
            final int numGensUpdatesFiles = input.readInt();
            final Map<Long, Set<String>> genUpdatesFiles;
            if (numGensUpdatesFiles == 0) {
              genUpdatesFiles = Collections.emptyMap();
            } else {
              genUpdatesFiles = new HashMap<>(numGensUpdatesFiles);
              for (int i = 0; i < numGensUpdatesFiles; i++) {
                genUpdatesFiles.put(input.readLong(), input.readStringSet());
              }
            }
            siPerCommit.setGenUpdatesFiles(genUpdatesFiles);
          } else {
            if (format >= VERSION_51) {
              siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
            } else {
              siPerCommit.setFieldInfosFiles(Collections.unmodifiableSet(input.readStringSet()));
            }
            final Map<Integer, Set<String>> dvUpdateFiles;
            final int numDVFields = input.readInt();
            if (numDVFields == 0) {
              dvUpdateFiles = Collections.emptyMap();
            } else {
              Map<Integer, Set<String>> map = new HashMap<>(numDVFields);
              for (int i = 0; i < numDVFields; i++) {
                if (format >= VERSION_51) {
                  map.put(input.readInt(), input.readSetOfStrings());
                } else {
                  map.put(input.readInt(), Collections.unmodifiableSet(input.readStringSet()));
                }
              }
              dvUpdateFiles = Collections.unmodifiableMap(map);
            }
            siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
          }
        }
        infos.add(siPerCommit);

        Version segmentVersion = info.getVersion();
        if (format < VERSION_53) {
          if (infos.minSegmentLuceneVersion == null
              || segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
            infos.minSegmentLuceneVersion = segmentVersion;
          }
        } else if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
          throw new CorruptIndexException(
              "segments file recorded minSegmentLuceneVersion="
                  + infos.minSegmentLuceneVersion
                  + " but segment="
                  + info
                  + " has older version="
                  + segmentVersion,
              input);
        }
      }

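      // opaque user data that was stored with this commit (e.g. via IndexWriter's commit data)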
      if (format >= VERSION_51) {
        infos.userData = input.readMapOfStrings();
      } else {
        infos.userData = Collections.unmodifiableMap(input.readStringStringMap());
      }

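      // 4.8+ files end with a codec footer that includes the checksum; older files store the raw
      // checksum value directly before EOF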
      if (format >= VERSION_48) {
        CodecUtil.checkFooter(input);
      } else {
        final long checksumNow = input.getChecksum();
        final long checksumThen = input.readLong();
        if (checksumNow != checksumThen) {
          throw new CorruptIndexException(
              "checksum failed (hardware problem?) : expected="
                  + Long.toHexString(checksumThen)
                  + " actual="
                  + Long.toHexString(checksumNow),
              input);
        }
        CodecUtil.checkEOF(input);
      }

      // LUCENE-6299: check we are in bounds
      if (totalDocs > IndexWriter.getActualMaxDocs()) {
        throw new CorruptIndexException(
            "Too many documents: an index cannot exceed "
                + IndexWriter.getActualMaxDocs()
                + " but readers have total maxDoc="
                + totalDocs,
            input);
      }

      return infos;
    }
  }