// Make sure we don't somehow use more than 1 descriptor
  // when reading a CFS with many subs:
  public void testManySubFiles() throws IOException {

    final int count = 10000;
    final Directory d = newFSDirectory(_TestUtil.getTempDir("CFSManySubFiles"));

    // Write one tiny file per index; each file holds its own index as a single byte.
    for (int i = 0; i < count; i++) {
      final IndexOutput out = d.createOutput("file." + i);
      out.writeByte((byte) i);
      out.close();
    }

    // Pack every file into a single compound file.
    final CompoundFileWriter writer = new CompoundFileWriter(d, "c.cfs");
    for (int i = 0; i < count; i++) {
      writer.addFile("file." + i);
    }
    writer.close();

    // Open all sub-files up front, then read each one, then close them all.
    final CompoundFileReader reader = new CompoundFileReader(d, "c.cfs");
    final IndexInput[] inputs = new IndexInput[count];
    for (int i = 0; i < count; i++) {
      inputs[i] = reader.openInput("file." + i);
    }

    for (int i = 0; i < count; i++) {
      assertEquals((byte) i, inputs[i].readByte());
    }

    for (int i = 0; i < count; i++) {
      inputs[i].close();
    }
    reader.close();
    d.close();
  }
  /** This test creates compound file based on two files. */
  public void testTwoFiles() throws IOException {
    createSequenceFile(dir, "d1", (byte) 0, 15);
    createSequenceFile(dir, "d2", (byte) 0, 114);

    CompoundFileWriter csw = new CompoundFileWriter(dir, "d.csf");
    csw.addFile("d1");
    csw.addFile("d2");
    csw.close();

    CompoundFileReader csr = new CompoundFileReader(dir, "d.csf");
    IndexInput expected = dir.openInput("d1");
    IndexInput actual = csr.openInput("d1");
    assertSameStreams("d1", expected, actual);
    assertSameSeekBehavior("d1", expected, actual);
    expected.close();
    actual.close();

    expected = dir.openInput("d2");
    actual = csr.openInput("d2");
    assertSameStreams("d2", expected, actual);
    assertSameSeekBehavior("d2", expected, actual);
    expected.close();
    actual.close();
    csr.close();
  }
 /**
  * Setup a larger compound file with a number of components, each of which is a sequential file
  * (so that we can easily tell that we are reading in the right byte). The method sets up 20
  * files - f0 to f19, the size of each file is 2000 bytes.
  */
 private void setUp_2() throws IOException {
   CompoundFileWriter cw = new CompoundFileWriter(dir, "f.comp");
   for (int i = 0; i < 20; i++) {
     createSequenceFile(dir, "f" + i, (byte) 0, 2000);
     cw.addFile("f" + i);
   }
   cw.close();
 }
  /**
   * This test creates a compound file based on a large number of files of various length. The file
   * content is generated randomly. The sizes range from 0 to 1Mb. Some of the sizes are selected to
   * test the buffering logic in the file reading code. For this the chunk variable is set to the
   * length of the buffer used internally by the compound file logic.
   */
  public void testRandomFiles() throws IOException {
    // Setup the test segment
    final String segment = "test";
    final int chunk = 1024; // internal buffer size used by the stream

    // Suffix/size pairs; several sizes straddle the chunk boundary on purpose.
    final String[] suffixes = {
      ".zero", ".one", ".ten", ".hundred",
      ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7"
    };
    final int[] sizes = {
      0, 1, 10, 100,
      chunk, chunk - 1, chunk + 1, 3 * chunk, 3 * chunk - 1, 3 * chunk + 1, 1000 * chunk
    };
    for (int i = 0; i < suffixes.length; i++) {
      createRandomFile(dir, segment + suffixes[i], sizes[i]);
    }

    // Setup extraneous files that must stay out of the compound file.
    createRandomFile(dir, "onetwothree", 100);
    createRandomFile(dir, segment + ".notIn", 50);
    createRandomFile(dir, segment + ".notIn2", 51);

    // Now test
    CompoundFileWriter writer = new CompoundFileWriter(dir, "test.cfs");
    for (String suffix : suffixes) {
      writer.addFile(segment + suffix);
    }
    writer.close();

    // Every sub-stream must match the original file byte-for-byte and under seeks.
    CompoundFileReader reader = new CompoundFileReader(dir, "test.cfs");
    for (String suffix : suffixes) {
      IndexInput check = dir.openInput(segment + suffix);
      IndexInput test = reader.openInput(segment + suffix);
      assertSameStreams(suffix, check, test);
      assertSameSeekBehavior(suffix, check, test);
      test.close();
      check.close();
    }
    reader.close();
  }
  /**
   * This test creates compound file based on a single file. Files of different sizes are tested: 0,
   * 1, 10, 100 bytes.
   */
  public void testSingleFile() throws IOException {
    final int[] sizes = {0, 1, 10, 100};
    for (int size : sizes) {
      String name = "t" + size;
      createSequenceFile(dir, name, (byte) 0, size);

      // Wrap the single file in its own compound file.
      CompoundFileWriter csw = new CompoundFileWriter(dir, name + ".cfs");
      csw.addFile(name);
      csw.close();

      // The wrapped stream must be indistinguishable from the original.
      CompoundFileReader csr = new CompoundFileReader(dir, name + ".cfs");
      IndexInput expected = dir.openInput(name);
      IndexInput actual = csr.openInput(name);
      assertSameStreams(name, expected, actual);
      assertSameSeekBehavior(name, expected, actual);
      expected.close();
      actual.close();
      csr.close();
    }
  }