Example #1
  public void testRun() throws Exception {
    String wsName = "test";
    DefaultCacheManager dcm = new DefaultCacheManager();
    Cache<String, Metadata> metaCache = dcm.getCache("meta");
    Cache<String, byte[]> dataCache = dcm.getCache("data");

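    // GridFilesystem (an Infinispan utility) exposes a simple file-system API on top of the
    // two caches: dataCache holds the file contents, metaCache the file/directory metadata.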
    GridFilesystem gfs = new GridFilesystem(dataCache, metaCache);

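    // Build the Infinispan-backed Lucene Directory; metaCache is reused for both the
    // metadata and lock roles, while dataCache stores the 1 MB content chunks.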
    BuildContext bcontext =
        DirectoryBuilder.newDirectoryInstance(metaCache, dataCache, metaCache, wsName);
    bcontext.chunkSize(1024 * 1024);
    Directory directory = bcontext.create();
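    // Central/CentralConfig belong to the surrounding search framework (not part of the
    // Infinispan API); they index and query through the Directory created above.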
    Central central = CentralConfig.oldFromDir(directory).build();

    central.newIndexer().index(IndexJobs.create("/bleujin", 10));

    central.newSearcher().createRequest("").find().debugPrint();

    OutputStream output = gfs.getOutput("/test.data");
    IOUtil.copyNClose(new StringInputStream("hello bleujin"), output);

    Debug.line(IOUtil.toStringWithClose(gfs.getInput("/test.data")));

    central.newSearcher().createRequest("").find().debugPrint();

    File root = gfs.getFile("/");
    viewFile(root);

    dcm.stop();
  }
 @Override
 Directory createDirectory(Cache cache) {
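   // A single cache serves all three roles (metadata, chunks, locks); the overridden
   // SegmentReadLocker protects chunks of segments that are still being read from deletion.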
   return DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEX_NAME)
       .chunkSize(CHUNK_SIZE)
       .overrideSegmentReadLocker(new LocalLockMergingSegmentReadLocker(cache, INDEX_NAME))
       .create();
 }
 public void multipleFlushTest() throws IOException {
   final String filename = "longFile.writtenInMultipleFlushes";
   final int bufferSize = 300;
   Cache cache = cacheManager.getCache();
   cache.clear();
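   // The 13-byte chunk size is deliberately much smaller than the 300-byte write buffer,
   // so each flush below spans several chunks.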
   Directory dir =
       DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
           .chunkSize(13)
           .create();
   byte[] manyBytes = fillBytes(bufferSize);
   IndexOutput indexOutput = dir.createOutput(filename);
   for (int i = 0; i < 10; i++) {
     indexOutput.writeBytes(manyBytes, bufferSize);
     indexOutput.flush();
   }
   indexOutput.close();
   IndexInput input = dir.openInput(filename);
   final int finalSize = (10 * bufferSize);
   AssertJUnit.assertEquals(finalSize, input.length());
   final byte[] resultingBuffer = new byte[finalSize];
   input.readBytes(resultingBuffer, 0, finalSize);
   int index = 0;
   for (int i = 0; i < 10; i++) {
     for (int j = 0; j < bufferSize; j++)
       AssertJUnit.assertEquals(resultingBuffer[index++], manyBytes[j]);
   }
 }
 @Test
 public void testChunkBordersOnInfinispan() throws IOException {
   Cache cache = cacheManager.getCache();
   cache.clear();
   Directory dir =
       DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
           .chunkSize(13)
           .create();
   testChunkBorders(dir, cache);
   cache.clear();
 }
 @Test(expectedExceptions = IllegalArgumentException.class)
 public void testFailureOfOverrideWriteLocker() throws IOException {
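   // overrideWriteLocker(null) must be rejected; the expectedExceptions annotation above
   // asserts the IllegalArgumentException.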
   Directory dir = null;
   try {
     dir =
         DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEX_NAME)
             .chunkSize(BUFFER_SIZE)
             .overrideWriteLocker(null)
             .create();
   } finally {
     if (dir != null) dir.close();
   }
 }
 @Test(
     expectedExceptions = IllegalArgumentException.class,
     expectedExceptionsMessageRegExp = "chunkSize must be a positive integer")
 public void testInitWithInvalidChunkSize() throws IOException {
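   // chunkSize(0) is invalid; the annotation above expects an IllegalArgumentException
   // with the given message.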
   Directory dir = null;
   try {
     dir =
         DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEX_NAME)
             .chunkSize(0)
             .create();
   } finally {
     if (dir != null) dir.close();
   }
 }
  public void testWriteChunks() throws Exception {
    final int BUFFER_SIZE = 64;

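    // The chunk size equals BUFFER_SIZE (64 bytes), so the two-byte write below stays
    // entirely within chunk 0.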
    Cache cache = cacheManager.getCache();
    Directory dir =
        DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
            .chunkSize(BUFFER_SIZE)
            .create();

    IndexOutput io = dir.createOutput("MyNewFile.txt");

    io.writeByte((byte) 66);
    io.writeByte((byte) 69);

    io.flush();
    io.close();

    assert dir.fileExists("MyNewFile.txt");
    assert null != cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, BUFFER_SIZE));

    // test contents by reading:
    byte[] buf = new byte[9];
    IndexInput ii = dir.openInput("MyNewFile.txt");
    ii.readBytes(buf, 0, (int) ii.length());
    ii.close();

    assert new String(new byte[] {66, 69}).equals(new String(buf).trim());

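    // Overwrite the file with text longer than one chunk and inspect both chunks
    // directly in the cache.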
    String testText =
        "This is some rubbish again that will span more than one chunk - one hopes.  Who knows, maybe even three or four chunks.";
    io = dir.createOutput("MyNewFile.txt");
    io.seek(0);
    io.writeBytes(testText.getBytes(), 0, testText.length());
    io.close();
    // now compare the cached chunks against the text written above.
    byte[] chunk1 =
        (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 0, BUFFER_SIZE));
    byte[] chunk2 =
        (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, "MyNewFile.txt", 1, BUFFER_SIZE));
    assert null != chunk1;
    assert null != chunk2;

    assert testText.equals(new String(chunk1) + new String(chunk2).trim());

    dir.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
  }
 @Test
 public void testRunningOnMultipleCaches() throws IOException {
   assert metadataCache != chunkCache;
   assert chunkCache != lockCache;
   assert lockCache != metadataCache;
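   // Metadata, chunk data and locks each use a separate cache here, exercising the
   // multi-cache layout the builder supports.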
   Directory dir =
       DirectoryBuilder.newDirectoryInstance(metadataCache, chunkCache, lockCache, "testingIndex")
           .chunkSize(100)
           .create();
   writeTextToIndex(dir, 0, "hello world");
   assertTextIsFoundInIds(dir, "hello", 0);
   writeTextToIndex(dir, 1, "hello solar system");
   assertTextIsFoundInIds(dir, "hello", 0, 1);
   assertTextIsFoundInIds(dir, "system", 1);
   optimizeIndex(dir);
   assertTextIsFoundInIds(dir, "hello", 0, 1);
   dir.close();
 }
  @Test
  public void testGetIndexNameAndToString() throws IOException {
    Cache cache = cacheManager.getCache();
    Directory dir = null;

    try {

      dir =
          DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEX_NAME)
              .chunkSize(BUFFER_SIZE)
              .create();
      AssertJUnit.assertEquals(INDEX_NAME, ((DirectoryLuceneV4) dir).getIndexName());
      AssertJUnit.assertEquals(
          "InfinispanDirectory{indexName='" + INDEX_NAME + "'}", dir.toString());

    } finally {
      if (dir != null) dir.close();
    }
  }
  @Test
  public void testWriteChunksDefaultChunks() throws Exception {
    Cache cache = cacheManager.getCache();
    Directory dir = DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME).create();
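    // No chunkSize(...) override here, so the directory falls back to
    // DirectoryBuilderImpl.DEFAULT_BUFFER_SIZE (used again for the ChunkCacheKey below).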

    final String testText = "This is some rubbish";
    final byte[] testTextAsBytes = testText.getBytes();

    IndexOutput io = dir.createOutput("MyNewFile.txt");

    io.writeByte((byte) 1);
    io.writeByte((byte) 2);
    io.writeByte((byte) 3);
    io.writeBytes(testTextAsBytes, testTextAsBytes.length);
    io.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);

    FileCacheKey fileCacheKey = new FileCacheKey(INDEXNAME, "MyNewFile.txt");
    assert null != cache.get(fileCacheKey);
    FileMetadata metadata = (FileMetadata) cache.get(fileCacheKey);
    AssertJUnit.assertEquals(testTextAsBytes.length + 3, metadata.getSize());
    assert null
        != cache.get(
            new ChunkCacheKey(
                INDEXNAME, "MyNewFile.txt", 0, DirectoryBuilderImpl.DEFAULT_BUFFER_SIZE));

    // test contents by reading:
    IndexInput ii = dir.openInput("MyNewFile.txt");
    assert ii.readByte() == 1;
    assert ii.readByte() == 2;
    assert ii.readByte() == 3;
    byte[] buf = new byte[testTextAsBytes.length];

    ii.readBytes(buf, 0, testTextAsBytes.length);
    ii.close();

    assert testText.equals(new String(buf).trim());

    dir.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
  }
  public void testOverrideWriteLocker() throws IOException {
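    // Replaces the default write locker with a no-op Lucene LockFactory; the directory
    // should still be created and report an empty file listing.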
    Directory dir = null;
    try {
      dir =
          DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEX_NAME)
              .chunkSize(BUFFER_SIZE)
              .overrideWriteLocker(
                  new LockFactory() {
                    @Override
                    public Lock makeLock(String lockName) {
                      return null;
                    }

                    @Override
                    public void clearLock(String lockName) throws IOException {}
                  })
              .create();

      AssertJUnit.assertEquals(0, dir.listAll().length);
    } finally {
      if (dir != null) dir.close();
    }
  }
  @Test
  public void testReadWholeFile() throws IOException {
    final int BUFFER_SIZE = 64;

    Cache cache = cacheManager.getCache();
    Directory dir =
        DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
            .chunkSize(BUFFER_SIZE)
            .create();

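    // verifyOnBuffer is a helper not shown here; it presumably writes a file of the given
    // size and reads it back, checking the chunk layout against the cache.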
    verifyOnBuffer("SingleChunk.txt", 61, BUFFER_SIZE, cache, dir, 15);

    final int VERY_BIG_FILE_SIZE = 10000;
    assert BUFFER_SIZE < VERY_BIG_FILE_SIZE;
    verifyOnBuffer("MultipleChunks.txt", VERY_BIG_FILE_SIZE, BUFFER_SIZE, cache, dir, 33);

    final int LAST_CHUNK_COMPLETELY_FILLED_FILE_SIZE = 256;
    assert (LAST_CHUNK_COMPLETELY_FILLED_FILE_SIZE % BUFFER_SIZE) == 0;
    verifyOnBuffer(
        "LastChunkFilled.txt", LAST_CHUNK_COMPLETELY_FILLED_FILE_SIZE, BUFFER_SIZE, cache, dir, 11);
    assertHasNChunks(4, cache, INDEXNAME, "LastChunkFilled.txt.bak", BUFFER_SIZE);
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);

    final int LAST_CHUNK_WITH_LONELY_BYTE_FILE_SIZE = 257;
    assert (LAST_CHUNK_WITH_LONELY_BYTE_FILE_SIZE % BUFFER_SIZE) == 1;
    verifyOnBuffer(
        "LonelyByteInLastChunk.txt",
        LAST_CHUNK_WITH_LONELY_BYTE_FILE_SIZE,
        BUFFER_SIZE,
        cache,
        dir,
        12);
    assertHasNChunks(5, cache, INDEXNAME, "LonelyByteInLastChunk.txt.bak", BUFFER_SIZE);

    dir.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
  }
  @Test
  public void testReadRandomSampleFile() throws IOException {
    final int BUFFER_SIZE = 64;

    Cache cache = cacheManager.getCache();
    Directory dir =
        DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
            .chunkSize(BUFFER_SIZE)
            .create();

    final int FILE_SIZE = 1000;
    assert BUFFER_SIZE < FILE_SIZE;
    createFileWithRepeatableContent(dir, "RandomSampleFile.txt", FILE_SIZE);

    IndexInput indexInput = dir.openInput("RandomSampleFile.txt");
    assert indexInput.length() == FILE_SIZE;
    RepeatableLongByteSequence bytesGenerator = new RepeatableLongByteSequence();
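    // RepeatableLongByteSequence yields a deterministic byte stream, so the content written
    // by createFileWithRepeatableContent can be regenerated here for comparison.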

    Random r = new Random();
    long seekPoint = 0;
    // Read bytes at random positions and compare each against the expected value
    // from the repeatable sequence
    for (int i = 0; i < FILE_SIZE; i++) {
      if (seekPoint == i) {
        byte expectedByte = bytesGenerator.nextByte();
        byte actualByte = indexInput.readByte();
        assert expectedByte == actualByte;
        seekPoint = indexInput.getFilePointer() + r.nextInt(10);
        indexInput.seek(seekPoint);
      } else {
        bytesGenerator.nextByte();
      }
    }
    indexInput.close();
    dir.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
  }
Example #14
  @Override
  public void start() {
    try {

      String name = ctx.getCache().getName();
      EmbeddedCacheManager cacheManager = ctx.getCache().getCacheManager();
      Cache<?, ?> metaCache = cacheManager.getCache(name + "-meta");
      Cache<?, ?> dataCache = cacheManager.getCache(name + "-chunk");
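      // Dedicated per-store caches: "<name>-meta" for metadata/locks and "<name>-chunk"
      // for the data chunks backing the Directory built below.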

      BuildContext bcontext =
          DirectoryBuilder.newDirectoryInstance(metaCache, dataCache, metaCache, name);
      bcontext.chunkSize(1024 * 1024);
      Directory directory = bcontext.create();
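      // Central again comes from the surrounding search framework; WithinThreadExecutor
      // runs its index jobs on the calling thread rather than a separate worker pool.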
      this.central =
          CentralConfig.oldFromDir(directory)
              .indexConfigBuilder()
              .executorService(new WithinThreadExecutor())
              .build();

      this.configuration.store(this);
    } catch (Exception e) {
      throw new PersistenceException(e);
    }
  }
  @Test
  public void testWriteUsingSeekMethod() throws IOException {
    final int BUFFER_SIZE = 64;

    Cache cache = cacheManager.getCache();
    Directory dir =
        DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
            .chunkSize(BUFFER_SIZE)
            .create();

    String fileName = "SomeText.txt";
    IndexOutput io = dir.createOutput(fileName);
    RepeatableLongByteSequence bytesGenerator = new RepeatableLongByteSequence();
    // Write a deterministic, repeatable byte sequence
    final int REPEATABLE_BUFFER_SIZE = 1501;
    for (int i = 0; i < REPEATABLE_BUFFER_SIZE; i++) {
      io.writeByte(bytesGenerator.nextByte());
    }
    io.flush();
    assert io.length() == REPEATABLE_BUFFER_SIZE;

    // Text used to overwrite parts of the repeatable content
    final String someText = "This is some text";
    final byte[] someTextAsBytes = someText.getBytes();
    // Four seek positions, listed in random order, where someText is written: the start of
    // the file, the end of the file, inside a single chunk, and across a chunk boundary
    final int[] pointers = {0, 635, REPEATABLE_BUFFER_SIZE, 135};
    for (int i = 0; i < pointers.length; i++) {
      io.seek(pointers[i]);
      io.writeBytes(someTextAsBytes, someTextAsBytes.length);
    }

    io.close();
    bytesGenerator.reset();
    final long finalSize = REPEATABLE_BUFFER_SIZE + someTextAsBytes.length;
    assert io.length() == finalSize;
    assert io.length()
        == DirectoryIntegrityCheck.deepCountFileSize(new FileCacheKey(INDEXNAME, fileName), cache);

    int indexPointer = 0;
    Arrays.sort(pointers);
    byte[] buffer = null;
    int chunkIndex = -1;
    // Verify the stored stream equals the repeatable sequence, with someText spliced in
    // at each of the seek positions above
    for (int i = 0; i < REPEATABLE_BUFFER_SIZE + someTextAsBytes.length; i++) {
      if (i % BUFFER_SIZE == 0) {
        buffer =
            (byte[]) cache.get(new ChunkCacheKey(INDEXNAME, fileName, ++chunkIndex, BUFFER_SIZE));
      }

      byte predictableByte = bytesGenerator.nextByte();
      if (i < pointers[indexPointer]) {
        // Assert predictable text
        AssertJUnit.assertEquals(predictableByte, buffer[i % BUFFER_SIZE]);
      } else if (pointers[indexPointer] <= i
          && i < pointers[indexPointer] + someTextAsBytes.length) {
        // Assert someText
        AssertJUnit.assertEquals(
            someTextAsBytes[i - pointers[indexPointer]], buffer[i % BUFFER_SIZE]);
      }

      if (i == pointers[indexPointer] + someTextAsBytes.length) {
        // Change pointer
        indexPointer++;
      }
    }

    dir.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
  }
  @Test(enabled = false)
  public void testReadChunks() throws Exception {
    final int BUFFER_SIZE = 64;

    Cache cache = cacheManager.getCache();
    Directory dir =
        DirectoryBuilder.newDirectoryInstance(cache, cache, cache, INDEXNAME)
            .chunkSize(BUFFER_SIZE)
            .create();

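    // Instead of going through IndexOutput, this test plants FileCacheKey and ChunkCacheKey
    // entries directly in the cache, then checks that reads, renames and deletes through the
    // Directory stay consistent with them.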
    // create the file metadata entries directly in the cache
    FileMetadata file1 = new FileMetadata(5);
    FileCacheKey key1 = new FileCacheKey(INDEXNAME, "Hello.txt");
    cache.put(key1, file1);

    FileMetadata file2 = new FileMetadata(5);
    FileCacheKey key2 = new FileCacheKey(INDEXNAME, "World.txt");
    cache.put(key2, file2);

    // byte array for Hello.txt
    String helloText = "Hello world.  This is some text.";
    cache.put(new ChunkCacheKey(INDEXNAME, "Hello.txt", 0, BUFFER_SIZE), helloText.getBytes());

    // byte array for World.txt - should be in at least 2 chunks.
    String worldText =
        "This String should contain more than sixty four characters but less than one hundred and twenty eight.";
    assert worldText.getBytes().length > BUFFER_SIZE;
    assert worldText.getBytes().length < (2 * BUFFER_SIZE);

    byte[] buf = new byte[BUFFER_SIZE];
    System.arraycopy(worldText.getBytes(), 0, buf, 0, BUFFER_SIZE);
    cache.put(new ChunkCacheKey(INDEXNAME, "World.txt", 0, BUFFER_SIZE), buf);

    String part1 = new String(buf);
    buf = new byte[BUFFER_SIZE];
    System.arraycopy(worldText.getBytes(), BUFFER_SIZE, buf, 0, worldText.length() - BUFFER_SIZE);
    cache.put(new ChunkCacheKey(INDEXNAME, "World.txt", 1, BUFFER_SIZE), buf);
    String part2 = new String(buf);

    // make sure the generated bytes do add up!
    AssertJUnit.assertEquals(part1 + part2.trim(), worldText);

    file1.setSize(helloText.length());
    file2.setSize(worldText.length());

    Set<String> s = new HashSet<String>();
    s.add("Hello.txt");
    s.add("World.txt");
    Set other = new HashSet(Arrays.asList(dir.listAll()));

    // the directory listing should report exactly these two files.
    AssertJUnit.assertEquals(s, other);

    IndexInput ii = dir.openInput("Hello.txt");

    assert ii.length() == helloText.length();

    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    for (int i = 0; i < ii.length(); i++) {
      baos.write(ii.readByte());
    }

    assert new String(baos.toByteArray()).equals(helloText);

    ii = dir.openInput("World.txt");

    assert ii.length() == worldText.length();

    baos = new ByteArrayOutputStream();

    for (int i = 0; i < ii.length(); i++) {
      baos.write(ii.readByte());
    }

    assert new String(baos.toByteArray()).equals(worldText);

    // now with buffered reading

    ii = dir.openInput("Hello.txt");

    assert ii.length() == helloText.length();

    baos = new ByteArrayOutputStream();

    long toRead = ii.length();
    while (toRead > 0) {
      buf = new byte[19]; // suitably arbitrary
      int bytesRead = (int) Math.min(toRead, 19);
      ii.readBytes(buf, 0, bytesRead);
      toRead = toRead - bytesRead;
      baos.write(buf, 0, bytesRead);
    }

    assert new String(baos.toByteArray()).equals(helloText);

    ii = dir.openInput("World.txt");

    assert ii.length() == worldText.length();

    baos = new ByteArrayOutputStream();

    toRead = ii.length();
    while (toRead > 0) {
      buf = new byte[19]; // suitably arbitrary
      int bytesRead = (int) Math.min(toRead, 19);
      ii.readBytes(buf, 0, bytesRead);
      toRead = toRead - bytesRead;
      baos.write(buf, 0, bytesRead);
    }

    assert new String(baos.toByteArray()).equals(worldText);

    dir.deleteFile("Hello.txt");
    assert null == cache.get(new FileCacheKey(INDEXNAME, "Hello.txt"));
    assert null == cache.get(new ChunkCacheKey(INDEXNAME, "Hello.txt", 0, BUFFER_SIZE));

    Object ob1 = cache.get(new FileCacheKey(INDEXNAME, "World.txt"));
    Object ob2 = cache.get(new ChunkCacheKey(INDEXNAME, "World.txt", 0, BUFFER_SIZE));
    Object ob3 = cache.get(new ChunkCacheKey(INDEXNAME, "World.txt", 1, BUFFER_SIZE));

    ((DirectoryExtensions) dir).renameFile("World.txt", "HelloWorld.txt");
    assert null == cache.get(new FileCacheKey(INDEXNAME, "Hello.txt"));
    assert null == cache.get(new ChunkCacheKey(INDEXNAME, "Hello.txt", 0, BUFFER_SIZE));
    assert null == cache.get(new ChunkCacheKey(INDEXNAME, "Hello.txt", 1, BUFFER_SIZE));

    assert cache.get(new FileCacheKey(INDEXNAME, "HelloWorld.txt")).equals(ob1);
    assert cache.get(new ChunkCacheKey(INDEXNAME, "HelloWorld.txt", 0, BUFFER_SIZE)).equals(ob2);
    assert cache.get(new ChunkCacheKey(INDEXNAME, "HelloWorld.txt", 1, BUFFER_SIZE)).equals(ob3);

    // test that the file contents survive a rename
    ii = dir.openInput("HelloWorld.txt");

    assert ii.length() == worldText.length();

    baos = new ByteArrayOutputStream();

    toRead = ii.length();
    while (toRead > 0) {
      buf = new byte[19]; // suitably arbitrary
      int bytesRead = (int) Math.min(toRead, 19);
      ii.readBytes(buf, 0, bytesRead);
      toRead = toRead - bytesRead;
      baos.write(buf, 0, bytesRead);
    }

    assert new String(baos.toByteArray()).equals(worldText);

    dir.close();
    DirectoryIntegrityCheck.verifyDirectoryStructure(cache, INDEXNAME);
  }