@Test(timeout = 3600000)
  public void testOpenCreate() throws IOException {
    // The method already declares IOException, so simply let failures
    // propagate: JUnit then reports the full stack trace. The previous
    // catch-and-fail(e.getMessage()) pattern discarded the cause.
    createAndReadFileTest(1024); // small file
    createAndReadFileTest(5 * 1024 * 1024); // medium file
    createAndReadFileTest(20 * 1024 * 1024); // large single-part file

    /*
    Enable to test the multipart upload
    try {
      createAndReadFileTest((long)6 * 1024 * 1024 * 1024);
    } catch (IOException e) {
      fail(e.getMessage());
    }
    */
  }
Example #2
0
    @Override
    public Boolean call(JobContext jc) throws Exception {
      // Accessing the streaming context before it is created must fail.
      try {
        jc.streamingctx();
        fail("Access before creation: Should throw IllegalStateException");
      } catch (IllegalStateException expected) {
        // expected
      }
      // Stopping the streaming context before it is created must fail too.
      try {
        jc.stopStreamingCtx();
        fail("Stop before creation: Should throw IllegalStateException");
      } catch (IllegalStateException expected) {
        // expected
      }
      // After a create/stop cycle, any further access must fail.
      // (Previously the return value of the first streamingctx() call was
      // stored in an unused local; the bare fail() also had no message.)
      try {
        jc.createStreamingContext(1000L);
        jc.streamingctx();
        jc.stopStreamingCtx();
        jc.streamingctx();
        fail("Access after stop: Should throw IllegalStateException");
      } catch (IllegalStateException expected) {
        // expected
      }

      // A fresh create/access/stop cycle must succeed and yield a context.
      jc.createStreamingContext(1000L);
      JavaStreamingContext streamingContext = jc.streamingctx();
      jc.stopStreamingCtx();
      return streamingContext != null;
    }
 @Test
 public void testCompressorDecompressorWithExeedBufferLimit() {
   // Exercise the zlib codec pair with an oversized internal buffer limit.
   final int bufferSize = 100 * 1024;
   final byte[] payload = generate(bufferSize);
   try {
     // Build the pair up front instead of inlining them into the chain.
     ZlibCompressor compressor =
         new ZlibCompressor(
             org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel
                 .BEST_COMPRESSION,
             CompressionStrategy.DEFAULT_STRATEGY,
             org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader
                 .DEFAULT_HEADER,
             bufferSize);
     ZlibDecompressor decompressor =
         new ZlibDecompressor(
             org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader
                 .DEFAULT_HEADER,
             bufferSize);
     CompressDecompressTester.of(payload)
         .withCompressDecompressPair(compressor, decompressor)
         .withTestCases(
             ImmutableSet.of(
                 CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                 CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                 CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                 CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
         .test();
   } catch (Exception ex) {
     fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
   }
 }
  @Test
  public void testZlibCompressDecompress() {
    // Round-trip 64 KiB of generated data through the zlib codec and check
    // byte counters, compression gain, and exact reconstruction.
    // (Previously rawData/rawDataSize were initialized to null/0 and then
    // immediately reassigned; also fixed typos in assertion messages.)
    int rawDataSize = 1024 * 64;
    byte[] rawData = generate(rawDataSize);
    try {
      ZlibCompressor compressor = new ZlibCompressor();
      ZlibDecompressor decompressor = new ZlibDecompressor();
      assertFalse("testZlibCompressDecompress finished error", compressor.finished());
      compressor.setInput(rawData, 0, rawData.length);
      // No input is consumed until compress() is actually driven.
      assertTrue(
          "testZlibCompressDecompress getBytesRead before error", compressor.getBytesRead() == 0);
      compressor.finish();

      byte[] compressedResult = new byte[rawDataSize];
      int cSize = compressor.compress(compressedResult, 0, rawDataSize);
      assertTrue(
          "testZlibCompressDecompress getBytesRead after error",
          compressor.getBytesRead() == rawDataSize);
      // generate() output must be compressible, so the result is strictly smaller.
      assertTrue(
          "testZlibCompressDecompress compressed size not less than original size",
          cSize < rawDataSize);
      decompressor.setInput(compressedResult, 0, cSize);
      byte[] decompressedBytes = new byte[rawDataSize];
      decompressor.decompress(decompressedBytes, 0, decompressedBytes.length);
      assertArrayEquals(
          "testZlibCompressDecompress arrays not equals ", rawData, decompressedBytes);
      compressor.reset();
      decompressor.reset();
    } catch (IOException ex) {
      fail("testZlibCompressDecompress ex !!!" + ex);
    }
  }
  @Test(timeout = 20000)
  public void testDelete() throws IOException {
    // Test deleting an empty directory
    assertTrue(fs.mkdirs(path("/tests3a/d")));
    assertTrue(fs.delete(path("/tests3a/d"), true));
    assertFalse(fs.exists(path("/tests3a/d")));

    // Test deleting a deep empty directory
    assertTrue(fs.mkdirs(path("/tests3a/e/f/g/h")));
    assertTrue(fs.delete(path("/tests3a/e/f/g"), true));
    assertFalse(fs.exists(path("/tests3a/e/f/g/h")));
    assertFalse(fs.exists(path("/tests3a/e/f/g")));
    // The parent of the deleted subtree must survive.
    assertTrue(fs.exists(path("/tests3a/e/f")));

    // Test delete of just a file
    writeFile(path("/tests3a/f/f/file"), 1000);
    assertTrue(fs.exists(path("/tests3a/f/f/file")));
    assertTrue(fs.delete(path("/tests3a/f/f/file"), false));
    assertFalse(fs.exists(path("/tests3a/f/f/file")));

    // Test delete of a path with files in various directories
    writeFile(path("/tests3a/g/h/i/file"), 1000);
    assertTrue(fs.exists(path("/tests3a/g/h/i/file")));
    writeFile(path("/tests3a/g/h/j/file"), 1000);
    assertTrue(fs.exists(path("/tests3a/g/h/j/file")));
    try {
      // Non-recursive delete of a non-empty directory is expected to throw
      // IOException. If delete() returns normally, either the assertFalse
      // (returned true) or the fail() below (returned false) fails the test.
      assertFalse(fs.delete(path("/tests3a/g/h"), false));
      fail("Expected delete to fail with recursion turned off");
    } catch (IOException e) {
      // expected: recursive flag was off for a non-empty directory
    }
    // Nothing was removed by the failed non-recursive delete.
    assertTrue(fs.exists(path("/tests3a/g/h/j/file")));
    assertTrue(fs.delete(path("/tests3a/g/h"), true));
    assertFalse(fs.exists(path("/tests3a/g/h/j")));
  }
 private static void assertCanCreateWritableFile(File destination) throws IOException {
   // Guard clause: bail out immediately when the file cannot be created.
   if (!destination.createNewFile()) {
     fail(String.format("Cannot create file: %s", destination));
     return;
   }
   try {
     if (!destination.canWrite()) {
       fail(String.format("Cannot write to %s", destination));
     } else {
       // Prove writability by actually writing a small payload.
       try (FileWriter writer = new FileWriter(destination)) {
         writer.append("dummy test output");
       }
     }
   } finally {
     // Always remove the probe file, even when an assertion failed.
     destination.delete();
   }
 }
 @Test
 public void testZlibDirectCompressDecompress() {
   // Round-trip a spread of payload sizes through the direct zlib path.
   int[] sizes = {1, 4, 16, 4 * 1024, 64 * 1024, 128 * 1024, 1024 * 1024};
   assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
   try {
     for (int payloadSize : sizes) {
       compressDecompressLoop(payloadSize);
     }
   } catch (IOException ex) {
     fail("testZlibDirectCompressDecompress ex !!!" + ex);
   }
 }
Example #8
0
  @Test
  public void whenChannelOverflowsThrowException() throws Exception {
    // Only meaningful for bounded channels configured to throw on overflow.
    assumeThat(policy, is(OverflowPolicy.THROW));
    assumeThat(mailboxSize, greaterThan(0));

    final Channel<Integer> ch = newChannel();

    int i = 0;
    try {
      // Keep sending until the bounded mailbox overflows.
      while (i < 10) {
        ch.send(i);
        i++;
      }
      fail();
    } catch (QueueCapacityExceededException e) {
      // i holds the number of sends that succeeded before overflow.
      System.out.println("i = " + i);
    }
  }
  @Test
  public void testZlibCompressorDecompressor() {
    // Round-trip 44 KiB of generated data through a default-configured
    // ZlibCompressor/ZlibDecompressor pair across the standard strategies.
    try {
      int SIZE = 44 * 1024;
      byte[] rawData = generate(SIZE);

      CompressDecompressTester.of(rawData)
          .withCompressDecompressPair(new ZlibCompressor(), new ZlibDecompressor())
          .withTestCases(
              ImmutableSet.of(
                  CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                  CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                  CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                  CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
          .test();
    } catch (Exception ex) {
      // Fixed: the message previously said "testCompressorDecompressor",
      // misattributing failures to a different test.
      fail("testZlibCompressorDecompressor error !!!" + ex);
    }
  }
  public void testConnectCancellation(Bootstrap cb) throws Throwable {
    // Starts a connect to a known-bad address (4s connect timeout) and
    // verifies the pending future can be cancelled before it completes.
    cb.handler(new TestHandler()).option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 4000);
    ChannelFuture future = cb.connect(BAD_HOST, BAD_PORT);
    try {
      if (future.await(1000)) {
        // The future completed within 1s: success means the "bad" host was
        // reachable (environment problem); otherwise surface the real cause.
        if (future.isSuccess()) {
          fail("A connection attempt to " + BAD_HOST + " must not succeed.");
        } else {
          throw future.cause();
        }
      }

      if (future.cancel(true)) {
        // Cancelling must close the channel promptly and mark the future.
        assertThat(future.channel().closeFuture().await(500), is(true));
        assertThat(future.isCancelled(), is(true));
      } else {
        // Cancellation not supported by the transport.
      }
    } finally {
      // Always release the channel, whichever path was taken.
      future.channel().close();
    }
  }
 @Test
 public void testZlibCompressorDecompressorWithConfiguration() {
   // Verifies the factory-provided native zlib pair survives repeated
   // round-trips and a reinit() when native libs are requested via config.
   Configuration conf = new Configuration();
   conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
   if (ZlibFactory.isNativeZlibLoaded(conf)) {
     byte[] rawData;
     int tryNumber = 5;
     int BYTE_SIZE = 10 * 1024;
     Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
     rawData = generate(BYTE_SIZE);
     try {
       // Repeated round-trips check the pair can be reused without reinit.
       for (int i = 0; i < tryNumber; i++)
         compressDecompressZlib(
             rawData, (ZlibCompressor) zlibCompressor, (ZlibDecompressor) zlibDecompressor);
       // reinit() after use must not throw.
       zlibCompressor.reinit(conf);
     } catch (Exception ex) {
       fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex);
     }
   } else {
     // Native zlib was requested but is not loaded: this assert is guaranteed
     // false here, so the test fails with a diagnostic message instead of
     // silently skipping.
     assertTrue(
         "ZlibFactory is using native libs against request", ZlibFactory.isNativeZlibLoaded(conf));
   }
 }
  @Test
  public void testBuiltInGzipDecompressorExceptions() {
    // Invalid setInput arguments must raise the documented runtime exceptions.
    BuiltInGzipDecompressor decompressor = new BuiltInGzipDecompressor();
    try {
      decompressor.setInput(null, 0, 1);
    } catch (NullPointerException ex) {
      // expected
    } catch (Exception ex) {
      fail("testBuiltInGzipDecompressorExceptions npe error " + ex);
    }

    try {
      decompressor.setInput(new byte[] {0}, 0, -1);
    } catch (ArrayIndexOutOfBoundsException ex) {
      // expected
    } catch (Exception ex) {
      fail("testBuiltInGzipDecompressorExceptions aioob error" + ex);
    }

    // A fresh decompressor has consumed nothing and has no pending output.
    assertTrue("decompresser.getBytesRead error", decompressor.getBytesRead() == 0);
    assertTrue("decompresser.getRemaining error", decompressor.getRemaining() == 0);
    decompressor.reset();
    decompressor.end();

    // Each malformed gzip header below must be rejected with an IOException.
    // (The four near-identical try blocks were factored into a helper; the
    // explicit length of 11 preserves the original reset() calls, which read
    // only 11 bytes even where the array holds 12.)
    // invalid 0 and 1 bytes, magic must be 31, -117
    assertRejectsCorruptGzipHeader(
        new byte[] {0, 0, 1, 1, 1, 1, 11, 1, 1, 1, 1}, 11,
        "invalid 0 and 1 byte in gzip stream");
    // invalid 2 byte, compression method must be 8 (deflate)
    assertRejectsCorruptGzipHeader(
        new byte[] {31, -117, 7, 1, 1, 1, 1, 11, 1, 1, 1, 1}, 11,
        "invalid 2 byte in gzip stream");
    // invalid 3 byte (flags with reserved bits set)
    assertRejectsCorruptGzipHeader(
        new byte[] {31, -117, 8, -32, 1, 1, 1, 11, 1, 1, 1, 1}, 11,
        "invalid 3 byte in gzip stream");
    // invalid 3 byte: FEXTRA flag set but no valid extra field follows
    assertRejectsCorruptGzipHeader(
        new byte[] {31, -117, 8, 4, 1, 1, 1, 11, 1, 1, 1, 1}, 11,
        "invalid 3 byte make hasExtraField");
  }

  /**
   * Feeds {@code length} bytes of {@code data} through a DecompressorStream backed by a fresh
   * BuiltInGzipDecompressor and asserts that reading throws IOException (the expected rejection
   * of a corrupt gzip header); any other exception fails the test with {@code failMessage}.
   */
  private static void assertRejectsCorruptGzipHeader(byte[] data, int length, String failMessage) {
    try {
      byte[] buffer = new byte[1 * 1024];
      Decompressor decompressor = new BuiltInGzipDecompressor();
      DataInputBuffer gzbuf = new DataInputBuffer();
      // NOTE(review): the stream is intentionally left unclosed, matching the
      // original test; the buffers are in-memory, so nothing outlives the test.
      InputStream decompStream = new DecompressorStream(gzbuf, decompressor);
      gzbuf.reset(data, length);
      decompStream.read(buffer);
    } catch (IOException ioex) {
      // expected: corrupt header rejected
    } catch (Exception ex) {
      fail(failMessage + ex);
    }
  }