Example No. 1
  /**
   * 1. JavaCV face detection on an image in a SequenceFile read from s3://
   * 2. JavaCV face detection on an image in a SequenceFile read from hdfs://
   *
   * <p>Until writing the SequenceFile directly to S3 is implemented, stage it manually:
   * $hadoop fs -cp hdfs://<path>/tmp/lena.png.seq s3://ori-tmp/lena.png.seq
   *
   * <p>In Eclipse, run with: -Djava.library.path=/home/heq/hadoop-2.2.0/lib/native
   *
   * @throws Exception
   */
  @Test
  public void testJavaCVFaceDetectionFromS3HDFS() throws Exception {
    String inputURI =
        "file://" + new File(this.getClass().getResource("/lena.png").getFile()).getAbsolutePath();
    String s3URI = "s3n://ori-tmp/lena.png.seq";
    Util.writeToSequenceFile(inputURI, s3URI, new SnappyCodec());
    byte[] pngbytes = Util.readSequenceFileFromS3(s3URI);
    BufferedImage rawimage = ImageIO.read(new ByteArrayInputStream(pngbytes));
    List<int[]> faces = OpenCV.detectFace(rawimage);
    assertEquals(1, faces.size());

    String hdfsURI = hadoopMaster + "/tmp/lena.png.seq";
    Util.writeToSequenceFile(inputURI, hdfsURI, new SnappyCodec());
    pngbytes = Util.readSequenceFileFromHDFS(hdfsURI);
    rawimage = ImageIO.read(new ByteArrayInputStream(pngbytes));
    faces = OpenCV.detectFace(rawimage);
    assertEquals(1, faces.size());
  }
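
For reference, here is a minimal sketch of how a read helper such as Util.readSequenceFileFromHDFS could be built on the plain Hadoop 2.x SequenceFile.Reader API. It assumes the writer stored Text keys and BytesWritable values (one record per source file) and concatenates all record values into one byte array; it is a hypothetical shape, not the actual Util implementation.

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

// Hypothetical helper, not the actual Util implementation.
public static byte[] readSequenceFileBytes(String uri) throws Exception {
  Configuration conf = new Configuration();
  SequenceFile.Reader reader =
      new SequenceFile.Reader(conf, SequenceFile.Reader.file(new Path(uri)));
  try {
    Text key = new Text();
    BytesWritable value = new BytesWritable();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    while (reader.next(key, value)) {
      // getLength() trims the padding that getBytes() may carry
      out.write(value.getBytes(), 0, value.getLength());
    }
    return out.toByteArray();
  } finally {
    reader.close();
  }
}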
Example No. 2
 @Test
 public void testReadWriteFromNativeFSToHDFS() throws Exception {
   Util.writeToSequenceFile("file:///etc/passwd", "file:///tmp/passwd.seq", new DefaultCodec());
   Map<Text, byte[]> passwd = Util.readSequenceFile("file:///tmp/passwd.seq");
   for (Map.Entry<Text, byte[]> entry : passwd.entrySet()) {
     assertEquals("/etc/passwd", entry.getKey().toString());
   }
 }
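
The assertion on the key holds if the writer uses the source path as the Text key and the raw file bytes as the value. Below is a minimal sketch of such a writeToSequenceFile, assuming exactly that record layout; the real Util implementation may differ.

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;

// Hypothetical sketch, not the actual Util implementation.
public static void writeToSequenceFile(String inputURI, String outputURI,
    CompressionCodec codec) throws Exception {
  Configuration conf = new Configuration();
  Path in = new Path(inputURI);
  FSDataInputStream stream = in.getFileSystem(conf).open(in);
  byte[] data;
  try {
    data = IOUtils.toByteArray(stream); // whole file buffered in memory
  } finally {
    stream.close();
  }
  SequenceFile.Writer writer = SequenceFile.createWriter(conf,
      SequenceFile.Writer.file(new Path(outputURI)),
      SequenceFile.Writer.keyClass(Text.class),
      SequenceFile.Writer.valueClass(BytesWritable.class),
      SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, codec));
  try {
    // key = source path (e.g. "/etc/passwd"), value = raw bytes
    writer.append(new Text(in.toUri().getPath()), new BytesWritable(data));
  } finally {
    writer.close();
  }
}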
Example No. 3
 @Test
 public void testRecursiveCopyAndEncodingFromS3ToHdfs() throws Exception {
   List<String> ncfiles =
       Util.listFiles(
           "s3://nasanex/MODIS/MOLT/MOD13Q1.005/2013.09.30/MOD13Q1.A2013273.h21v00.005.2013303115726.hdf",
           "hdf");
   for (String uri : ncfiles) {
     String output = new File(uri).getName();
     Util.writeToSequenceFile(uri, hadoopMaster + "/tmp/" + output + ".seq", new DefaultCodec());
   }
 }
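
A plausible shape for Util.listFiles, using Hadoop 2.x's FileSystem.listFiles(path, recursive) to walk the tree and filter by extension. The suffix-matching rule is an assumption; S3 credentials must already be configured in the Hadoop configuration for s3:// URIs.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

// Hypothetical sketch, not the actual Util implementation.
public static List<String> listFiles(String rootURI, String ext) throws Exception {
  Path root = new Path(rootURI);
  FileSystem fs = root.getFileSystem(new Configuration());
  List<String> uris = new ArrayList<String>();
  RemoteIterator<LocatedFileStatus> it = fs.listFiles(root, true); // recursive
  while (it.hasNext()) {
    Path p = it.next().getPath();
    if (p.getName().endsWith("." + ext)) {
      uris.add(p.toString());
    }
  }
  return uris;
}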
Example No. 4
  @Test
  public void testReadNetCDFInSequenceFileFormat() throws Exception {

    String path = this.getClass().getResource("/ncar.nc").getPath();
    Util.writeToSequenceFile("file://" + path, hadoopMaster + "/tmp/ncar.seq", new DefaultCodec());
    Map<Text, byte[]> netcdfSequence = Util.readSequenceFile(hadoopMaster + "/tmp/ncar.seq");
    for (Map.Entry<Text, byte[]> entry : netcdfSequence.entrySet()) {
      NetcdfFile ncFile = NetcdfFile.openInMemory(entry.getKey().toString(), entry.getValue());
      assertEquals(5, ncFile.getDimensions().size());
    }
  }
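
NetcdfFile.openInMemory(name, bytes) parses the byte array directly, so no temporary file is needed. A short follow-up that could slot into the test above to dump the dimensions of the round-tripped file; the getShortName() accessor assumes NetCDF-Java 4.3 or later.

import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFile;

// Inspect an in-memory NetCDF file recovered from a SequenceFile value.
byte[] bytes = Util.readSequenceFileFromHDFS(hadoopMaster + "/tmp/ncar.seq");
NetcdfFile ncFile = NetcdfFile.openInMemory("ncar.nc", bytes);
for (Dimension dim : ncFile.getDimensions()) {
  System.out.println(dim.getShortName() + " = " + dim.getLength());
}
ncFile.close();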
Example No. 5
  @Test
  public void testCopyFromS3ViaHttpToHdfs() throws Exception {
    String inputURI =
        "http://nasanex.s3.amazonaws.com/NEX-DCP30/BCSD/rcp26/mon/atmos/pr/r1i1p1/v1.0/CONUS/pr_amon_BCSD_rcp26_r1i1p1_CONUS_HadGEM2-ES_200512-200512.nc";
    Util.writeToSequenceFile(inputURI, hadoopMaster + "/tmp/nasa-nc.seq", new SnappyCodec());

    String existingBucketName = "ori-tmp"; // S3 bucket
    String keyName = "passwd"; // object key
    inputURI = "http://" + existingBucketName + ".s3.amazonaws.com/" + keyName;
    Util.writeToSequenceFile(inputURI, "file:///tmp/passwd.seq", new SnappyCodec());
    Util.writeToSequenceFile(inputURI, hadoopMaster + "/tmp/passwd.seq", new SnappyCodec());
  }
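
Stock Hadoop 2.x registers no FileSystem for plain http:// URIs, so a helper like this presumably falls back to java.net.URL for the HTTP case. A minimal sketch of that fallback, under that assumption (the helper name is hypothetical):

import java.io.InputStream;
import java.net.URL;
import org.apache.commons.io.IOUtils;

// Hypothetical HTTP fallback: fetch the object body into memory.
public static byte[] fetchHttpBytes(String httpURI) throws Exception {
  InputStream in = new URL(httpURI).openStream();
  try {
    return IOUtils.toByteArray(in); // whole object buffered in memory
  } finally {
    in.close();
  }
}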
Example No. 6
 @Test
 public void testGzipBzip2Lz4SnappyCodecs() throws Exception {
    // These should all work when the native codec libraries are enabled;
    // verify with: $ hadoop checknative -a
   String path = this.getClass().getResource("/ncar.nc").getPath();
   Util.writeToSequenceFile("file://" + path, hadoopMaster + "/tmp/ncar.nc.seq", new Lz4Codec());
   Util.writeToSequenceFile("file://" + path, hadoopMaster + "/tmp/ncar.nc.seq", new BZip2Codec());
   Util.writeToSequenceFile("file://" + path, hadoopMaster + "/tmp/ncar.nc.seq", new GzipCodec());
   path = this.getClass().getResource("/TRAXLZU12903D05F94.h5").getPath();
   Util.writeToSequenceFile(
       "file://" + path, hadoopMaster + "/tmp/TRAXLZU12903D05F94.h5.seq", new SnappyCodec());
 }
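
To mirror $ hadoop checknative -a programmatically, the test could guard itself with a JUnit assumption so it skips rather than fails when libhadoop is missing. A minimal sketch using the standard NativeCodeLoader, placed at the top of the test method:

import org.apache.hadoop.util.NativeCodeLoader;
import org.junit.Assume;

// Skip (rather than fail) the native-codec tests when libhadoop is not loaded.
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());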
Example No. 7
 /**
  * Face detection on a bzip2-compressed PPM image, the format used in the NIST Color FERET
  * database.
  *
  * <p>In Eclipse, run with: -Djava.library.path=/home/heq/hadoop-2.2.0/lib/native
  *
  * @throws Exception
  */
 @Test
 public void testFaceDetectionInPPMFromS3() throws Exception {
   String file = "00001_930831_hl_a.ppm";
   String inputURI = "s3n://ori-colorferetsubset/00001/" + file + ".bz2";
   String outputURI = hadoopMaster + "/tmp/" + file + ".seq";
   Util.writeToSequenceFile(inputURI, outputURI, new SnappyCodec());
   byte[] ppmbytes = Util.readSequenceFileFromHDFS(outputURI);
   logger.debug("file size= {}", ppmbytes.length);
   ImageInputStream iis = ImageIO.createImageInputStream(new ByteArrayInputStream(ppmbytes));
   BufferedImage rawimage = PPMImageReader.read(iis);
   List<int[]> faces = OpenCV.detectFace(rawimage);
    assertEquals(1, faces.size());
 }
Example No. 8
  /**
   * List NASA OpenNEX netCDF files under a randomly selected folder.
   *
   * @throws Exception
   */
  @Test
  public void testCopyFilesRecursivelyFromS3() throws Exception {
    List<String> ncfiles =
        Util.listFiles("s3://nasanex/NEX-DCP30/BCSD/rcp26/mon/atmos/pr/r1i1p1/v1.0/", "nc");
    assertTrue(ncfiles.size() >= 100); // a lot
    for (String url : ncfiles) {
      String file = org.apache.commons.io.FilenameUtils.getBaseName(url);
      Util.writeToSequenceFile(url, hadoopMaster + "/opennex/" + file + ".seq", new SnappyCodec());
    }

    List<String> fileUrls = Util.listFiles("s3://ori-colorferetsubset/00001", "bz2");
    for (String url : fileUrls) {
      logger.debug(url);
      String file = org.apache.commons.io.FilenameUtils.getBaseName(url);
      Util.writeToSequenceFile(url, hadoopMaster + "/tmp/" + file + ".seq", new SnappyCodec());
    }
  }
Example No. 9
 @Test
  public void testPackS3FilesToHDFS() throws Exception {
   Util.packS3FilesToHDFS(
       "s3://nasanex/Landsat/gls/2010/1/2009/", "/output", "tif", new SnappyCodec());
   // Util.listSequenceFileKeys(hadoopMaster + "/output/1.seq");
 }
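
A hypothetical sketch of what packS3FilesToHDFS might do: unlike writeToSequenceFile, it appends many small objects into one SequenceFile, the usual cure for HDFS's small-files problem. The single output file and the reuse of listFiles are assumptions; the real Util may roll output files by size (hence the commented-out /output/1.seq above).

import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;

// Hypothetical sketch, not the actual Util implementation.
public static void packFilesToSequenceFile(String s3Dir, String outputDir,
    String ext, CompressionCodec codec) throws Exception {
  Configuration conf = new Configuration();
  List<String> uris = Util.listFiles(s3Dir, ext); // reuse the listing helper
  SequenceFile.Writer writer = SequenceFile.createWriter(conf,
      SequenceFile.Writer.file(new Path(outputDir + "/1.seq")),
      SequenceFile.Writer.keyClass(Text.class),
      SequenceFile.Writer.valueClass(BytesWritable.class),
      SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, codec));
  try {
    for (String uri : uris) {
      Path p = new Path(uri);
      FileSystem fs = p.getFileSystem(conf);
      byte[] data = IOUtils.toByteArray(fs.open(p));
      writer.append(new Text(uri), new BytesWritable(data)); // one record per object
    }
  } finally {
    writer.close();
  }
}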
Example No. 10
 @Test
 public void testListSequenceFileKey() throws Exception {
   Util.writeToSequenceFile("file:///etc/passwd", "file:///tmp/passwd.seq", new DefaultCodec());
    Util.listSequenceFileKeys("file:///tmp/passwd.seq");
 }
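
A minimal sketch of a listSequenceFileKeys helper: the next(key) overload advances the reader without deserializing values, so keys can be listed cheaply even when the values are large blobs. As before, this is an assumed shape, not the actual Util code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

// Hypothetical sketch, not the actual Util implementation.
public static void listSequenceFileKeys(String uri) throws Exception {
  Configuration conf = new Configuration();
  SequenceFile.Reader reader =
      new SequenceFile.Reader(conf, SequenceFile.Reader.file(new Path(uri)));
  try {
    Text key = new Text();
    while (reader.next(key)) { // reads the key only, skipping the value bytes
      System.out.println(key);
    }
  } finally {
    reader.close();
  }
}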