/** * Open a ucar.nc2.ft.PointFeatureCollection, write out in CF point format. * * @param fileIn open through TypedDatasetFactory.open(FeatureType.POINT, ..) * @param fileOut write to this netcdf-3 file * @param inMemory if true, read file into memory for efficiency * @return true on success * @throws IOException on read/write error */ public static boolean rewritePointFeatureDataset(String fileIn, String fileOut, boolean inMemory) throws IOException { System.out.println( "Rewrite2 .nc files from " + fileIn + " to " + fileOut + " inMemory= " + inMemory); long start = System.currentTimeMillis(); // do it in memory for speed NetcdfFile ncfile = inMemory ? NetcdfFile.openInMemory(fileIn) : NetcdfFile.open(fileIn); NetcdfDataset ncd = new NetcdfDataset(ncfile); Formatter errlog = new Formatter(); FeatureDataset fd = FeatureDatasetFactoryManager.wrap(FeatureType.ANY_POINT, ncd, null, errlog); if (fd == null) return false; if (fd instanceof FeatureDatasetPoint) { writePointFeatureCollection((FeatureDatasetPoint) fd, fileOut); fd.close(); long took = System.currentTimeMillis() - start; System.out.println(" that took " + (took - start) + " msecs"); return true; } return false; }
/**
 * Wrap a single BUFR message as an in-memory NetcdfDataset so it can be
 * accessed through the CDM API.
 *
 * @param m the BUFR message to decode (bytes obtained from the scanner)
 * @return a NetcdfDataset backed by the message bytes
 * @throws IOException if opening the in-memory file fails
 */
private NetcdfDataset getBufrMessageAsDataset(Message m) throws IOException {
  byte[] raw = scan.getMessageBytes(m);
  NetcdfFile nc;
  try {
    nc = NetcdfFile.openInMemory("test", raw, "ucar.nc2.iosp.bufr.BufrIosp");
  } catch (Exception e) {
    // Wrap any decoding failure, preserving the original as the cause.
    throw new IOException(e);
  }
  return new NetcdfDataset(nc);
}
/**
 * Round-trips a NetCDF file through a Hadoop SequenceFile and verifies that
 * each entry read back still parses as NetCDF with the expected 5 dimensions.
 *
 * @throws Exception on any I/O or HDFS failure
 */
@Test
public void testReadnetCDFinSequnceFileFormat() throws Exception {
  String path = this.getClass().getResource("/ncar.nc").getPath();
  Util.writeToSequenceFile("file://" + path, hadoopMaster + "/tmp/ncar.seq", new DefaultCodec());
  Map<Text, byte[]> netcdfsequnce = Util.readSequenceFile(hadoopMaster + "/tmp/ncar.seq");
  for (Map.Entry<Text, byte[]> entry : netcdfsequnce.entrySet()) {
    NetcdfFile ncFile = NetcdfFile.openInMemory(entry.getKey().toString(), entry.getValue());
    try {
      // BUG FIX: assertEquals takes (expected, actual); the original had them
      // reversed, which produces misleading failure messages.
      assertEquals(5, ncFile.getDimensions().size());
    } finally {
      ncFile.close(); // BUG FIX: the in-memory file was never closed
    }
  }
}
@Test /** * TODO: python API: * http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory * * @throws Exception */ public void testNASAModisHDFAccess() throws Exception { File file = new File( this.getClass() .getResource("/MYD13Q1.A2014121.h23v04.005.2014138045119.hdf") .getPath()); byte[] netcdfinbyte = FileUtils.readFileToByteArray(file); NetcdfFile netCDFfile = NetcdfFile.openInMemory("inmemory.hdf", netcdfinbyte); // TODO Processing hdf files }
@Test /** * TODO: python API: * http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory * * @throws Exception */ public void testNetCDFInterfaceToACcessH5() throws Exception { H5File h5 = hdf5_getters.hdf5_open_readonly( this.getClass().getResource("/TRAXLZU12903D05F94.h5").getPath()); double h5_temp = hdf5_getters.get_tempo(h5); File file = new File(this.getClass().getResource("/TRAXLZU12903D05F94.h5").getPath()); byte[] netcdfinbyte = FileUtils.readFileToByteArray(file); NetcdfFile netCDFfile = NetcdfFile.openInMemory("inmemory.h5", netcdfinbyte); Variable var = (Variable) netCDFfile.findVariable("/analysis/songs.tempo"); Array content = var.read(); // 1D array double netcdf_tempo = content.getDouble(0); // 1 column only assertEquals(h5_temp, netcdf_tempo, 0.001); }
/** * Open a ucar.nc2.dt.PointObsDataset, write out in CF point format. * * @param fileIn open through TypedDatasetFactory.open(FeatureType.POINT, ..) * @param fileOut write to this netcdf-3 file * @param inMemory if true, read file into memory for efficiency * @return true on success * @throws IOException on read/write error */ public static boolean rewritePointObsDataset(String fileIn, String fileOut, boolean inMemory) throws IOException { System.out.println( "Rewrite2 .nc files from " + fileIn + " to " + fileOut + " inMemory= " + inMemory); long start = System.currentTimeMillis(); // do it in memory for speed NetcdfFile ncfile = inMemory ? NetcdfFile.openInMemory(fileIn) : NetcdfFile.open(fileIn); NetcdfDataset ncd = new NetcdfDataset(ncfile); StringBuilder errlog = new StringBuilder(); PointObsDataset pobsDataset = (PointObsDataset) TypedDatasetFactory.open(FeatureType.POINT, ncd, null, errlog); if (pobsDataset == null) return false; writePointObsDataset(pobsDataset, fileOut); pobsDataset.close(); long took = System.currentTimeMillis() - start; System.out.println(" that took " + (took - start) + " msecs"); return true; }
@Test /** * Find min/max/average precipitation for a randomly-positioned but fixed-size region from a nc * file output: filename,origin,size key: value:min, max, average * * @throws Exception */ public void testProcessingNASANexDataInNetCDF() throws Exception { final int SIZE = 100; File file = new File(this.getClass().getResource("/ncar.nc").getPath()); byte[] netcdfinbyte = FileUtils.readFileToByteArray(file); // use any dummy filename for file in memory NetcdfFile netCDFfile = NetcdfFile.openInMemory("inmemory.nc", netcdfinbyte); Variable time = netCDFfile.findVariable("time"); ArrayDouble.D1 days = (ArrayDouble.D1) time.read(); Variable lat = netCDFfile.findVariable("lat"); if (lat == null) { logger.error("Cannot find Variable latitude(lat)"); return; } ArrayFloat.D1 absolutelat = (ArrayFloat.D1) lat.read(); Variable lon = netCDFfile.findVariable("lon"); if (lon == null) { logger.error("Cannot find Variable longitude(lon)"); return; } ArrayFloat.D1 absolutelon = (ArrayFloat.D1) lon.read(); Variable pres = netCDFfile.findVariable("pr"); if (pres == null) { logger.error("Cannot find Variable precipitation(pr)"); return; } Random rand = new Random(); int orig_lat = rand.nextInt((int) lat.getSize()); orig_lat = Math.min(orig_lat, (int) (lat.getSize() - SIZE)); int orig_lon = rand.nextInt((int) lon.getSize()); orig_lon = Math.min(orig_lon, (int) (lon.getSize() - SIZE)); int[] origin = new int[] {0, orig_lat, orig_lon}; int[] size = new int[] {1, SIZE, SIZE}; ArrayFloat.D3 data3D = (ArrayFloat.D3) pres.read(origin, size); double max = Double.NEGATIVE_INFINITY; double min = Double.POSITIVE_INFINITY; double sum = 0; for (int j = 0; j < SIZE; j++) { for (int k = 0; k < SIZE; k++) { double current = data3D.get(0, j, k); max = (current > max ? current : max); min = (current < min ? current : min); sum += current; } } logger.info( days + "," + absolutelat.get(orig_lat) + "," + absolutelon.get(orig_lon) + "," + SIZE + ":" + min + "," + max + "," + sum / (SIZE * SIZE)); }