private void makeMultidimInner( NetcdfDataset ds, TableConfig parentTable, TableConfig childTable) { Dimension parentDim = ds.findDimension(parentTable.dimName); Dimension childDim = ds.findDimension(childTable.innerName); // divide up the variables between the parent and the child List<String> obsVars; List<Variable> vars = ds.getVariables(); List<String> parentVars = new ArrayList<>(vars.size()); obsVars = new ArrayList<>(vars.size()); for (Variable orgV : vars) { if (orgV instanceof Structure) continue; Dimension dim0 = orgV.getDimension(0); if ((dim0 != null) && dim0.equals(parentDim)) { if ((orgV.getRank() == 1) || ((orgV.getRank() == 2) && orgV.getDataType() == DataType.CHAR)) { parentVars.add(orgV.getShortName()); } else { Dimension dim1 = orgV.getDimension(1); if ((dim1 != null) && dim1.equals(childDim)) obsVars.add(orgV.getShortName()); } } } parentTable.vars = parentVars; childTable.vars = obsVars; }
/**
 * Returns true if any variable in {@code ds} carries a "CoordinateAxisType" attribute
 * whose value equals the string form of {@code a}.
 */
private boolean hasAxisType(NetcdfDataset ds, AxisType a) {
  String wanted = a.toString();
  for (Variable v : ds.getVariables()) {
    String axisType = ds.findAttValueIgnoreCase(v, "CoordinateAxisType", null);
    // wanted is never null, so this also covers the missing-attribute case
    if (wanted.equals(axisType)) {
      return true;
    }
  }
  return false;
}
/**
 * Replaces the current dataset, rebuilds the variable/axis bean lists, and repopulates
 * the variable, axis and coordinate-system tables from the new dataset.
 */
public void setDataset(NetcdfDataset ds) {
  this.ds = ds;
  parseInfo = new Formatter();

  List<VariableBean> variableBeans = new ArrayList<>();
  List<AxisBean> axisBeans = new ArrayList<>();
  setVariables(ds.getVariables(), axisBeans, variableBeans);

  varTable.setBeans(variableBeans);
  axisTable.setBeans(axisBeans);
  csTable.setBeans(getCoordinateSystemBeans(ds));
}
@Test public void testNoValid2DVariable() throws Exception { final File file = TestData.file(this, "noVars.nc"); NetcdfDataset dataset = NetcdfDataset.acquireDataset(file.getAbsolutePath(), null); List<Variable> variables = dataset.getVariables(); boolean speedVariableIsPresent = false; String speedVariableName = ""; for (Variable variable : variables) { if (variable.getShortName().equals("spd")) { speedVariableIsPresent = true; speedVariableName = variable.getFullName(); break; } } assertTrue(speedVariableIsPresent); final NetCDFImageReaderSpi unidataImageReaderSpi = new NetCDFImageReaderSpi(); assertTrue(unidataImageReaderSpi.canDecodeInput(file)); NetCDFImageReader reader = null; try { // sample dataset containing a water_speed variable having // only time, depth dimensions. No lon/lat dims are present // resulting into variable not usable. reader = (NetCDFImageReader) unidataImageReaderSpi.createReaderInstance(); reader.setInput(file); final List<Name> names = reader.getCoveragesNames(); boolean isSpeedCoverageAvailable = false; for (Name name : names) { if (name.toString().equals(speedVariableName)) { isSpeedCoverageAvailable = true; break; } } // Checking that only "mask" variable is found assertFalse(isSpeedCoverageAvailable); } finally { if (dataset != null) { dataset.close(); } if (reader != null) { try { reader.dispose(); } catch (Throwable t) { // Does nothing } } } }
/**
 * Constructor.
 *
 * <p>Locates (or synthesizes) the record structure and the record dimension, collects every
 * non-scalar variable whose outer dimension is the record dimension into
 * {@code typedDataVariables}, and resolves the time units of the observation time variable.
 *
 * @param ncfile the netcdf file
 * @param obsTimeVName observation time variable name (required)
 * @param nomTimeVName nominal time variable name (may be null)
 * @param typedDataVariables list of data variables; all record variables will be added to this
 *     list, except the record structure itself and scalar variables. You can remove extra
 *     variables from the list afterwards.
 * @param recDimName name of the pseudo record dimension; used only when the file has no
 *     unlimited dimension
 * @param errBuffer buffer for non-fatal error messages (may be null)
 * @throws IllegalArgumentException if ncfile has no unlimited dimension and recDimName is null.
 */
public RecordDatasetHelper(
    NetcdfDataset ncfile,
    String obsTimeVName,
    String nomTimeVName,
    List<VariableSimpleIF> typedDataVariables,
    String recDimName,
    Formatter errBuffer) {
  this.ncfile = ncfile;
  this.obsTimeVName = obsTimeVName;
  this.nomTimeVName = nomTimeVName;
  this.errs = errBuffer;

  // check if we already have a structure vs if we have to add it.
  if (this.ncfile.hasUnlimitedDimension()) {
    // ask the IOSP to expose the record variables as a Structure named "record";
    // this must happen before looking the structure up below
    this.ncfile.sendIospMessage(NetcdfFile.IOSP_MESSAGE_ADD_RECORD_STRUCTURE);
    this.recordVar = (StructureDS) this.ncfile.getRootGroup().findVariable("record");
    this.obsDim = ncfile.getUnlimitedDimension();
  } else {
    if (recDimName == null)
      throw new IllegalArgumentException(
          "File <"
              + this.ncfile.getLocation()
              + "> has no unlimited dimension, specify psuedo record dimension with observationDimension global attribute.");
    this.obsDim = this.ncfile.getRootGroup().findDimension(recDimName);
    // no real record structure exists: fake one over the pseudo record dimension
    this.recordVar = new StructurePseudoDS(this.ncfile, null, "record", null, obsDim);
  }

  // create member variables: every non-scalar variable whose outer dimension is the record dim
  List<Variable> recordMembers = ncfile.getVariables();
  for (Variable v : recordMembers) {
    if (v == recordVar) continue;
    if (v.isScalar()) continue;
    if (v.getDimension(0) == this.obsDim) typedDataVariables.add(v);
  }

  // need the time units; default to epoch seconds when the attribute is absent
  Variable timeVar = ncfile.findVariable(obsTimeVName);
  String timeUnitString =
      ncfile.findAttValueIgnoreCase(timeVar, CDM.UNITS, "seconds since 1970-01-01");

  try {
    timeUnit = new DateUnit(timeUnitString);
  } catch (Exception e) {
    if (null != errs) errs.format("Error on string = %s == %s%n", timeUnitString, e.getMessage());
    try {
      // fall back to epoch seconds when the declared units cannot be parsed
      timeUnit = new DateUnit("seconds since 1970-01-01");
    } catch (Exception e1) {
      // cant happen
    }
  }
}
/**
 * Searches {@code ds} for the first variable whose units string matches (case-insensitively)
 * one of the comma-separated unit strings in {@code unitList}.
 *
 * <p>Units earlier in {@code unitList} take priority: all variables are checked against the
 * first unit before the second unit is considered.
 *
 * @param ds dataset whose variables are scanned
 * @param unitList comma-separated candidate unit strings, e.g. "degrees_north,degrees_N"
 * @return the first matching variable, or null if none matches
 */
private Variable hasUnits(NetcdfDataset ds, String unitList) {
  List<Variable> varList = ds.getVariables();
  // String.split replaces the legacy StringTokenizer; tokens are trimmed and blank
  // tokens skipped so stray commas or whitespace in unitList cannot cause misses
  // or spurious matches against empty unit strings.
  for (String token : unitList.split(",")) {
    String unit = token.trim();
    if (unit.isEmpty()) continue;
    for (Variable ve : varList) {
      String hasUnit = ve.getUnitsString();
      if (hasUnit == null) continue;
      if (hasUnit.equalsIgnoreCase(unit)) return ve;
    }
  }
  return null;
}
public void augmentDataset(NetcdfDataset ds, CancelTask cancelTask) throws IOException { for (Variable v : ds.getVariables()) checkIfAxis(v); int year = ds.readAttributeInteger(null, "YEAR", -1); int doy = ds.readAttributeInteger(null, "DAY", -1); double time = ds.readAttributeDouble(null, "TIME", Double.NaN); if ((year > 0) && (doy > 0) && !Double.isNaN(time)) { Calendar cal = new GregorianCalendar(TimeZone.getTimeZone("UTC")); cal.clear(); cal.set(Calendar.YEAR, year); cal.set(Calendar.DAY_OF_YEAR, doy); int hour = (int) time; cal.set(Calendar.HOUR_OF_DAY, hour); time -= hour; time *= 60; int minute = (int) time; cal.set(Calendar.MINUTE, minute); time -= minute; time *= 60; cal.set(Calendar.SECOND, (int) time); VariableDS var = new VariableDS( ds, null, null, "timeFromAtts", DataType.LONG, "", "seconds since 1970-01-01 00:00", "time generated from global attributes"); // LOOK : cant handle scalar coordinates yet // var.addAttribute( new Attribute(_Coordinate.AxisType, AxisType.Time.toString())); ds.addVariable(null, var); ArrayLong.D0 data = new ArrayLong.D0(); data.set(cal.getTime().getTime() / 1000); var.setCachedData(data, true); } ds.finish(); }
/**
 * Generate a list of ViewVariables direct from a dataset. The list will be filtered to only
 * include variables with the specified name.
 *
 * @param ds the dataset whose variables are parsed
 * @param variableNameFilter if not null, all ViewVariables in the response will have the name
 *     variableNameFilter
 * @return the parsed view variables (variables that fail to parse are omitted)
 * @throws IOException on read error
 */
public static AbstractViewVariable[] fromNetCDFDataset(
    NetcdfDataset ds, String variableNameFilter) throws IOException {
  List<AbstractViewVariable> parsed = new ArrayList<>();
  for (Variable var : ds.getVariables()) {
    // when a filter is supplied, only variables with exactly that name pass
    if (variableNameFilter != null && !var.getName().equals(variableNameFilter)) {
      continue;
    }
    AbstractViewVariable viewVar = parseVariableRecursive(var);
    if (viewVar != null) {
      parsed.add(viewVar);
    }
  }
  return parsed.toArray(new AbstractViewVariable[parsed.size()]);
}
/**
 * Opens (possibly aggregating) the data at {@code location}, scans it for vector-component
 * pairs and NetCDF-U mean/standard-deviation groups, and wraps everything in a
 * {@link CdmGridDataset} with the appropriate variable plugins registered.
 */
@Override
public AbstractGridDataset createDataset(String id, String location)
    throws IOException, EdalException {
  NetcdfDataset nc = null;
  try {
    /*
     * Open the dataset, using the cache for NcML aggregations
     */
    nc = openAndAggregateDataset(location);
    /*-
     * We may in future be able to use forecast model run collection aggregations for
     * dealing with the case of overlapping time axes. To do this the code will look
     * something like this:
     *
     * StringBuilder sb = new StringBuilder();
     * Formatter formatter = new Formatter(sb, Locale.UK);
     * Fmrc f = Fmrc.open(location, formatter);
     *
     * in openAndAggregateDataset. It will need to build up an NcML document which
     * does this. It should look something like:
     *
     * <netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2" enhance="true">
     *   <aggregation dimName="run" type="forecastModelRunCollection" timeUnitsChange="true">
     *     <!-- scanFmrc actually works, but what we want is something like the following bit -->
     *     <scanFmrc location="/home/guy/Data/POLCOMS_IRISH/" regExp=".*\.nc"/>
     *     <netcdf location="/home/guy/Data/POLCOMS_IRISH/polcoms_irish_hourly_20090320.nc" coordValue="2009-03-20T00:00:00Z" enhance="true" />
     *     <netcdf location="/home/guy/Data/POLCOMS_IRISH/polcoms_irish_hourly_20090321.nc" coordValue="2009-03-21T00:00:00Z" enhance="true" />
     *     <netcdf location="/home/guy/Data/POLCOMS_IRISH/polcoms_irish_hourly_20090322.nc" coordValue="2009-03-22T00:00:00Z" enhance="true" />
     *   </aggregation>
     * </netcdf>
     *
     * For more documentation see:
     * http://mailman.unidata.ucar.edu/software/thredds/current/netcdf-java/ncml/FmrcAggregation.html
     *
     * We then can do stuff like:
     *
     * ucar.nc2.dt.GridDataset gridDataset = f.getDatasetBest();
     *
     * To get the single best aggregation of the overlapping time axis
     *
     * Then we need to work with GridDatasets in place of NetcdfDatasets. Stuff like:
     *
     * for(Variable variable : gridDataset.getNetcdfFile().getVariables()) {
     *     // blah blah
     * }
     *
     * will be necessary. We need to check that that works with remote datasets too
     */

    /*
     * We look for NetCDF-U variables to group mean/standard-deviation.
     *
     * We need to do this here because we want to subsequently ignore
     * parent variables
     */
    Map<String, String[]> varId2AncillaryVars = new HashMap<String, String[]>();
    for (Variable variable : nc.getVariables()) {
      /*
       * Just look for parent variables, since these may not have a
       * grid directly associated with them
       */
      for (Attribute attr : variable.getAttributes()) {
        if (attr.getFullName().equalsIgnoreCase("ancillary_variables")) {
          varId2AncillaryVars.put(variable.getFullName(), attr.getStringValue().split(" "));
          // NOTE(review): this `continue` only advances the inner attribute loop,
          // so it is effectively a no-op; `break` was probably intended. Behavior
          // is the same apart from redundant scanning of remaining attributes.
          continue;
        }
      }
    }

    ucar.nc2.dt.GridDataset gridDataset = CdmUtils.getGridDataset(nc);
    List<GridVariableMetadata> vars = new ArrayList<GridVariableMetadata>();
    /*
     * Store a map of component names. Key is the compound name, value
     * is a 2-element String array with x, y component IDs
     *
     * Also store a map of whether these components are really
     * eastward/northward, or whether they are locally u/v
     */
    Map<String, String[]> xyComponentPairs = new HashMap<String, String[]>();
    Map<String, Boolean> xyNameToTrueEN = new HashMap<String, Boolean>();
    /*
     * Store a map of variable IDs to UncertML URLs. This will be used
     * to determine which components are mean/std/etc.
     *
     * TODO implement more than just Mean/SD
     */
    Map<String, String> varId2UncertMLRefs = new HashMap<String, String>();
    /*
     * Here we store the parent variable IDs and their corresponding
     * title.
     */
    Map<String, String> parentVarId2Title = new HashMap<String, String>();

    for (Gridset gridset : gridDataset.getGridsets()) {
      GridCoordSystem coordSys = gridset.getGeoCoordSystem();
      HorizontalGrid hDomain = CdmUtils.createHorizontalGrid(coordSys);
      VerticalAxis zDomain = CdmUtils.createVerticalAxis(coordSys);
      TimeAxis tDomain = CdmUtils.createTimeAxis(coordSys);

      /*
       * Create a VariableMetadata object for each GridDatatype
       */
      for (GridDatatype grid : gridset.getGrids()) {
        VariableDS variable = grid.getVariable();
        String varId = variable.getFullName();
        String name = getVariableName(variable);

        /*
         * If this is a parent variable for a stats collection, we
         * don't want it to be a normal variable as well.
         */
        if (varId2AncillaryVars.containsKey(varId)) {
          parentVarId2Title.put(varId, name);
          continue;
        }

        /*
         * If it is a child variable is (potentially) referenced by
         * UncertML, store its ID and the (possible) UncertML URI
         */
        for (Attribute attr : variable.getAttributes()) {
          if (attr.getFullName().equalsIgnoreCase("ref")) {
            varId2UncertMLRefs.put(varId, attr.getStringValue());
          }
        }

        Parameter parameter =
            new Parameter(
                varId,
                variable.getShortName(),
                variable.getDescription(),
                variable.getUnitsString(),
                name);
        GridVariableMetadata metadata =
            new GridVariableMetadata(
                variable.getFullName(), parameter, hDomain, zDomain, tDomain, true);
        vars.add(metadata);

        if (name != null) {
          /*
           * Check for vector components
           */
          if (name.contains("eastward_")) {
            String compoundName = name.replaceFirst("eastward_", "");
            String[] cData;
            if (!xyComponentPairs.containsKey(compoundName)) {
              cData = new String[2];
              xyComponentPairs.put(compoundName, cData);
              xyNameToTrueEN.put(compoundName, true);
            }
            cData = xyComponentPairs.get(compoundName);
            /*
             * By doing this, we will end up with the merged
             * coverage
             */
            cData[0] = varId;
          } else if (name.contains("northward_")) {
            String compoundName = name.replaceFirst("northward_", "");
            String[] cData;
            if (!xyComponentPairs.containsKey(compoundName)) {
              cData = new String[2];
              xyComponentPairs.put(compoundName, cData);
              xyNameToTrueEN.put(compoundName, true);
            }
            cData = xyComponentPairs.get(compoundName);
            /*
             * By doing this, we will end up with the merged
             * coverage
             */
            cData[1] = varId;
          } else if (name.matches("u-.*component")) {
            String compoundName = name.replaceFirst("u-(.*)component", "$1");
            String[] cData;
            if (!xyComponentPairs.containsKey(compoundName)) {
              cData = new String[2];
              xyComponentPairs.put(compoundName, cData);
              xyNameToTrueEN.put(compoundName, false);
            }
            cData = xyComponentPairs.get(compoundName);
            /*
             * By doing this, we will end up with the merged
             * coverage
             */
            cData[0] = varId;
          } else if (name.matches("v-.*component")) {
            String compoundName = name.replaceFirst("v-(.*)component", "$1");
            String[] cData;
            if (!xyComponentPairs.containsKey(compoundName)) {
              cData = new String[2];
              xyComponentPairs.put(compoundName, cData);
              xyNameToTrueEN.put(compoundName, false);
            }
            cData = xyComponentPairs.get(compoundName);
            /*
             * By doing this, we will end up with the merged
             * coverage
             */
            cData[1] = varId;
          }
          /*
           * We could potentially add a check for zonal/meridional
           * here if required.
           */
        }
      }
    }

    CdmGridDataset cdmGridDataset =
        new CdmGridDataset(id, location, vars, CdmUtils.getOptimumDataReadingStrategy(nc));

    // register a vector plugin for every compound name that has both components
    for (Entry<String, String[]> componentData : xyComponentPairs.entrySet()) {
      String commonName = componentData.getKey();
      String[] comps = componentData.getValue();
      if (comps[0] != null && comps[1] != null) {
        cdmGridDataset.addVariablePlugin(
            new VectorPlugin(comps[0], comps[1], commonName, xyNameToTrueEN.get(commonName)));
      }
    }

    // register a mean/SD plugin for every stats collection with both members identified
    for (String statsCollectionId : varId2AncillaryVars.keySet()) {
      String[] ids = varId2AncillaryVars.get(statsCollectionId);
      String meanId = null;
      String stddevId = null;
      for (String statsVarIds : ids) {
        String uncertRef = varId2UncertMLRefs.get(statsVarIds);
        if (uncertRef != null
            && uncertRef.equalsIgnoreCase("http://www.uncertml.org/statistics/mean")) {
          meanId = statsVarIds;
        }
        if (uncertRef != null
            && uncertRef.equalsIgnoreCase(
                "http://www.uncertml.org/statistics/standard-deviation")) {
          stddevId = statsVarIds;
        }
      }
      if (meanId != null && stddevId != null) {
        MeanSDPlugin meanSDPlugin =
            new MeanSDPlugin(meanId, stddevId, parentVarId2Title.get(statsCollectionId));
        cdmGridDataset.addVariablePlugin(meanSDPlugin);
      }
    }

    return cdmGridDataset;
  } finally {
    CdmUtils.closeDataset(nc);
  }
}
/**
 * Opens the NetCDF dataset at the given location, using the dataset cache if {@code location}
 * represents an NcML aggregation. We cannot use the cache for OPeNDAP or single NetCDF files
 * because the underlying data may have changed and the NetcdfDataset cache may cache a dataset
 * forever. In the case of NcML we rely on the fact that server administrators ought to have set a
 * "recheckEvery" parameter for NcML aggregations that may change with time. It is desirable to
 * use the dataset cache for NcML aggregations because they can be time-consuming to assemble and
 * we don't want to do this every time a map is drawn.
 *
 * @param location The location of the data: a local NetCDF file, an NcML aggregation file or an
 *     OPeNDAP location, {@literal i.e.} anything that can be passed to
 *     NetcdfDataset.openDataset(location).
 * @return a {@link NetcdfDataset} object for accessing the data at the given location.
 * @throws IOException if there was an error reading from the data source.
 */
private NetcdfDataset openAndAggregateDataset(String location)
    throws IOException, EdalException {
  NetcdfDataset nc;
  if (location.startsWith("dods://") || location.startsWith("http://")) {
    /*
     * We have a remote dataset
     */
    // NOTE(review): "https://" locations are not recognized here and would fall
    // through to the local-glob branch below — confirm whether that is intended.
    nc = CdmUtils.openDataset(location);
  } else {
    /*
     * We have a local dataset
     */
    List<File> files = null;
    try {
      files = CdmUtils.expandGlobExpression(location);
    } catch (NullPointerException e) {
      System.out.println("NPE processing location: " + location);
      throw e;
    }
    if (files.size() == 0) {
      throw new EdalException(
          "The location " + location + " doesn't refer to any existing files.");
    }
    if (files.size() == 1) {
      location = files.get(0).getAbsolutePath();
      nc = CdmUtils.openDataset(location);
    } else {
      /*
       * We have multiple files in a glob expression. We write some
       * NcML and use the NetCDF aggregation libs to parse this into
       * an aggregated dataset.
       *
       * If we have already generated the ncML on a previous call,
       * just use that.
       */
      // NOTE(review): ncmlString is cached on this instance, so this assumes one
      // provider instance serves a single glob location — confirm.
      if (ncmlString == null) {
        /*
         * Find the name of the time dimension
         */
        NetcdfDataset first = openAndAggregateDataset(files.get(0).getAbsolutePath());
        String timeDimName = null;
        for (Variable var : first.getVariables()) {
          if (var.isCoordinateVariable()) {
            for (Attribute attr : var.getAttributes()) {
              if (attr.getFullName().equalsIgnoreCase("units")
                  && attr.getStringValue().contains(" since ")) {
                /*
                 * This is the time dimension. Since this is
                 * a co-ordinate variable, there is only 1
                 * dimension
                 */
                Dimension timeDimension = var.getDimension(0);
                timeDimName = timeDimension.getFullName();
              }
            }
          }
        }
        first.close();
        if (timeDimName == null) {
          throw new EdalException("Cannot join multiple files without time dimensions");
        }
        /*
         * We can't assume that the glob expression will have
         * returned the files in time order.
         *
         * We could assume that alphabetical == time ordered (and
         * for properly named files it will - but let's not rely on
         * our users having sensible naming conventions...
         *
         * Sort the list using a comparator which opens the file and
         * gets the first value of the time dimension
         */
        final String aggDimName = timeDimName;
        Collections.sort(
            files,
            new Comparator<File>() {
              @Override
              public int compare(File ncFile1, File ncFile2) {
                NetcdfFile nc1 = null;
                NetcdfFile nc2 = null;
                try {
                  // compare the first time value of each file
                  nc1 = NetcdfFile.open(ncFile1.getAbsolutePath());
                  nc2 = NetcdfFile.open(ncFile2.getAbsolutePath());
                  Variable timeVar1 = nc1.findVariable(aggDimName);
                  Variable timeVar2 = nc2.findVariable(aggDimName);
                  long time1 = timeVar1.read().getLong(0);
                  long time2 = timeVar2.read().getLong(0);
                  return Long.compare(time1, time2);
                } catch (Exception e) {
                  /*
                   * There was a problem reading the data. Sort
                   * alphanumerically by filename and hope for the
                   * best...
                   *
                   * This catches all exceptions because however
                   * it fails this is still our best option.
                   *
                   * If the error is a genuine problem, it'll show
                   * up as soon as we try and aggregate.
                   */
                  return ncFile1.getAbsolutePath().compareTo(ncFile2.getAbsolutePath());
                } finally {
                  if (nc1 != null) {
                    try {
                      nc1.close();
                    } catch (IOException e) {
                      log.error("Problem closing netcdf file", e);
                    }
                  }
                  if (nc2 != null) {
                    try {
                      nc2.close();
                    } catch (IOException e) {
                      log.error("Problem closing netcdf file", e);
                    }
                  }
                }
              }
            });
        /*
         * Now create the NcML string and use it to create an
         * aggregated dataset
         */
        StringBuffer ncmlStringBuffer = new StringBuffer();
        ncmlStringBuffer.append(
            "<netcdf xmlns=\"http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2\">");
        ncmlStringBuffer.append(
            "<aggregation dimName=\"" + timeDimName + "\" type=\"joinExisting\">");
        for (File file : files) {
          ncmlStringBuffer.append("<netcdf location=\"" + file.getAbsolutePath() + "\"/>");
        }
        ncmlStringBuffer.append("</aggregation>");
        ncmlStringBuffer.append("</netcdf>");
        ncmlString = ncmlStringBuffer.toString();
      }
      nc = NcMLReader.readNcML(new StringReader(ncmlString), null);
    }
  }
  return nc;
}
/** create a NetcdfDataset out of this NetcdfFile, adding coordinates etc. */ public void augmentDataset(NetcdfDataset ds, CancelTask cancelTask) throws IOException { // latitude if (!hasAxisType(ds, AxisType.Lat)) { // already has _CoordinateAxisType if (!addAxisType(ds, "latitude", AxisType.Lat)) { // directly named String vname = ds.findAttValueIgnoreCase(null, "latitude_coordinate", null); if (!addAxisType(ds, vname, AxisType.Lat)) { // attribute named Variable v = hasUnits(ds, "degrees_north,degrees_N,degreesN,degree_north,degree_N,degreeN"); if (v != null) addAxisType(v, AxisType.Lat); // CF-1 } } } // longitude if (!hasAxisType(ds, AxisType.Lon)) { // already has _CoordinateAxisType if (!addAxisType(ds, "longitude", AxisType.Lon)) { // directly named String vname = ds.findAttValueIgnoreCase(null, "longitude_coordinate", null); if (!addAxisType(ds, vname, AxisType.Lon)) { // attribute named Variable v = hasUnits(ds, "degrees_east,degrees_E,degreesE,degree_east,degree_E,degreeE"); if (v != null) addAxisType(v, AxisType.Lon); // CF-1 } } } // altitude if (!hasAxisType(ds, AxisType.Height)) { // already has _CoordinateAxisType if (!addAxisType(ds, "altitude", AxisType.Height)) { // directly named if (!addAxisType(ds, "depth", AxisType.Height)) { // directly named String vname = ds.findAttValueIgnoreCase(null, "altitude_coordinate", null); if (!addAxisType(ds, vname, AxisType.Height)) { // attribute named for (int i = 0; i < ds.getVariables().size(); i++) { VariableEnhanced ve = (VariableEnhanced) ds.getVariables().get(i); String positive = ds.findAttValueIgnoreCase((Variable) ve, "positive", null); if (positive != null) { addAxisType((Variable) ve, AxisType.Height); // CF-1 break; } } } } } } // time if (!hasAxisType(ds, AxisType.Time)) { // already has _CoordinateAxisType if (!addAxisType(ds, "time", AxisType.Time)) { // directly named String vname = ds.findAttValueIgnoreCase(null, "time_coordinate", null); if (!addAxisType(ds, vname, AxisType.Time)) { // 
attribute named for (int i = 0; i < ds.getVariables().size(); i++) { VariableEnhanced ve = (VariableEnhanced) ds.getVariables().get(i); String unit = ve.getUnitsString(); if (unit == null) continue; if (SimpleUnit.isDateUnit(unit)) { addAxisType((Variable) ve, AxisType.Time); // CF-1 break; } } } } } }