private Set<Long> parseMechanisms(String keyword) throws IOException {
    checkDup(keyword);
    Set<Long> mechs = new HashSet<Long>();
    parseEquals();
    parseOpenBraces();
    while (true) {
        int token = nextToken();
        if (isCloseBraces(token)) {
            break;
        }
        if (token == TT_EOL) {
            continue;
        }
        if (token != TT_WORD) {
            throw excToken("Expected mechanism, read");
        }
        long mech = parseMechanism(st.sval);
        mechs.add(Long.valueOf(mech));
    }
    if (DEBUG) {
        System.out.print("mechanisms: [");
        for (Long mech : mechs) {
            System.out.print(Functions.getMechanismName(mech));
            System.out.print(", ");
        }
        System.out.println("]");
    }
    return mechs;
}
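// Illustrative sketch (not from the original source) of the brace-delimited block that
// parseMechanisms() consumes from a provider config file; the keyword and entries below are
// hypothetical. Each entry may be a symbolic mechanism name or a numeric ID, resolved by
// parseMechanism() via Functions.getMechanismId() or decodeNumber() respectively:
//
//   enabledMechanisms = {
//     CKM_RSA_PKCS
//     CKM_RSA_PKCS_KEY_PAIR_GEN
//     0x00000251
//   }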
private long parseObjectClass() throws IOException {
    String name = parseWord();
    try {
        return Functions.getObjectClassId(name);
    } catch (IllegalArgumentException e) {
        throw excLine("Unknown object class " + name);
    }
}

private long decodeAttributeName(String name) throws IOException {
    if (isNumber(name)) {
        return decodeNumber(name);
    } else {
        try {
            return Functions.getAttributeId(name);
        } catch (IllegalArgumentException e) {
            throw excLine("Unknown attribute name " + name);
        }
    }
}

private long parseMechanism(String mech) throws IOException {
    if (isNumber(mech)) {
        return decodeNumber(mech);
    } else {
        try {
            return Functions.getMechanismId(mech);
        } catch (IllegalArgumentException e) {
            throw excLine("Unknown mechanism: " + mech);
        }
    }
}

private long parseKeyAlgorithm() throws IOException {
    String name = parseWord();
    if (isNumber(name)) {
        return decodeNumber(name);
    } else {
        try {
            return Functions.getKeyId(name);
        } catch (IllegalArgumentException e) {
            throw excLine("Unknown key algorithm " + name);
        }
    }
}
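// Note on the helpers above: decodeAttributeName(), parseMechanism(), and parseKeyAlgorithm()
// all accept either a symbolic PKCS#11 name or a raw numeric ID (via isNumber()/decodeNumber()),
// so a config may refer to the same mechanism, attribute, or key algorithm by name or by its
// hexadecimal constant interchangeably.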
/**
 * Save a generated biomass dataset to an HDF5 file in the output directory given by the
 * outputDir attribute.
 *
 * @param biomass The generated biomass as a (num_timesteps) x (num_nodes) array
 * @param nodeIDs The node IDs. The order must correspond to the columns of the biomass array
 * @param matchingTimesteps The matching timesteps returned by findMatchingTimesteps()
 * @param nodeConfig The node configuration string used to generate the data
 * @param numTimesteps The number of time steps of biomass data to save
 */
private void saveHDF5OutputFile(
        double[][] biomass,
        int[] nodeIDs,
        int[] matchingTimesteps,
        String nodeConfig,
        int numTimesteps) {

    // Determine the filename
    File file = Functions.getNewOutputFile(new File(outputDir), "ATN", ".h5");
    System.out.println("Writing output to " + file.toString());

    // Write the data to the output file
    IHDF5Writer writer = HDF5Factory.configure(file).writer();
    if (Constants.ROUND_BIOMASS) {
        // Scale biomass for consistency with CSV output.
        // Round and cast to 32-bit integers to facilitate deflate compression.
        // Note: there is technically a risk of integer overflow,
        // but it won't happen unless scaled biomass exceeds 2 billion.
        int[][] scaledBiomass = new int[numTimesteps][nodeIDs.length];
        for (int t = 0; t < numTimesteps; t++) {
            for (int i = 0; i < nodeIDs.length; i++) {
                scaledBiomass[t][i] = (int) Math.round(biomass[t][i] * Constants.BIOMASS_SCALE);
            }
        }
        writer.int32().writeMatrix("biomass", scaledBiomass, HDF5IntStorageFeatures.INT_DEFLATE);
    } else {
        // Scale biomass for consistency with CSV output, but do not round.
        double[][] scaledBiomass = new double[numTimesteps][nodeIDs.length];
        for (int t = 0; t < numTimesteps; t++) {
            for (int i = 0; i < nodeIDs.length; i++) {
                scaledBiomass[t][i] = biomass[t][i] * Constants.BIOMASS_SCALE;
            }
        }
        writer.float64().writeMatrix("biomass", scaledBiomass);
    }
    writer.writeIntArray("node_ids", nodeIDs);
    if (matchingTimesteps != null) {
        writer.writeIntArray("matching_timesteps", matchingTimesteps);
    }
    writer.string().setAttr("/", "node_config", nodeConfig);
    writer.close();
}
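// Minimal read-back sketch for a file produced by saveHDF5OutputFile() above. This is not part
// of the original class; it assumes the JHDF5 reader API (ch.systemsx.cisd.hdf5.IHDF5Reader and
// HDF5Factory.openForReading) is available alongside the writer API already used here, and that
// Constants.ROUND_BIOMASS was true when the file was written (so "biomass" is an int32 matrix).
private static void readHDF5OutputExample(File file) {
    IHDF5Reader reader = HDF5Factory.openForReading(file);
    try {
        int[][] scaledBiomass = reader.int32().readMatrix("biomass"); // (num_timesteps) x (num_nodes)
        int[] nodeIDs = reader.readIntArray("node_ids");
        String nodeConfig = reader.string().getAttr("/", "node_config");
        System.out.println("Read " + scaledBiomass.length + " timesteps for " + nodeIDs.length
                + " nodes (node_config: " + nodeConfig + ")");
    } finally {
        reader.close();
    }
}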
@Override
public String getHelpFile() {
    return Functions.getResourcePath() + "/plugin/android-device/help-buildConfig.html";
}
private void initOutputStreams() {
    System.out.println("Ecosystem output will be written to:");
    System.out.println("Network output will be written to:");
    // psATN = Functions.getPrintStream("ATN", userInput.destDir);
    psATN = Functions.getPrintStream("ATN", outputDir);
}
// loop through current job/results, assembling dataset
private HashMap<Integer, SpeciesZoneType> genSpeciesDataset(
        SimJob job,
        EcosystemTimesteps ecosysTimesteps,
        Map<Integer, NodeRelationships> ecosysRelationships) {

    // calc information relevant to entire ecosystem
    int speciesCnt = ecosysTimesteps.getNodeList().size(); // Number of species
    int timesteps = ecosysTimesteps.getTimesteps(); // Maximum number of timesteps to run simulation
    int timestepsToSave = 0; // Number of timesteps of data to save to output file
    int[] matchingTimesteps = null; // Array of matching timesteps returned by findMatchingTimesteps()

    // read in link parameters; this was explicitly configured to allow
    // manipulation of link parameter values, but no manipulation is
    // performed in this version
    LinkParams lPs = new LinkParams(propertiesConfig);

    // loop through node values and assemble summary data
    int[] speciesID = new int[speciesCnt];
    SimJobSZT[] sztArray = new SimJobSZT[speciesCnt];
    int spNum = 0;
    for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
        SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(nodeTimesteps.getNodeId());
        sztArray[spNum] = sjSzt;
        speciesID[spNum] = sjSzt.getNodeIndex();
        spNum++;
    }

    // define objects to track species' contributions
    double[][][] contribs = new double[timesteps][speciesCnt][speciesCnt];
    double[][] calcBiomass = new double[timesteps][speciesCnt];
    double[][] contribsT; // current timestep

    // note: WebServices ATN Model uses B0 with default = 0.5. This presumes
    // that biomasses are small, i.e. < 1.0. Division by biomassScale
    // here is consistent with usage in WoB_Server.SimulationEngine to
    // normalize biomasses.
    // need to store bm as it varies over time through integration;
    // start with initial bm for each species
    double[] currBiomass = new double[speciesCnt];
    for (int i = 0; i < speciesCnt; i++) {
        NodeTimesteps nodeTimeSteps = ecosysTimesteps.getTimestepMap().get(speciesID[i]);
        // manually set biomass vals for excluded initial timesteps; this
        // includes the first value to be used as input
        currBiomass[i] = nodeTimeSteps.getBiomass(initTimeIdx) / biomassScale;
        calcBiomass[0][i] = currBiomass[i];
    }

    if (Constants.useCommonsMathIntegrator) {

        // Use Apache Commons Math GraggBulirschStoerIntegrator
        FirstOrderIntegrator integrator = new GraggBulirschStoerIntegrator(
                1.0e-8, // minimal step
                100.0, // maximal step
                ATNEquations.EXTINCT, // allowed absolute error
                1.0e-10); // allowed relative error

        // Set up the ATN equations based on the current food web and parameters
        ATNEquations ode = new ATNEquations(sztArray, ecosysRelationships, lPs);

        ATNEventHandler eventHandler = new ATNEventHandler(ode);
        // FIXME: Choose best parameter values
        integrator.addEventHandler(
                new EventFilter(eventHandler, FilterType.TRIGGER_ONLY_DECREASING_EVENTS),
                1, // maximal time interval between switching function checks (this interval
                   // prevents missing sign changes in case the integration step becomes very large)
                0.0001, // convergence threshold in the event time search
                1000, // upper limit of the iteration count in the event time search
                new BisectionSolver());

        // Set up the StepHandler, which is triggered at each time step by the integrator,
        // and copies the current biomass of each species into calcBiomass[timestep].
        // See the "Continuous Output" section of
        // https://commons.apache.org/proper/commons-math/userguide/ode.html
        FixedStepHandler fixedStepHandler = new FixedStepHandler() {
            private int timestep = 0;

            public void init(double t0, double[] y0, double t) {}

            public void handleStep(double t, double[] y, double[] yDot, boolean isLast) {
                // Ensure we don't go past the last time step due to rounding error
                if (timestep < calcBiomass.length) {
                    System.arraycopy(y, 0, calcBiomass[timestep], 0, speciesCnt);
                }
                timestep++;
            }
        };
        StepHandler stepHandler = new StepNormalizer(timeIntvl, fixedStepHandler);
        integrator.addStepHandler(stepHandler);

        // Run the integrator to compute the biomass time series
        integrator.integrate(ode, 0.0, currBiomass, timeIntvl * timesteps, currBiomass);

        if (eventHandler.integrationWasStopped()) {
            timestepsToSave = (int) (eventHandler.getTimeStopped() / timeIntvl);
        } else {
            // Check for an oscillating steady state,
            // and only save the data through the first period of the oscillation
            matchingTimesteps = findMatchingTimesteps(calcBiomass, timesteps - 1);
            System.err.println("\nmatchingTimesteps = " + Arrays.toString(matchingTimesteps));

            // Save timesteps up through the second matching timestep,
            // or all timesteps if there was no second matching timestep.
            if (matchingTimesteps[1] != -1) {
                timestepsToSave = matchingTimesteps[1] + 1;
            } else {
                timestepsToSave = timesteps;
            }
        }

    } else {
        // Use BulirschStoerIntegration

        // create integration object
        boolean isTest = false;
        BulirschStoerIntegration bsi = new BulirschStoerIntegration(
                timeIntvl, speciesID, sztArray, ecosysRelationships, lPs, maxBSIErr, equationSet);

        // calculate delta-biomass and biomass "contributions" from each related
        // species
        for (int t = initTimeIdx + 1; t < timesteps; t++) {
            boolean success = bsi.performIntegration(time(initTime, t), currBiomass);
            if (!success) {
                System.out.printf("Integration failed to converge, t = %d\n", t);
                System.out.print(bsi.extrapArrayToString(biomassScale));
                break;
            }
            currBiomass = bsi.getYNew();
            System.arraycopy(currBiomass, 0, calcBiomass[t], 0, speciesCnt);

            contribsT = bsi.getContribs();
            for (int i = 0; i < speciesCnt; i++) {
                System.arraycopy(contribsT[i], 0, contribs[t - 1][i], 0, speciesCnt);
            }
        } // timestep loop
    }

    if (useHDF5) {
        saveHDF5OutputFile(
                calcBiomass, speciesID, matchingTimesteps, job.getNode_Config(), timestepsToSave);
        return null;
    }

    double[][] webServicesData = new double[speciesCnt][timesteps];
    if (Constants.useSimEngine) { // We need the webServicesData only for marginOfErrorCalculation
        // extract timestep data from CSV
        Functions.extractCSVDataRelns(job.getCsv(), ecosysTimesteps, ecosysRelationships);
        spNum = 0;
        for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
            // copy nodetimestep data to local array for easier access
            System.arraycopy(
                    nodeTimesteps.getBiomassArray(), 0, webServicesData[spNum], 0, timesteps);
            spNum++;
        }
    }

    // output data
    // A. print header
    psATN.printf("timesteps");
    for (int i = 0; i < timesteps; i++) {
        psATN.printf(",%d", i);
    }
    psATN.println();

    /* Convert to CSV String */
    String biomassCSV = "";
    biomassCSV = "Manipulation_id: " + job.getATNManipulationId() + "\n\n";

    int maxTimestep = job.getTimesteps();
    // Create Timestep Labels
    for (int j = 1; j <= maxTimestep; j++) {
        biomassCSV += "," + j;
    }

    HashMap<Integer, SpeciesZoneType> mSpecies = new HashMap<Integer, SpeciesZoneType>();

    // loop through each species
    for (int i = 0; i < speciesCnt; i++) {
        if (Constants.useSimEngine) {
            psATN.printf("i.%d.sim", speciesID[i]);

            // B. print WebServices simulation data for species
            for (int t = 0; t < timesteps; t++) {
                psATN.printf(",%9.0f", webServicesData[i][t]);
            }
            psATN.println();
        }

        // B. print combined biomass contributions (i.e. locally calculated biomass)
        // for current species.
        psATN.printf("i.%d.calc", speciesID[i]);
        for (int t = 0; t < timesteps; t++) {
            psATN.printf(",%9.0f", calcBiomass[t][i] * biomassScale);
        }
        psATN.println();

        // // C. print individual biomass contributions from other species
        // for (int j = 0; j < speciesCnt; j++) {
        //     psATN.printf("i.%d.j.%d.", speciesID[i], speciesID[j]);
        //     for (int t = 0; t < timesteps; t++) {
        //         psATN.printf(",%9.0f", contribs[t][i][j] * biomassScale);
        //     }
        //     psATN.println();
        // }

        float extinction = 1.E-15f;
        SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(speciesID[i]);
        // add nodes to list in the order that they are received from infos
        String name = sjSzt.getName().replaceAll(",", " ") + " [" + sjSzt.getNodeIndex() + "]";
        String tempStr = name;
        for (int t = 0; t < maxTimestep; t++) {
            tempStr += ",";
            double biomass = calcBiomass[t][i] * biomassScale;
            if (biomass > 0) {
                tempStr += biomass > extinction ? Math.ceil(biomass) : 0;
            }
            if (t == maxTimestep - 1) {
                SpeciesZoneType szt = null;
                if (!mSpecies.containsKey(sjSzt.getNodeIndex())) {
                    szt = new SpeciesZoneType(
                            sjSzt.getName(), sjSzt.getNodeIndex(), 0, 0, biomass, null);
                    mSpecies.put(sjSzt.getNodeIndex(), szt);
                } else {
                    // update existing species current biomass
                    szt = mSpecies.get(sjSzt.getNodeIndex());
                    szt.setCurrentBiomass(biomass);
                }
            }
        }
        biomassCSV += "\n" + tempStr;
    }

    // Append node config to the ATN CSV
    psATN.println();
    psATN.println("\"node-config: " + job.getNode_Config() + "\"");

    biomassCSV += "\n\n";
    biomassCSV += job.getConsumeMap().toString() + "\n\n";
    biomassCSV += job.getPathTable().toString();

    job.setBiomassCsv(biomassCSV);

    // System.out.println(biomassCSV);

    return mSpecies;
}
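// For reference (derived from the code above, not from separate documentation), the ATN CSV
// written to psATN by genSpeciesDataset() has this layout:
//
//   timesteps,0,1,2,...             header row of timestep indices
//   i.<nodeID>.sim,<v>,<v>,...      WebServices biomass per timestep (only if Constants.useSimEngine)
//   i.<nodeID>.calc,<v>,<v>,...     locally calculated biomass (scaled by biomassScale) per timestep
//   ...                             one .sim/.calc row pair (or a single .calc row) per species
//   "node-config: <nodeConfig>"     trailing node configuration line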