Example #1
  public void updateBiomass(
      Ecosystem ecosystem, Map<Integer, SpeciesZoneType> nextSpeciesNodeList) {
    for (Entry<Integer, SpeciesZoneType> entry : nextSpeciesNodeList.entrySet()) {
      SpeciesZoneType szt = entry.getValue();
      int biomassValue = (int) szt.getCurrentBiomass();
      Species species = ecosystem.getSpecies(szt.getSpeciesType().getID());
      for (SpeciesGroup group : species.getGroups().values()) {
        group.setBiomass(biomassValue);

        EcoSpeciesDAO.updateBiomass(group.getID(), group.getBiomass());
      }
    }
  }
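A minimal usage sketch for Example #1, assuming a prediction result keyed by node ID; the applyPrediction wrapper is hypothetical and not part of the original code.
  // Hypothetical caller: pushes a set of predicted biomass values back into an ecosystem
  // and persists them via the method above.
  public void applyPrediction(Ecosystem ecosystem, Map<Integer, SpeciesZoneType> prediction) {
    if (prediction != null && !prediction.isEmpty()) {
      updateBiomass(ecosystem, prediction);
    }
  }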
Example #2
 public void setSpeciesBiomass(
     SpeciesZoneType szt, double perSpeciesBiomass, String ecosystemManipulationId) {
   // (was: int node_id, int perSpeciesBiomass)
   // On the first call, ecosystemManipulationId may be null; it is not used here.
   if (szt != null) {
     szt.setPerSpeciesBiomass(perSpeciesBiomass);
   }
 }
Example #3
  /**
   * Add multiple new nodes (SpeciesZoneType objects) to a manipulation and assemble the node
   * configuration for submission. HJR
   *
   * @param manipSpeciesMap species being added
   * @param fullSpeciesMap full species list; used for predator/prey info
   * @param timestep timestep index passed on to setSystemParameters()
   * @param isFirstManipulation whether this is the first manipulation
   * @param networkOrManipulationId network or manipulation ID
   * @return the assembled node configuration string
   */
  public String addMultipleSpeciesType(
      HashMap<Integer, SpeciesZoneType> manipSpeciesMap,
      HashMap<Integer, SpeciesZoneType> fullSpeciesMap,
      int timestep,
      boolean isFirstManipulation,
      String networkOrManipulationId) {

    // Example node config submitted for a 5-species ecosystem:
    // job.setNode_Config("5,
    //   [5],2000,1.000,1,K=9431.818,0,
    //   [14],1751,20.000,1,X=0.273,0,
    //   [31],1415,0.008,1,X=1.000,0,
    //   [42],240,0.205,1,X=0.437,0,
    //   [70],2494,13.000,1,X=0.155,0");

    // Corresponding sample log output:
    //   In addMultipleSpeciesType: node [70], biomass 2494, K = -1, R = -1.0000, X = 0.1233
    //   In addMultipleSpeciesType: node [5], biomass 2000, K = 10000, R = 1.0000, X = 0.5000
    //   In addMultipleSpeciesType: node [42], biomass 240, K = -1, R = -1.0000, X = 0.3478
    //   In addMultipleSpeciesType: node [31], biomass 1415, K = -1, R = -1.0000, X = 0.7953
    //   In addMultipleSpeciesType: node [14], biomass 1752, K = -1, R = -1.0000, X = 0.0010
    StringBuilder builder = new StringBuilder();
    builder.append(fullSpeciesMap.size()).append(",");
    for (SpeciesZoneType species : fullSpeciesMap.values()) {
      System.out.printf(
          "In addMultipleSpeciesType: node [%d], biomass %d, K = %d, R = %6.4f, X = %6.4f\n",
          species.getNodeIndex(),
          (int) species.getCurrentBiomass(),
          (int) species.getParamK(),
          species.getParamR(),
          species.getParamX());

      builder.append("[").append(species.getNodeIndex()).append("]").append(",");
      builder.append((int) species.getCurrentBiomass()).append(",");
      builder.append(roundToThreeDigits(species.getPerSpeciesBiomass())).append(",");

      String systemParam = this.setSystemParameters(species, fullSpeciesMap, timestep);
      builder.append(systemParam);
      System.out.println(builder);
    }
    String node_config = builder.substring(0, builder.length() - 1);
    // call the sim job processing here
    return node_config;
  }
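A minimal sketch, assuming the same class, of how the returned node-config string might be handed to a SimJob; buildSampleNodeConfig is hypothetical, while SimJob.setNode_Config appears in the comments above.
  // Hypothetical helper: assemble a node-config string and attach it to a SimJob,
  // mirroring the commented job.setNode_Config(...) example above.
  public String buildSampleNodeConfig(
      HashMap<Integer, SpeciesZoneType> fullSpeciesMap, String manipulationId) {
    String nodeConfig =
        addMultipleSpeciesType(fullSpeciesMap, fullSpeciesMap, 0, true, manipulationId);
    SimJob job = new SimJob();
    // e.g. "5,[5],2000,1.000,1,K=9431.818,0,...,[70],2494,13.000,1,X=0.155,0"
    job.setNode_Config(nodeConfig);
    return nodeConfig;
  }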
Example #4
  /* 5/5/14, JTC: added persistent species data for players; the system parameter
  masterSpeciesList replaces mSpecies.
  Get previous timestep biomass for all species from the web service. */
  public HashMap<Integer, SpeciesZoneType> getPrediction(
      String networkOrManipulationId,
      int startTimestep,
      int runTimestep,
      Map<Integer, Integer> addSpeciesNodeList,
      ZoneNodes zoneNodes)
      throws SimulationException {
    long milliseconds = System.currentTimeMillis();

    Log.printf("\nPrediction at %d\n", startTimestep);

    // Get previous timestep biomass for all species from web service
    // JTC, use new HashMap containing all current settings from zoneNodes, masterSpeciesList
    // HJR: changed to copy the node map here (a shallow copy via the HashMap constructor);
    // was getting a null while iterating otherwise
    HashMap<Integer, SpeciesZoneType> masterSpeciesList =
        new HashMap<Integer, SpeciesZoneType>(zoneNodes.getNodes());

    HashMap<Integer, SpeciesZoneType> mNewSpecies = new HashMap<Integer, SpeciesZoneType>();
    // JTC, mUpdateBiomass renamed from mUpdateSpecies
    HashMap<Integer, SpeciesZoneType> mUpdateBiomass = new HashMap<Integer, SpeciesZoneType>();
    // JTC, added new update type, mUpdateParams
    HashMap<Integer, SpeciesZoneType> mUpdateParams = new HashMap<Integer, SpeciesZoneType>();

    SpeciesZoneType szt;
    String nodeConfig = null;
    SimJob job = new SimJob();
    // {70=2494, 5=2000, 42=240, 14=1752, 31=1415}
    for (int node_id : addSpeciesNodeList.keySet()) {
      int addedBiomass = addSpeciesNodeList.get(node_id);

      if (!masterSpeciesList.containsKey(node_id)) {
        szt = createSpeciesZoneType(node_id, addedBiomass);
        mNewSpecies.put(node_id, szt);
        // jtc - 04/19/15
        masterSpeciesList.put(node_id, szt);
      } else {
        szt = masterSpeciesList.get(node_id);

        szt.setCurrentBiomass(Math.max(0, szt.getCurrentBiomass() + addedBiomass));
        szt.setBiomassUpdated(true);
      }
    }

    //      //JTC, separated this to capture biomass updates made to ZoneNodes that
    //      //are not received through addSpeciesNodeList (biomass and param updates)
    //      for (SpeciesZoneType species : masterSpeciesList.values()) {
    //          //param update also updates biomass, so insert into that list
    //          //preferentially; o/w use biomass update list
    //          if (species.paramUpdated) {
    //              mUpdateParams.put(species.getNodeIndex(), species);
    //              species.setParamUpdated(false);
    //          } else if (species.biomassUpdated) {
    //              mUpdateBiomass.put(species.getNodeIndex(), species);
    //              species.setBiomassUpdated(false);
    //          }
    //      }

    // Insert new species using web services
    if (!mNewSpecies.isEmpty()) {
      zoneNodes.addNodes(mNewSpecies);
    }
    try {
      nodeConfig =
          addMultipleSpeciesType(
              mNewSpecies, masterSpeciesList, startTimestep, false, networkOrManipulationId);
    } catch (Exception ex) {
      Log.println_e(ex.getMessage());
    }
    //      // Update biomass changes to existing species using web services
    //      if (!mUpdateBiomass.isEmpty()) {
    //          List<NodeBiomass> lNodeBiomass = new ArrayList<NodeBiomass>();
    //          for (SpeciesZoneType s : mUpdateBiomass.values()) {
    //              Log.printf("Updating Biomass: [%d] %s %f\n", s.getNodeIndex(), s.getName(),
    //                      s.getCurrentBiomass() / Constants.BIOMASS_SCALE);
    //              lNodeBiomass.add(new NodeBiomass(
    //                      s.getCurrentBiomass() / Constants.BIOMASS_SCALE, s.getNodeIndex()));
    //          }
    //          try {
    ////              updateBiomass(networkOrManipulationId, lNodeBiomass, startTimestep);
    //          } catch (Exception ex) {
    //              Log.println_e(ex.getMessage());
    //          }
    //      }

    //      // JTC Update changes to existing species parameters using web services (also
    //      // resubmits biomass, but couldn't find a way to do params w/o biomass
    //      if (!mUpdateParams.isEmpty()) {
    //          try {
    ////              increaseMultipleSpeciesType(
    ////                      mUpdateBiomass,
    ////                      masterSpeciesList,
    ////                      startTimestep,
    ////                      false,
    ////                      networkOrManipulationId
    ////              );
    //          } catch (Exception ex) {
    //              Log.println_e(ex.getMessage());
    //          }
    //      }

    //      run(startTimestep, runTimestep, networkOrManipulationId);

    // get new predicted biomass
    try {
      // JTC - changed variable from "mSpecies = " to "mUpdateBiomass = "
      // mUpdateBiomass = getBiomass(networkOrManipulationId, 0, startTimestep + runTimestep);
      if (!masterSpeciesList.isEmpty() || !mNewSpecies.isEmpty()) {
        mUpdateBiomass =
            submitManipRequest("ATN", nodeConfig, startTimestep + runTimestep, false, null);
      }
    } catch (Exception ex) {
      Log.println_e(ex.getMessage());
      return null;
    }
    //      getBiomassInfo(networkOrManipulationId);

    // JTC - add loop to update persistent player species biomass information
    SpeciesZoneType updS;
    for (SpeciesZoneType priorS : masterSpeciesList.values()) {
      System.out.println("priorS.nodeIndex " + priorS.nodeIndex);
      updS = mUpdateBiomass.get(priorS.nodeIndex);
      if (updS != null && updS.currentBiomass != 0) {
        masterSpeciesList
            .get(priorS.nodeIndex)
            .setCurrentBiomass(Math.ceil(updS.getCurrentBiomass()));
      }
      //          else {
      //              zoneNodes.removeNode(priorS.nodeIndex);
      //          }
    }

    Log.printf(
        "Total Time (Get Prediction): %.2f seconds",
        Math.round((System.currentTimeMillis() - milliseconds) / 10.0) / 100.0);

    return (HashMap<Integer, SpeciesZoneType>) zoneNodes.getNodes();
  }
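A minimal sketch of a call to getPrediction(), using the sample node/biomass pairs from the inline comment above; runSamplePrediction is hypothetical and the ZoneNodes instance is assumed to come from the surrounding session code.
  // Hypothetical caller: request a one-timestep prediction for the sample species set
  // {70=2494, 5=2000, 42=240, 14=1752, 31=1415} noted in the comment above.
  public Map<Integer, SpeciesZoneType> runSamplePrediction(
      String manipulationId, ZoneNodes zoneNodes) throws SimulationException {
    Map<Integer, Integer> addSpeciesNodeList = new HashMap<Integer, Integer>();
    addSpeciesNodeList.put(5, 2000);
    addSpeciesNodeList.put(14, 1752);
    addSpeciesNodeList.put(31, 1415);
    addSpeciesNodeList.put(42, 240);
    addSpeciesNodeList.put(70, 2494);
    // run one timestep starting at timestep 0
    return getPrediction(manipulationId, 0, 1, addSpeciesNodeList, zoneNodes);
  }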
Example #5
  // loop through current job/results, assembling dataset
  private HashMap<Integer, SpeciesZoneType> genSpeciesDataset(
      SimJob job,
      EcosystemTimesteps ecosysTimesteps,
      Map<Integer, NodeRelationships> ecosysRelationships) {
    // calc information relevant to entire ecosystem
    int speciesCnt = ecosysTimesteps.getNodeList().size(); // Number of species
    int timesteps = ecosysTimesteps.getTimesteps(); // Maximum number of timesteps to run simulation
    int timestepsToSave = 0; // Number of timesteps of data to save to output file
    int[] matchingTimesteps =
        null; // Array of matching timesteps returned by findMatchingTimesteps()

    // read in link parameters; this was explicitly configured to allow
    // manipulation of link parameter values, but no manipulation is
    // performed in this version
    LinkParams lPs = new LinkParams(propertiesConfig);

    // loop through node values and assemble summary data
    int[] speciesID = new int[speciesCnt];
    SimJobSZT[] sztArray = new SimJobSZT[speciesCnt];
    int spNum = 0;
    for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
      SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(nodeTimesteps.getNodeId());
      sztArray[spNum] = sjSzt;
      speciesID[spNum] = sjSzt.getNodeIndex();
      spNum++;
    }

    // define objects to track species' contributions
    double[][][] contribs = new double[timesteps][speciesCnt][speciesCnt];
    double[][] calcBiomass = new double[timesteps][speciesCnt];
    double[][] contribsT; // current timestep

    // note: WebServices ATN Model uses B0 with default = 0.5.  This presumes
    // that biomasses are small, i.e. < 1.0.  Division by biomassScale
    // here is consistent with usage in WoB_Server.SimulationEngine to
    // normalize biomasses.
    // need to store bm as it varies over time through integration;
    // start with initial bm for each species
    double[] currBiomass = new double[speciesCnt];
    for (int i = 0; i < speciesCnt; i++) {
      NodeTimesteps nodeTimeSteps = ecosysTimesteps.getTimestepMap().get(speciesID[i]);
      // manually set biomass vals for excluded initial timesteps; this
      // includes the first value to be used as input
      currBiomass[i] = nodeTimeSteps.getBiomass(initTimeIdx) / biomassScale;
      calcBiomass[0][i] = currBiomass[i];
    }

    if (Constants.useCommonsMathIntegrator) {

      // Use Apache Commons Math GraggBulirschStoerIntegrator

      FirstOrderIntegrator integrator =
          new GraggBulirschStoerIntegrator(
              1.0e-8, // minimal step
              100.0, // maximal step
              ATNEquations.EXTINCT, // allowed absolute error
              1.0e-10); // allowed relative error

      // Set up the ATN equations based on the current food web and parameters
      ATNEquations ode = new ATNEquations(sztArray, ecosysRelationships, lPs);

      ATNEventHandler eventHandler = new ATNEventHandler(ode);
      // FIXME: Choose best parameter values
      integrator.addEventHandler(
          new EventFilter(eventHandler, FilterType.TRIGGER_ONLY_DECREASING_EVENTS),
          1, // maximal time interval between switching function checks (this interval prevents
          // missing sign changes in case the integration steps becomes very large)
          0.0001, // convergence threshold in the event time search
          1000, // upper limit of the iteration count in the event time search
          new BisectionSolver());

      // Set up the StepHandler, which is triggered at each time step by the integrator,
      // and copies the current biomass of each species into calcBiomass[timestep].
      // See the "Continuous Output" section of
      // https://commons.apache.org/proper/commons-math/userguide/ode.html
      FixedStepHandler fixedStepHandler =
          new FixedStepHandler() {
            public void init(double t0, double[] y0, double t) {}

            private int timestep = 0;

            public void handleStep(double t, double[] y, double[] yDot, boolean isLast) {
              // Ensure we don't go past the last time step due to rounding error
              if (timestep < calcBiomass.length) {
                System.arraycopy(y, 0, calcBiomass[timestep], 0, speciesCnt);
              }
              timestep++;
            }
          };
      StepHandler stepHandler = new StepNormalizer(timeIntvl, fixedStepHandler);
      integrator.addStepHandler(stepHandler);

      // Run the integrator to compute the biomass time series
      integrator.integrate(ode, 0.0, currBiomass, timeIntvl * timesteps, currBiomass);
      if (eventHandler.integrationWasStopped()) {
        timestepsToSave = (int) (eventHandler.getTimeStopped() / timeIntvl);
      } else {
        // Check for an oscillating steady state,
        // and only save the data through the first period of the oscillation
        matchingTimesteps = findMatchingTimesteps(calcBiomass, timesteps - 1);
        System.err.println("\nmatchingTimesteps =  " + Arrays.toString(matchingTimesteps));

        // Save timesteps up through the second matching timestep,
        // or all timesteps if there was no second matching timestep.
        if (matchingTimesteps[1] != -1) {
          timestepsToSave = matchingTimesteps[1] + 1;
        } else {
          timestepsToSave = timesteps;
        }
      }

    } else {

      // Use BulirschStoerIntegration

      // create integration object
      boolean isTest = false;
      BulirschStoerIntegration bsi =
          new BulirschStoerIntegration(
              timeIntvl, speciesID, sztArray, ecosysRelationships, lPs, maxBSIErr, equationSet);

      // calculate delta-biomass and biomass "contributions" from each related
      // species
      for (int t = initTimeIdx + 1; t < timesteps; t++) {
        boolean success = bsi.performIntegration(time(initTime, t), currBiomass);
        if (!success) {
          System.out.printf("Integration failed to converge, t = %d\n", t);
          System.out.print(bsi.extrapArrayToString(biomassScale));
          break;
        }
        currBiomass = bsi.getYNew();
        System.arraycopy(currBiomass, 0, calcBiomass[t], 0, speciesCnt);

        contribsT = bsi.getContribs();
        for (int i = 0; i < speciesCnt; i++) {
          System.arraycopy(contribsT[i], 0, contribs[t - 1][i], 0, speciesCnt);
        }
      } // timestep loop
    }

    if (useHDF5) {
      saveHDF5OutputFile(
          calcBiomass, speciesID, matchingTimesteps, job.getNode_Config(), timestepsToSave);
      return null;
    }

    double[][] webServicesData = new double[speciesCnt][timesteps];
    if (Constants.useSimEngine) { // We need the webServicesData only for marginOfErrorCalculation
      // extract timestep data from CSV
      Functions.extractCSVDataRelns(job.getCsv(), ecosysTimesteps, ecosysRelationships);
      spNum = 0;
      for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
        // copy nodetimestep data to local array for easier access
        System.arraycopy(nodeTimesteps.getBiomassArray(), 0, webServicesData[spNum], 0, timesteps);

        spNum++;
      }
    }
    // output data
    // A. print header
    psATN.printf("timesteps");
    for (int i = 0; i < timesteps; i++) {
      psATN.printf(",%d", i);
    }
    psATN.println();

    /* Convert to CSV String */
    String biomassCSV = "Manipulation_id: " + job.getATNManipulationId() + "\n\n";

    int maxTimestep = job.getTimesteps();
    // Create Timestep Labels
    for (int j = 1; j <= maxTimestep; j++) {
      biomassCSV += "," + j;
    }
    HashMap<Integer, SpeciesZoneType> mSpecies = new HashMap<Integer, SpeciesZoneType>();
    // loop through each species
    for (int i = 0; i < speciesCnt; i++) {
      if (Constants.useSimEngine) {
        psATN.printf("i.%d.sim", speciesID[i]);
        // B. print WebServices simulation data for species
        for (int t = 0; t < timesteps; t++) {
          psATN.printf(",%9.0f", webServicesData[i][t]);
        }
        psATN.println();
      }

      // B. print combined biomass contributions (i.e. locally calculated biomass)
      // for current species.
      psATN.printf("i.%d.calc", speciesID[i]);
      for (int t = 0; t < timesteps; t++) {
        psATN.printf(",%9.0f", calcBiomass[t][i] * biomassScale);
      }
      psATN.println();

      //           //C. print individual biomass contributions from other species
      //           for (int j = 0; j < speciesCnt; j++) {
      //               psATN.printf("i.%d.j.%d.", speciesID[i], speciesID[j]);
      //               for (int t = 0; t < timesteps; t++) {
      //                   psATN.printf(",%9.0f", contribs[t][i][j] * biomassScale);
      //               }
      //               psATN.println();
      //           }

      float extinction = 1.E-15f;
      SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(speciesID[i]);
      // add nodes to list in the order that they are received from infos
      String name = sjSzt.getName().replaceAll(",", " ") + " [" + sjSzt.getNodeIndex() + "]";
      String tempStr = name;
      for (int t = 0; t < maxTimestep; t++) {
        tempStr += ",";

        double biomass = calcBiomass[t][i] * biomassScale;

        if (biomass > 0) {
          tempStr += biomass > extinction ? Math.ceil(biomass) : 0;
        }

        if (t == maxTimestep - 1) {
          SpeciesZoneType szt = null;
          if (!mSpecies.containsKey(sjSzt.getNodeIndex())) {
            szt = new SpeciesZoneType(sjSzt.getName(), sjSzt.getNodeIndex(), 0, 0, biomass, null);
            mSpecies.put(sjSzt.getNodeIndex(), szt);

          } else { // update existing species current biomass
            szt = mSpecies.get(sjSzt.getNodeIndex());

            szt.setCurrentBiomass(biomass);
          }
        }
      }
      biomassCSV += "\n" + tempStr;
    }

    // Append node config to the ATN CSV
    psATN.println();
    psATN.println("\"node-config: " + job.getNode_Config() + "\"");

    biomassCSV += "\n\n";

    biomassCSV += job.getConsumeMap().toString() + "\n\n";

    biomassCSV += job.getPathTable().toString();

    job.setBiomassCsv(biomassCSV);

    // System.out.println(biomassCSV);
    return mSpecies;
  }
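A minimal sketch of consuming the map returned by genSpeciesDataset(); printSpeciesDataset is hypothetical, and the getters match those used elsewhere in these examples.
  // Hypothetical consumer: log each node's final calculated biomass from the returned dataset.
  private void printSpeciesDataset(HashMap<Integer, SpeciesZoneType> mSpecies) {
    if (mSpecies == null) {
      return; // genSpeciesDataset() returns null when the HDF5 output path is used
    }
    for (SpeciesZoneType szt : mSpecies.values()) {
      System.out.printf(
          "node [%d], final biomass %.0f\n", szt.getNodeIndex(), szt.getCurrentBiomass());
    }
  }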
Example #6
  /* Set all system parameters for a node (SpeciesZoneType) for a simulation run.
  HJR: the original version of this, getSystemParameter(), has problems with how it
  submits link parameters: it calls SZT.getlPreyIndex(), which is never populated
  (it is set by a prior call to SpeciesType.getPreyIndex, which returns an empty list),
  i.e. it never actually submits any link params, default or otherwise. */
  @SuppressWarnings("unused")
  private String setSystemParameters(
      SpeciesZoneType species, HashMap<Integer, SpeciesZoneType> fullSpeciesMap, int timestepIdx) {

    SpeciesTypeEnum type = species.getType();
    int nodeIdx = species.getNodeIndex();

    List<String> sParams = new ArrayList<String>();
    StringBuilder builder = new StringBuilder();
    if (type == SpeciesTypeEnum.PLANT) {
      // Carrying capacity (k) and growth rate (r) are only effective when the species is a plant.
      // A higher carrying capacity means higher biomass: for example, if the carrying capacity
      // is 10, the maximum biomass of the species is 10.
      // A higher growth rate means the species gains biomass faster.
      // Metabolic rate (x) is effective for both animals and plants; a higher metabolic rate
      // means the species' biomass decreases relative to other species.

      // YES, need to divide by Constants.BIOMASS_SCALE.
      setSystemParametersNode(
          sParams,
          timestepIdx,
          nodeIdx,
          species.getParamK(),
          ManipulatingParameterName.k,
          "carryingCapacityDefault");
      if (false) { // HJR Currently I have turned off R and X
        setSystemParametersNode(
            sParams,
            timestepIdx,
            nodeIdx,
            species.getParamR(),
            ManipulatingParameterName.r,
            "growthRateDefault");
        setSystemParametersNode(
            sParams,
            timestepIdx,
            nodeIdx,
            species.getParamX(),
            ManipulatingParameterName.x,
            "metabolicRateDefault");
      }
      // Pack everything
      // [5],2000,1.000,1,K=9431.818,0,
      // [K=10.0, R=1.0, X=0.5]
      builder.append(sParams.size()).append(",");
      for (int i = 0; i < sParams.size(); i++) {
        builder.append(sParams.get(i)).append(",");
      }
      builder.append("0").append(",");
    } else if (type == SpeciesTypeEnum.ANIMAL) {
      // Metabolic rate (x) is effective for both animals and plants; a higher metabolic rate
      // means the species' biomass decreases relative to other species.
      // Assimilation efficiency (e) is only available for animals; a higher assimilation
      // efficiency means the species' biomass increases.
      setSystemParametersNode(
          sParams,
          timestepIdx,
          nodeIdx,
          species.getParamX(),
          ManipulatingParameterName.x,
          "metabolicRateDefault");
      builder.append(sParams.size()).append(",");
      for (int i = 0; i < sParams.size(); i++) {
        builder.append(sParams.get(i));
        builder.append(",");
      }
      sParams.clear();
      // loop through prey, adding link parameters
      if (Integer.valueOf(propertiesConfig.getProperty("submitLinkParameterSettings")) == 1) {
        int preyCnt = species.getSpeciesType().getPreyNodeIDs().size();
        for (int preyIdx : species.getSpeciesType().getPreyNodeIDs()) {
          if (fullSpeciesMap == null || !fullSpeciesMap.containsKey(preyIdx)) {
            continue;
          }
          /* Separate note: there appears to be a limit on the number of link params
          that can be submitted, beyond which an Axis fault occurs.  The limit varies
          with the number of species in the ecosystem; in a test of a 15-species ecosystem,
          only 3 link params could be used.  All are disabled here, as they are not being
          used at this time.  Not fully evaluated, obviously, but these were not implemented
          at all previously.
          */

          /* default values that mimic web-services internal defaults are:
          predatorInterferenceDefault = 0
          assimilationEfficiencyAnimalDefault=1
          assimilationEfficiencyPlantDefault=1
          functionalResponseControlParameterDefault=0
          halfSaturationDensityDefault=0.5
          maximumIngestionRateDefault=6

          Consistent default values were not found for the following, and there is some
          confusion about their role. Based on prior code, parameter "a" matches
          relativeHalfSaturationDensityDefault.
          Parameter "a" does not appear in any of the equations I've seen (possibly
          omega, the consumption rate?), but it DOES impact the simulation results.
          No single value (0-1.0) gives results consistent with omitting the value,
          suggesting that species are distinguished somehow.  Animal/plant division
          was tested, but did not yield consistent results.
          relativeHalfSaturationDensityDefault=1.0
          relativeHalfSaturationDensity = 0.01
          */
          // The sequence is:
          // linkParamCnt,[prey_Id0],paramID0=value0,[prey_Id1],paramID1=value1,...[prey_IdN],paramIDN=valueN
          // setSystemParametersLink(sParams, timestepIdx, nodeIdx, preyIdx,
          // species.getParamA(preyIdx),
          //        ManipulatingParameterName.a, "relativeHalfSaturationDensityDefault", preyCnt);
          if (false) {
            setSystemParametersLink(
                sParams,
                timestepIdx,
                nodeIdx,
                preyIdx,
                species.getParamB0(preyIdx),
                ManipulatingParameterName.b0,
                "halfSaturationDensityDefault",
                preyCnt);
            setSystemParametersLink(
                sParams,
                timestepIdx,
                nodeIdx,
                preyIdx,
                species.getParamD(preyIdx),
                ManipulatingParameterName.d,
                "predatorInterferenceDefault",
                preyCnt);
            setSystemParametersLink(
                sParams,
                timestepIdx,
                nodeIdx,
                preyIdx,
                species.getParamE(preyIdx),
                ManipulatingParameterName.e,
                "assimilationEfficiencyPlantDefault",
                preyCnt);
            setSystemParametersLink(
                sParams,
                timestepIdx,
                nodeIdx,
                preyIdx,
                species.getParamQ(preyIdx),
                ManipulatingParameterName.q,
                "functionalResponseControlParameterDefault",
                preyCnt);
            setSystemParametersLink(
                sParams,
                timestepIdx,
                nodeIdx,
                preyIdx,
                species.getParamY(preyIdx),
                ManipulatingParameterName.y,
                "maximumIngestionRateDefault",
                preyCnt);
          }
        }
        builder.append(sParams.size()).append(",");
        for (int i = 0; i < sParams.size(); i++) {
          builder.append(sParams.get(i));
          builder.append(",");
        }
        System.out.println(builder);
      }
    }
    return builder.toString();
  }
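A minimal sketch, assuming the same class, of how the fragment returned by setSystemParameters() is combined into a per-species node-config entry as in Example #3; buildSampleEntry is hypothetical.
  // Hypothetical helper: builds one per-species entry of the node-config string,
  // e.g. "[5],2000,1.000,1,K=9431.818,0," for the sample plant in the comments above.
  private String buildSampleEntry(
      SpeciesZoneType species, HashMap<Integer, SpeciesZoneType> fullSpeciesMap) {
    // for a plant with only K submitted, this yields e.g. "1,K=9431.818,0,"
    String systemParam = setSystemParameters(species, fullSpeciesMap, 0);
    return "[" + species.getNodeIndex() + "],"
        + (int) species.getCurrentBiomass() + ","
        + roundToThreeDigits(species.getPerSpeciesBiomass()) + ","
        + systemParam;
  }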