@Test
  public void testVariableSteps()
      throws DimensionMismatchException, NumberIsTooSmallException, MaxCountExceededException,
          NoBracketingException {

    final TestProblem3 pb = new TestProblem3(0.9);
    double minStep = 0;
    double maxStep = pb.getFinalTime() - pb.getInitialTime();
    double scalAbsoluteTolerance = 1.0e-8;
    double scalRelativeTolerance = scalAbsoluteTolerance;

    FirstOrderIntegrator integ =
        new DormandPrince853Integrator(
            minStep, maxStep, scalAbsoluteTolerance, scalRelativeTolerance);
    integ.addStepHandler(new VariableHandler());
    double stopTime =
        integ.integrate(
            pb,
            pb.getInitialTime(),
            pb.getInitialState(),
            pb.getFinalTime(),
            new double[pb.getDimension()]);
    Assert.assertEquals(pb.getFinalTime(), stopTime, 1.0e-10);
    Assert.assertEquals("Dormand-Prince 8 (5, 3)", integ.getName());
  }
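
  // NOTE: the VariableHandler registered above is not shown in this excerpt.
  // The sketch below is an assumption, not the original class: a minimal
  // StepHandler (org.apache.commons.math3.ode.sampling) that only verifies
  // the adaptive integrator really does vary its step size.
  private static class VariableHandlerSketch implements StepHandler {
    private double minStep = Double.POSITIVE_INFINITY;
    private double maxStep = Double.NEGATIVE_INFINITY;

    public void init(double t0, double[] y0, double t) {
      minStep = Double.POSITIVE_INFINITY;
      maxStep = Double.NEGATIVE_INFINITY;
    }

    public void handleStep(StepInterpolator interpolator, boolean isLast) {
      final double step =
          FastMath.abs(interpolator.getCurrentTime() - interpolator.getPreviousTime());
      minStep = FastMath.min(minStep, step);
      maxStep = FastMath.max(maxStep, step);
      if (isLast) {
        // an adaptive integrator on an eccentric Kepler orbit should have
        // used a wide range of step sizes
        Assert.assertTrue(minStep < maxStep);
      }
    }
  }
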
  @Test
  public void testKepler()
      throws DimensionMismatchException, NumberIsTooSmallException, MaxCountExceededException,
          NoBracketingException {

    final TestProblem3 pb = new TestProblem3(0.9);
    double minStep = 0;
    double maxStep = pb.getFinalTime() - pb.getInitialTime();
    double scalAbsoluteTolerance = 1.0e-8;
    double scalRelativeTolerance = scalAbsoluteTolerance;

    FirstOrderIntegrator integ =
        new DormandPrince853Integrator(
            minStep, maxStep, scalAbsoluteTolerance, scalRelativeTolerance);
    integ.addStepHandler(new KeplerHandler(pb));
    integ.integrate(
        pb,
        pb.getInitialTime(),
        pb.getInitialState(),
        pb.getFinalTime(),
        new double[pb.getDimension()]);

    Assert.assertEquals(integ.getEvaluations(), pb.getCalls());
    Assert.assertTrue(pb.getCalls() < 3300);
  }
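
  // NOTE: KeplerHandler is likewise not shown here. A plausible sketch (an
  // assumption, not the original class): a StepHandler comparing the
  // integrated state against the analytical solution that TestProblem3
  // exposes; the error bound below is hypothetical.
  private static class KeplerHandlerSketch implements StepHandler {
    private final TestProblem3 pb;
    private double maxError;

    KeplerHandlerSketch(TestProblem3 pb) {
      this.pb = pb;
    }

    public void init(double t0, double[] y0, double t) {
      maxError = 0.0;
    }

    public void handleStep(StepInterpolator interpolator, boolean isLast) {
      final double t = interpolator.getCurrentTime();
      final double[] computed = interpolator.getInterpolatedState();
      final double[] reference = pb.computeTheoreticalState(t);
      final double dx = computed[0] - reference[0];
      final double dy = computed[1] - reference[1];
      maxError = FastMath.max(maxError, dx * dx + dy * dy);
      if (isLast) {
        Assert.assertTrue(maxError < 1.0e-10); // hypothetical bound
      }
    }
  }
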
  @Test
  public void testBackward()
      throws DimensionMismatchException, NumberIsTooSmallException, MaxCountExceededException,
          NoBracketingException {

    TestProblem5 pb = new TestProblem5();
    double minStep = 0;
    double maxStep = pb.getFinalTime() - pb.getInitialTime();
    double scalAbsoluteTolerance = 1.0e-8;
    double scalRelativeTolerance = 0.01 * scalAbsoluteTolerance;

    FirstOrderIntegrator integ =
        new DormandPrince853Integrator(
            minStep, maxStep, scalAbsoluteTolerance, scalRelativeTolerance);
    TestProblemHandler handler = new TestProblemHandler(pb, integ);
    integ.addStepHandler(handler);
    integ.integrate(
        pb,
        pb.getInitialTime(),
        pb.getInitialState(),
        pb.getFinalTime(),
        new double[pb.getDimension()]);

    Assert.assertTrue(handler.getLastError() < 1.1e-7);
    Assert.assertTrue(handler.getMaximalValueError() < 1.1e-7);
    Assert.assertEquals(0, handler.getMaximalTimeError(), 1.0e-12);
    Assert.assertEquals("Dormand-Prince 8 (5, 3)", integ.getName());
  }

  @Test
  public void testUnstableDerivative()
      throws DimensionMismatchException, NumberIsTooSmallException, MaxCountExceededException,
          NoBracketingException {
    final StepProblem stepProblem = new StepProblem(0.0, 1.0, 2.0);
    FirstOrderIntegrator integ = new DormandPrince853Integrator(0.1, 10, 1.0e-12, 0.0);
    integ.addEventHandler(stepProblem, 1.0, 1.0e-12, 1000);
    double[] y = {Double.NaN};
    integ.integrate(stepProblem, 0.0, new double[] {0.0}, 10.0, y);
    Assert.assertEquals(8.0, y[0], 1.0e-12);
  }
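
  // NOTE: StepProblem is not shown in this excerpt. Consistent with the
  // assertion above (integrating from 0 to 10 yields y = 8), a plausible
  // sketch (an assumption, not the original class) is an ODE whose rate
  // switches from 0.0 to 1.0 at t = 2.0, with an event marking the
  // discontinuity so the integrator does not step across it blindly.
  private static class StepProblemSketch
      implements FirstOrderDifferentialEquations, EventHandler {
    private final double rateBefore;
    private final double rateAfter;
    private final double switchTime;
    private double rate;

    StepProblemSketch(double rateBefore, double rateAfter, double switchTime) {
      this.rateBefore = rateBefore;
      this.rateAfter = rateAfter;
      this.switchTime = switchTime;
      this.rate = rateBefore;
    }

    public int getDimension() {
      return 1;
    }

    public void computeDerivatives(double t, double[] y, double[] yDot) {
      yDot[0] = rate;
    }

    public void init(double t0, double[] y0, double t) {
      rate = rateBefore;
    }

    public double g(double t, double[] y) {
      return t - switchTime;
    }

    public Action eventOccurred(double t, double[] y, boolean increasing) {
      rate = rateAfter; // the derivative jumps at the switch time
      return Action.RESET_DERIVATIVES;
    }

    public void resetState(double t, double[] y) {}
  }
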
  @Test
  public void testEvents()
      throws DimensionMismatchException, NumberIsTooSmallException, MaxCountExceededException,
          NoBracketingException {

    TestProblem4 pb = new TestProblem4();
    double minStep = 0;
    double maxStep = pb.getFinalTime() - pb.getInitialTime();
    double scalAbsoluteTolerance = 1.0e-9;
    double scalRelativeTolerance = 0.01 * scalAbsoluteTolerance;

    FirstOrderIntegrator integ =
        new DormandPrince853Integrator(
            minStep, maxStep, scalAbsoluteTolerance, scalRelativeTolerance);
    TestProblemHandler handler = new TestProblemHandler(pb, integ);
    integ.addStepHandler(handler);
    EventHandler[] functions = pb.getEventsHandlers();
    double convergence = 1.0e-8 * maxStep;
    for (int l = 0; l < functions.length; ++l) {
      integ.addEventHandler(functions[l], Double.POSITIVE_INFINITY, convergence, 1000);
    }
    Assert.assertEquals(functions.length, integ.getEventHandlers().size());
    integ.integrate(
        pb,
        pb.getInitialTime(),
        pb.getInitialState(),
        pb.getFinalTime(),
        new double[pb.getDimension()]);

    Assert.assertEquals(0, handler.getMaximalValueError(), 2.1e-7);
    Assert.assertEquals(0, handler.getMaximalTimeError(), convergence);
    Assert.assertEquals(12.0, handler.getLastTime(), convergence);
    integ.clearEventHandlers();
    Assert.assertEquals(0, integ.getEventHandlers().size());
  }

  private static void computeBeliefBoundaries(
      Double hFFPos, Double hFFSpeed, Double hFFTargetPos, Double hFFCreationTime) {

    double currentTime = System.nanoTime() / SEC_NANOSEC_FACTOR;
    double[] minBoundaries = new double[1];
    double[] maxBoundaries = new double[1];
    double startTime = 0.0;

    if (hFFTargetPos != 0.0) {
      if (hFFCreationTime > hInit) {
        startTime = hFFCreationTime;
        hFFPosMin = hFFPos;
        hFFPosMax = hFFPos;
        hFFSpeedMin = hFFSpeed;
        hFFSpeedMax = hFFSpeed;
      } else if (hFFCreationTime <= hLastTime) {
        startTime = hLastTime;
      } else {
        // hFFCreationTime > hLastTime
        startTime = hFFCreationTime;
        hFFPosMin = hFFPos;
        hFFPosMax = hFFPos;
        hFFSpeedMin = hFFSpeed;
        hFFSpeedMax = hFFSpeed;
      }

      // ---------------------- knowledge evaluation --------------------------------

      double accMin =
          Database.getAcceleration(
              hFFSpeedMin, hFFPosMin, Database.lTorques, 0.0, 1.0, Database.lMass);
      double accMax =
          Database.getAcceleration(
              hFFSpeedMax, hFFPosMax, Database.lTorques, 1.0, 0.0, Database.lMass);

      FirstOrderIntegrator integrator = new MidpointIntegrator(1);
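      // (MidpointIntegrator is a fixed-step, second-order Runge-Kutta scheme;
      // its constructor argument is the step size, 1.0 time unit here. The
      // same one-dimensional integrator is then run twice per bound: the
      // first pass turns an acceleration into a speed increment, the second
      // turns that result into a position increment.)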
      integrator.setMaxEvaluations((int) TIMEPERIOD);
      FirstOrderDifferentialEquations f = new Derivation();
      // ------------- min ----------------------

      minBoundaries[0] = accMin;
      integrator.integrate(f, startTime, minBoundaries, currentTime, minBoundaries);
      hFFSpeedMin += minBoundaries[0];
      integrator.integrate(f, startTime, minBoundaries, currentTime, minBoundaries);
      hFFPosMin += minBoundaries[0];
      // ------------- max ----------------------

      maxBoundaries[0] = accMax;
      integrator.integrate(f, startTime, maxBoundaries, currentTime, maxBoundaries);
      hFFSpeedMax += maxBoundaries[0];
      integrator.integrate(f, startTime, maxBoundaries, currentTime, maxBoundaries);
      hFFPosMax += maxBoundaries[0];
    }
    // clamp the results to plausible physical bounds
    if (hFFSpeedMax > 200.0) {
      hFFSpeedMax = 200.0;
    }
    if (hFFSpeedMin < 0.0) {
      hFFSpeedMin = 0.0;
    }
    if (hFFPosMin < 0.0) {
      hFFPosMin = 0.0;
    }
    hLastTime = currentTime;
    hInit = hFFCreationTime;
  }

  @Test
  public void testEventsScheduling() {

    FirstOrderDifferentialEquations sincos =
        new FirstOrderDifferentialEquations() {

          public int getDimension() {
            return 2;
          }

          public void computeDerivatives(double t, double[] y, double[] yDot) {
            yDot[0] = y[1];
            yDot[1] = -y[0];
          }
        };
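
    // With y0 = (sin(t0), cos(t0)), the exact solution of this system is
    // y(t) = (sin(t), cos(t)), which produces the zero crossings noted below.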

    SchedulingChecker sinChecker = new SchedulingChecker(0); // events at 0, PI, 2PI ...
    SchedulingChecker cosChecker = new SchedulingChecker(1); // events at PI/2, 3PI/2, 5PI/2 ...

    FirstOrderIntegrator integ = new DormandPrince853Integrator(0.001, 1.0, 1.0e-12, 0.0);
    integ.addEventHandler(sinChecker, 0.01, 1.0e-7, 100);
    integ.addStepHandler(sinChecker);
    integ.addEventHandler(cosChecker, 0.01, 1.0e-7, 100);
    integ.addStepHandler(cosChecker);
    double t0 = 0.5;
    double[] y0 = new double[] {FastMath.sin(t0), FastMath.cos(t0)};
    double t = 10.0;
    double[] y = new double[2];
    integ.integrate(sincos, t0, y0, t, y);
  }
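
  // NOTE: SchedulingChecker is not shown in this excerpt. A plausible sketch
  // (an assumption, not the original class): registered both as an
  // EventHandler and a StepHandler, it fires whenever component y[index]
  // crosses zero and checks that callbacks arrive in chronological order.
  private static class SchedulingCheckerSketch implements EventHandler, StepHandler {
    private final int index;
    private double lastTime = Double.NEGATIVE_INFINITY;

    SchedulingCheckerSketch(int index) {
      this.index = index;
    }

    public void init(double t0, double[] y0, double t) {
      lastTime = Double.NEGATIVE_INFINITY;
    }

    public double g(double t, double[] y) {
      return y[index];
    }

    public Action eventOccurred(double t, double[] y, boolean increasing) {
      Assert.assertTrue(t >= lastTime); // events must be seen in order
      lastTime = t;
      return Action.CONTINUE;
    }

    public void resetState(double t, double[] y) {}

    public void handleStep(StepInterpolator interpolator, boolean isLast) {
      Assert.assertTrue(interpolator.getCurrentTime() >= lastTime);
      lastTime = interpolator.getCurrentTime();
    }
  }
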
  @Test(expected = NumberIsTooSmallException.class)
  public void testMinStep()
      throws DimensionMismatchException, NumberIsTooSmallException, MaxCountExceededException,
          NoBracketingException {

    TestProblem1 pb = new TestProblem1();
    double minStep = 0.1 * (pb.getFinalTime() - pb.getInitialTime());
    double maxStep = pb.getFinalTime() - pb.getInitialTime();
    double[] vecAbsoluteTolerance = {1.0e-15, 1.0e-16};
    double[] vecRelativeTolerance = {1.0e-15, 1.0e-16};
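
    // These tolerances are tight enough that the adaptive step-size control
    // will request a step below minStep, producing the expected
    // NumberIsTooSmallException.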

    FirstOrderIntegrator integ =
        new DormandPrince853Integrator(
            minStep, maxStep, vecAbsoluteTolerance, vecRelativeTolerance);
    TestProblemHandler handler = new TestProblemHandler(pb, integ);
    integ.addStepHandler(handler);
    integ.integrate(
        pb,
        pb.getInitialTime(),
        pb.getInitialState(),
        pb.getFinalTime(),
        new double[pb.getDimension()]);
    Assert.fail("an exception should have been thrown");
  }
  // loop through current job/results, assembling dataset
  private HashMap<Integer, SpeciesZoneType> genSpeciesDataset(
      SimJob job,
      EcosystemTimesteps ecosysTimesteps,
      Map<Integer, NodeRelationships> ecosysRelationships) {
    // calc information relevant to entire ecosystem
    int speciesCnt = ecosysTimesteps.getNodeList().size(); // Number of species
    int timesteps = ecosysTimesteps.getTimesteps(); // Maximum number of timesteps to run simulation
    int timestepsToSave = 0; // Number of timesteps of data to save to output file
    // Array of matching timesteps returned by findMatchingTimesteps()
    int[] matchingTimesteps = null;

    // read in link parameters; this was explicitly configured to allow
    // manipulation of link parameter values, but no manipulation is
    // performed in this version
    LinkParams lPs = new LinkParams(propertiesConfig);

    // loop through node values and assemble summary data
    int[] speciesID = new int[speciesCnt];
    SimJobSZT[] sztArray = new SimJobSZT[speciesCnt];
    int spNum = 0;
    for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
      SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(nodeTimesteps.getNodeId());
      sztArray[spNum] = sjSzt;
      speciesID[spNum] = sjSzt.getNodeIndex();
      spNum++;
    }

    // define objects to track species' contributions
    double[][][] contribs = new double[timesteps][speciesCnt][speciesCnt];
    double[][] calcBiomass = new double[timesteps][speciesCnt];
    double[][] contribsT; // current timestep

    // note: WebServices ATN Model uses B0 with default = 0.5.  This presumes
    // that biomasses are small, i.e. < 1.0.  Division by biomassScale
    // here is consistent with usage in WoB_Server.SimulationEngine to
    // normalize biomasses.
    // need to store biomass as it varies over time through the integration;
    // start with the initial biomass for each species
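    // (For instance, if biomassScale were 1000.0, a purely illustrative
    // value, a raw biomass of 250 would enter the integrator as 0.25, below
    // the default B0 of 0.5.)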
    double[] currBiomass = new double[speciesCnt];
    for (int i = 0; i < speciesCnt; i++) {
      NodeTimesteps nodeTimeSteps = ecosysTimesteps.getTimestepMap().get(speciesID[i]);
      // manually set biomass values for excluded initial timesteps; this
      // includes the first value, which is used as input
      currBiomass[i] = nodeTimeSteps.getBiomass(initTimeIdx) / biomassScale;
      calcBiomass[0][i] = currBiomass[i];
    }

    if (Constants.useCommonsMathIntegrator) {

      // Use Apache Commons Math GraggBulirschStoerIntegrator

      FirstOrderIntegrator integrator =
          new GraggBulirschStoerIntegrator(
              1.0e-8, // minimal step
              100.0, // maximal step
              ATNEquations.EXTINCT, // allowed absolute error
              1.0e-10); // allowed relative error

      // Set up the ATN equations based on the current food web and parameters
      ATNEquations ode = new ATNEquations(sztArray, ecosysRelationships, lPs);

      ATNEventHandler eventHandler = new ATNEventHandler(ode);
      // FIXME: Choose best parameter values
      integrator.addEventHandler(
          new EventFilter(eventHandler, FilterType.TRIGGER_ONLY_DECREASING_EVENTS),
          1, // maximal time interval between switching function checks (this interval
          // prevents missing sign changes in case the integration step becomes very large)
          0.0001, // convergence threshold in the event time search
          1000, // upper limit of the iteration count in the event time search
          new BisectionSolver());
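      // (This five-argument addEventHandler overload supplies the root solver
      // used in the event time search; BisectionSolver is a robust,
      // derivative-free choice.)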

      // Set up the StepHandler, which is triggered at each time step by the integrator,
      // and copies the current biomass of each species into calcBiomass[timestep].
      // See the "Continuous Output" section of
      // https://commons.apache.org/proper/commons-math/userguide/ode.html
      FixedStepHandler fixedStepHandler =
          new FixedStepHandler() {
            public void init(double t0, double[] y0, double t) {}

            private int timestep = 0;

            public void handleStep(double t, double[] y, double[] yDot, boolean isLast) {
              // Ensure we don't go past the last time step due to rounding error
              if (timestep < calcBiomass.length) {
                System.arraycopy(y, 0, calcBiomass[timestep], 0, speciesCnt);
              }
              timestep++;
            }
          };
      StepHandler stepHandler = new StepNormalizer(timeIntvl, fixedStepHandler);
      integrator.addStepHandler(stepHandler);

      // Run the integrator to compute the biomass time series
      integrator.integrate(ode, 0.0, currBiomass, timeIntvl * timesteps, currBiomass);
      if (eventHandler.integrationWasStopped()) {
        timestepsToSave = (int) (eventHandler.getTimeStopped() / timeIntvl);
      } else {
        // Check for an oscillating steady state,
        // and only save the data through the first period of the oscillation
        matchingTimesteps = findMatchingTimesteps(calcBiomass, timesteps - 1);
        System.err.println("\nmatchingTimesteps =  " + Arrays.toString(matchingTimesteps));

        // Save timesteps up through the second matching timestep,
        // or all timesteps if there was no second matching timestep.
        if (matchingTimesteps[1] != -1) {
          timestepsToSave = matchingTimesteps[1] + 1;
        } else {
          timestepsToSave = timesteps;
        }
      }

    } else {

      // Use BulirschStoerIntegration

      // create integration object
      boolean isTest = false;
      BulirschStoerIntegration bsi =
          new BulirschStoerIntegration(
              timeIntvl, speciesID, sztArray, ecosysRelationships, lPs, maxBSIErr, equationSet);

      // calculate delta-biomass and biomass "contributions" from each related
      // species
      for (int t = initTimeIdx + 1; t < timesteps; t++) {
        boolean success = bsi.performIntegration(time(initTime, t), currBiomass);
        if (!success) {
          System.out.printf("Integration failed to converge, t = %d\n", t);
          System.out.print(bsi.extrapArrayToString(biomassScale));
          break;
        }
        currBiomass = bsi.getYNew();
        System.arraycopy(currBiomass, 0, calcBiomass[t], 0, speciesCnt);

        contribsT = bsi.getContribs();
        for (int i = 0; i < speciesCnt; i++) {
          System.arraycopy(contribsT[i], 0, contribs[t - 1][i], 0, speciesCnt);
        }
      } // timestep loop
    }

    if (useHDF5) {
      saveHDF5OutputFile(
          calcBiomass, speciesID, matchingTimesteps, job.getNode_Config(), timestepsToSave);
      return null;
    }

    double[][] webServicesData = new double[speciesCnt][timesteps];
    if (Constants.useSimEngine) { // webServicesData is needed only for the margin-of-error calculation
      // extract timestep data from CSV
      Functions.extractCSVDataRelns(job.getCsv(), ecosysTimesteps, ecosysRelationships);
      spNum = 0;
      for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
        // copy nodetimestep data to local array for easier access
        System.arraycopy(nodeTimesteps.getBiomassArray(), 0, webServicesData[spNum], 0, timesteps);

        spNum++;
      }
    }
    // output data
    // A. print header
    psATN.printf("timesteps");
    for (int i = 0; i < timesteps; i++) {
      psATN.printf(",%d", i);
    }
    psATN.println();

    /* Convert to CSV String */
    String biomassCSV = "";
    biomassCSV = "Manipulation_id: " + job.getATNManipulationId() + "\n\n";

    int maxTimestep = job.getTimesteps();
    // Create Timestep Labels
    for (int j = 1; j <= maxTimestep; j++) {
      biomassCSV += "," + j;
    }
    HashMap<Integer, SpeciesZoneType> mSpecies = new HashMap<Integer, SpeciesZoneType>();
    // loop through each species
    for (int i = 0; i < speciesCnt; i++) {
      if (Constants.useSimEngine) {
        psATN.printf("i.%d.sim", speciesID[i]);
        // B. print WebServices simulation data for species
        for (int t = 0; t < timesteps; t++) {
          psATN.printf(",%9.0f", webServicesData[i][t]);
        }
        psATN.println();
      }

      // print combined biomass contributions (i.e., locally calculated biomass)
      // for the current species
      psATN.printf("i.%d.calc", speciesID[i]);
      for (int t = 0; t < timesteps; t++) {
        psATN.printf(",%9.0f", calcBiomass[t][i] * biomassScale);
      }
      psATN.println();

      //           //C. print individual biomass contributions from other species
      //           for (int j = 0; j < speciesCnt; j++) {
      //               psATN.printf("i.%d.j.%d.", speciesID[i], speciesID[j]);
      //               for (int t = 0; t < timesteps; t++) {
      //                   psATN.printf(",%9.0f", contribs[t][i][j] * biomassScale);
      //               }
      //               psATN.println();
      //           }

      float extinction = 1.E-15f;
      SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(speciesID[i]);
      // add nodes to the list in the order they are received from infos
      String name = sjSzt.getName().replaceAll(",", " ") + " [" + sjSzt.getNodeIndex() + "]";
      String tempStr = name;
      for (int t = 0; t < maxTimestep; t++) {
        tempStr += ",";

        double biomass = calcBiomass[t][i] * biomassScale;

        if (biomass > 0) {
          tempStr += biomass > extinction ? Math.ceil(biomass) : 0;
        }

        if (t == maxTimestep - 1) {
          SpeciesZoneType szt = null;
          if (!mSpecies.containsKey(sjSzt.getNodeIndex())) {
            szt = new SpeciesZoneType(sjSzt.getName(), sjSzt.getNodeIndex(), 0, 0, biomass, null);
            mSpecies.put(sjSzt.getNodeIndex(), szt);

          } else { // update existing species current biomass
            szt = mSpecies.get(sjSzt.getNodeIndex());

            szt.setCurrentBiomass(biomass);
          }
        }
      }
      biomassCSV += "\n" + tempStr;
    }

    // Append node config to the ATN CSV
    psATN.println();
    psATN.println("\"node-config: " + job.getNode_Config() + "\"");

    biomassCSV += "\n\n";

    biomassCSV += job.getConsumeMap().toString() + "\n\n";

    biomassCSV += job.getPathTable().toString();

    job.setBiomassCsv(biomassCSV);

    // System.out.println(biomassCSV);
    return mSpecies;
  }