Code Example #1
 @Test
 public void sortByExample4() {
   List<Integer> example = asList(1, 3, 5, 2, 4, 6);
   final List<String> expected = asList("1", "3", "5", "2", "4", "6");
   assertEquals(
       expected,
       Algorithms.sortByExample(
           example,
           Strings.<Integer>string(),
           asList("6", "5", "4", "3", "2", "1"),
           Functions.<String>identity()));
   assertEquals(
       asList("3", "5", "4"),
       Algorithms.sortByExample(
           example,
           Strings.<Integer>string(),
           asList("5", "4", "3"),
           Functions.<String>identity()));
   assertEquals(
       expected,
       Algorithms.sortByExample(
           example,
           Strings.<Integer>string(),
           asList("1", "2", "3", "4", "5", "6", "7", "8", "9"),
           Functions.<String>identity()));
 }
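Judging from the three assertions, Algorithms.sortByExample appears to reorder its third argument to follow the order of the mapped example list, dropping elements that have no counterpart in the example. A minimal sketch of that behavior (a hypothetical helper written against java.util.function, not the library's actual implementation):

 // Hypothetical sketch of the behavior implied by the assertions above; not the real Algorithms API.
 // Uses java.util.* and java.util.stream.Collectors.
 static <A, B, K> List<B> sortByExampleSketch(
     List<A> example, java.util.function.Function<A, K> exampleKey,
     List<B> input, java.util.function.Function<B, K> inputKey) {
   Map<K, Integer> rank = new LinkedHashMap<>();
   for (A a : example) {
     rank.putIfAbsent(exampleKey.apply(a), rank.size()); // position of each key in the example
   }
   return input.stream()
       .filter(b -> rank.containsKey(inputKey.apply(b))) // drop items the example does not mention
       .sorted(Comparator.comparingInt((B b) -> rank.get(inputKey.apply(b))))
       .collect(Collectors.toList());
 }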
Code Example #2
 public static boolean remove_res(Pos position) {
   // True if a piece occupies the given position.
   return Functions.getPiece(position) != null;
 }
Code Example #3
 public static boolean win_res(Pos position) {
   // True once no pieces of type "STONE" remain.
   return Functions.pieceCount("STONE") == 0;
 }
Code Example #4
File: Config.java  Project: susotajuraj/jdk8u-jdk
 private Set<Long> parseMechanisms(String keyword) throws IOException {
   checkDup(keyword);
   Set<Long> mechs = new HashSet<Long>();
   parseEquals();
   parseOpenBraces();
   while (true) {
     int token = nextToken();
     if (isCloseBraces(token)) {
       break;
     }
     if (token == TT_EOL) {
       continue;
     }
     if (token != TT_WORD) {
       throw excToken("Expected mechanism, read");
     }
     long mech = parseMechanism(st.sval);
     mechs.add(Long.valueOf(mech));
   }
   if (DEBUG) {
     System.out.print("mechanisms: [");
     for (Long mech : mechs) {
       System.out.print(Functions.getMechanismName(mech));
       System.out.print(", ");
     }
     System.out.println("]");
   }
   return mechs;
 }
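For context, this loop consumes a brace-delimited list from a SunPKCS11 provider configuration; an entry along the lines of enabledMechanisms = { CKM_RSA_PKCS 0x00000255 } (the attribute name and values are only an illustration, not taken from this file) would be read one token per iteration, with each token resolved by parseMechanism shown in Code Example #7 below.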
Code Example #5
File: Config.java  Project: susotajuraj/jdk8u-jdk
 private long parseObjectClass() throws IOException {
   String name = parseWord();
   try {
     return Functions.getObjectClassId(name);
   } catch (IllegalArgumentException e) {
     throw excLine("Unknown object class " + name);
   }
 }
Code Example #6
File: Config.java  Project: susotajuraj/jdk8u-jdk
 private long decodeAttributeName(String name) throws IOException {
   if (isNumber(name)) {
     return decodeNumber(name);
   } else {
     try {
       return Functions.getAttributeId(name);
     } catch (IllegalArgumentException e) {
       throw excLine("Unknown attribute name " + name);
     }
   }
 }
Code Example #7
File: Config.java  Project: susotajuraj/jdk8u-jdk
 private long parseMechanism(String mech) throws IOException {
   if (isNumber(mech)) {
     return decodeNumber(mech);
   } else {
     try {
       return Functions.getMechanismId(mech);
     } catch (IllegalArgumentException e) {
       throw excLine("Unknown mechanism: " + mech);
     }
   }
 }
Code Example #8
File: Config.java  Project: susotajuraj/jdk8u-jdk
 private long parseKeyAlgorithm() throws IOException {
   String name = parseWord();
   if (isNumber(name)) {
     return decodeNumber(name);
   } else {
     try {
       return Functions.getKeyId(name);
     } catch (IllegalArgumentException e) {
       throw excLine("Unknown key algorithm " + name);
     }
   }
 }
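Code Examples #6 through #8 share one pattern: a token is either a numeric literal or a symbolic PKCS#11 name resolved through Functions. A standalone sketch of that pattern follows; isNumber and decodeNumber are private to Config.java, so the hex handling below is an assumption, and resolveName merely stands in for Functions.getAttributeId, getMechanismId, or getKeyId.

 // Hypothetical standalone version of the "numeric literal or symbolic name" pattern above.
 static long parseIdOrName(String token, java.util.function.ToLongFunction<String> resolveName) {
   if (Character.isDigit(token.charAt(0))) {
     // Numeric literal; hex via a 0x prefix is assumed here.
     return token.startsWith("0x") || token.startsWith("0X")
         ? Long.parseLong(token.substring(2), 16)
         : Long.parseLong(token);
   }
   return resolveName.applyAsLong(token); // symbolic name, e.g. CKM_RSA_PKCS or CKA_LABEL
 }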
Code Example #9
 public static void runHighlightingTypeMigration(
     final Project project,
     final Editor editor,
     final TypeMigrationRules rules,
     final PsiElement root,
     final PsiType migrationType,
     final boolean optimizeImports) {
   runHighlightingTypeMigration(
       project,
       editor,
       rules,
       new PsiElement[] {root},
       Functions.constant(migrationType),
       optimizeImports);
 }
Code Example #10
  /**
   * Save a generated biomass dataset to an HDF5 file in the output directory given by the outputDir
   * attribute.
   *
   * @param biomass The generated biomass as a (num_timesteps) x (num_nodes) array
   * @param nodeIDs The node IDs. The order must correspond to the columns of the biomass array
   * @param matchingTimesteps The matching timesteps returned by getMatchingTimesteps()
   * @param nodeConfig The node configuration string used to generate the data
   * @param numTimesteps The number of time steps of biomass data to save
   */
  private void saveHDF5OutputFile(
      double[][] biomass,
      int[] nodeIDs,
      int[] matchingTimesteps,
      String nodeConfig,
      int numTimesteps) {

    // Determine the filename
    File file = Functions.getNewOutputFile(new File(outputDir), "ATN", ".h5");
    System.out.println("Writing output to " + file.toString());

    // Write the data to the output file
    IHDF5Writer writer = HDF5Factory.configure(file).writer();

    if (Constants.ROUND_BIOMASS) {
      // Scale biomass for consistency with CSV output.
      // Round and cast to 32-bit integers to facilitate deflate compression.
      // Note: there is technically a risk of integer overflow,
      // but it won't happen unless scaled biomass exceeds 2 billion.
      int[][] scaledBiomass = new int[numTimesteps][nodeIDs.length];
      for (int t = 0; t < numTimesteps; t++) {
        for (int i = 0; i < nodeIDs.length; i++) {
          scaledBiomass[t][i] = (int) Math.round((biomass[t][i] * Constants.BIOMASS_SCALE));
        }
      }

      writer.int32().writeMatrix("biomass", scaledBiomass, HDF5IntStorageFeatures.INT_DEFLATE);

    } else {
      // Scale biomass for consistency with CSV output, but do not round.
      double[][] scaledBiomass = new double[numTimesteps][nodeIDs.length];
      for (int t = 0; t < numTimesteps; t++) {
        for (int i = 0; i < nodeIDs.length; i++) {
          scaledBiomass[t][i] = (biomass[t][i] * Constants.BIOMASS_SCALE);
        }
      }
      writer.float64().writeMatrix("biomass", scaledBiomass);
    }

    writer.writeIntArray("node_ids", nodeIDs);

    if (matchingTimesteps != null) writer.writeIntArray("matching_timesteps", matchingTimesteps);

    writer.string().setAttr("/", "node_config", nodeConfig);
    writer.close();
  }
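The overflow note in the ROUND_BIOMASS branch can be made concrete: Math.round(double) returns a long, and the cast to int silently wraps once the scaled value exceeds Integer.MAX_VALUE (about 2.1 billion). A purely illustrative guard for that cast:

      // Illustrative only: clamp instead of letting the narrowing cast wrap to a negative value.
      long scaled = Math.round(biomass[t][i] * Constants.BIOMASS_SCALE);
      scaledBiomass[t][i] = (int) Math.min(scaled, Integer.MAX_VALUE);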
Code Example #11
  @Test
  public void testCollectDocLevelWhereClause() throws Throwable {
    EqOperator op =
        (EqOperator)
            functions.get(
                new FunctionIdent(
                    EqOperator.NAME,
                    ImmutableList.<DataType>of(DataTypes.INTEGER, DataTypes.INTEGER)));
    List<Symbol> toCollect = Collections.<Symbol>singletonList(testDocLevelReference);
    WhereClause whereClause =
        new WhereClause(
            new Function(
                op.info(), Arrays.<Symbol>asList(testDocLevelReference, Literal.newLiteral(2))));
    CollectPhase collectNode = getCollectNode(toCollect, whereClause);

    Bucket result = collect(collectNode);
    assertThat(result, contains(isRow(2)));
  }
Code Example #12
  @Test
  public void testCollectShardExpressionsWhereShardIdIs0() throws Exception {
    EqOperator op =
        (EqOperator)
            functions.get(
                new FunctionIdent(
                    EqOperator.NAME,
                    ImmutableList.<DataType>of(DataTypes.INTEGER, DataTypes.INTEGER)));

    List<Symbol> toCollect = ImmutableList.<Symbol>of(testShardIdReference);
    CollectPhase collectNode =
        new CollectPhase(
            UUID.randomUUID(), 0, "shardCollect", shardRouting(0, 1), toCollect, EMPTY_PROJECTIONS);
    collectNode.whereClause(
        new WhereClause(
            new Function(op.info(), Arrays.asList(testShardIdReference, Literal.newLiteral(0)))));
    collectNode.maxRowGranularity(RowGranularity.SHARD);
    Bucket result = getBucket(collectNode);
    assertThat(result, contains(isRow(0)));
  }
Code Example #13
 @Test
 public void testCollectWithNullWhereClause() throws Exception {
   EqOperator op =
       (EqOperator)
           functions.get(
               new FunctionIdent(
                   EqOperator.NAME,
                   ImmutableList.<DataType>of(DataTypes.INTEGER, DataTypes.INTEGER)));
   CollectPhase collectNode =
       new CollectPhase(
           UUID.randomUUID(),
           0,
           "whereClause",
           testRouting,
           TO_COLLECT_TEST_REF,
           EMPTY_PROJECTIONS);
   collectNode.whereClause(
       new WhereClause(
           new Function(op.info(), Arrays.<Symbol>asList(Literal.NULL, Literal.NULL))));
   Bucket result = getBucket(collectNode);
   assertThat(result.size(), is(0));
 }
Code Example #14
File: Functions.java  Project: Ruritariye/basex
  /**
   * Returns an instance of a function with the specified name and number of arguments, or {@code
   * null}.
   *
   * @param name name of the function
   * @param args optional arguments
   * @param dyn compile-/run-time flag
   * @param ctx query context
   * @param ii input info
   * @return function instance
   * @throws QueryException query exception
   */
  public static TypedFunc get(
      final QNm name,
      final Expr[] args,
      final boolean dyn,
      final QueryContext ctx,
      final InputInfo ii)
      throws QueryException {

    // get namespace and local name
    // parse data type constructors
    if (eq(name.uri(), XSURI)) {
      final byte[] ln = name.local();
      final AtomType type = AtomType.find(name, false);
      if (type == null) {
        final Levenshtein ls = new Levenshtein();
        for (final AtomType t : AtomType.values()) {
          if (t.par != null
              && t != AtomType.NOT
              && t != AtomType.AAT
              && t != AtomType.BIN
              && ls.similar(lc(ln), lc(t.string()), 0))
            FUNSIMILAR.thrw(ii, name.string(), t.string());
        }
      }
      // no constructor function found, or abstract type specified
      if (type == null || type == AtomType.NOT || type == AtomType.AAT) {
        FUNCUNKNOWN.thrw(ii, name.string());
      }

      if (args.length != 1) FUNCTYPE.thrw(ii, name.string());
      final SeqType to = SeqType.get(type, Occ.ZERO_ONE);
      return TypedFunc.constr(new Cast(ii, args[0], to), to);
    }

    // pre-defined functions
    final StandardFunc fun = Functions.get().get(name, args, ii);
    if (fun != null) {
      if (!ctx.sc.xquery3 && fun.xquery3()) FEATURE30.thrw(ii);
      for (final Function f : Function.UPDATING) {
        if (fun.sig == f) {
          ctx.updating(true);
          break;
        }
      }
      return new TypedFunc(fun, fun.sig.type(args.length));
    }

    // user-defined function
    final TypedFunc tf = ctx.funcs.get(name, args, ii);
    if (tf != null) return tf;

    // Java function (only allowed with administrator permissions)
    final JavaMapping jf = JavaMapping.get(name, args, ctx, ii);
    if (jf != null) return TypedFunc.java(jf);

    // add user-defined function that has not been declared yet
    if (!dyn && FuncType.find(name) == null) return ctx.funcs.add(name, args, ii, ctx);

    // no function found
    return null;
  }
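Read top to bottom, the lookup order in this method is: xs: constructor functions built from the atomic type, predefined standard functions, user-defined functions that are already declared, Java bindings (administrator permissions only), and finally a forward reference registered for a user-defined function that has not been declared yet; only after all of these does it return null.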
Code Example #15
 private void initOutputStreams() {
   System.out.println("Ecosystem output will be written to:");
   System.out.println("Network output will be written to:");
   // psATN = Functions.getPrintStream("ATN", userInput.destDir);
   psATN = Functions.getPrintStream("ATN", outputDir);
 }
Code Example #16
  // loop through current job/results, assembling dataset
  private HashMap<Integer, SpeciesZoneType> genSpeciesDataset(
      SimJob job,
      EcosystemTimesteps ecosysTimesteps,
      Map<Integer, NodeRelationships> ecosysRelationships) {
    // calc information relevant to entire ecosystem
    int speciesCnt = ecosysTimesteps.getNodeList().size(); // Number of species
    int timesteps = ecosysTimesteps.getTimesteps(); // Maximum number of timesteps to run simulation
    int timestepsToSave = 0; // Number of timesteps of data to save to output file
    int[] matchingTimesteps =
        null; // Array of matching timesteps returned by findMatchingTimesteps()

    // read in link parameters; this was explicitly configured to allow
    // manipulation of link parameter values, but no manipulation is
    // performed in this version
    LinkParams lPs = new LinkParams(propertiesConfig);

    // loop through node values and assemble summary data
    int[] speciesID = new int[speciesCnt];
    SimJobSZT[] sztArray = new SimJobSZT[speciesCnt];
    int spNum = 0;
    for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
      SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(nodeTimesteps.getNodeId());
      sztArray[spNum] = sjSzt;
      speciesID[spNum] = sjSzt.getNodeIndex();
      spNum++;
    }

    // define objects to track species' contributions
    double[][][] contribs = new double[timesteps][speciesCnt][speciesCnt];
    double[][] calcBiomass = new double[timesteps][speciesCnt];
    double[][] contribsT; // current timestep

    // note: WebServices ATN Model uses B0 with default = 0.5.  This presumes
    // that biomasses are small, i.e. < 1.0.  Division by biomassScale
    // here is consistent with usage in WoB_Server.SimulationEngine to
    // normalize biomasses.
    // need to store bm as it varies over time through integration;
    // start with initial bm for each species
    double[] currBiomass = new double[speciesCnt];
    for (int i = 0; i < speciesCnt; i++) {
      NodeTimesteps nodeTimeSteps = ecosysTimesteps.getTimestepMap().get(speciesID[i]);
      // manually set biomass vals for excluded initial timesteps; this
      // includes the first value to be used as input
      currBiomass[i] = nodeTimeSteps.getBiomass(initTimeIdx) / biomassScale;
      calcBiomass[0][i] = currBiomass[i];
    }

    if (Constants.useCommonsMathIntegrator) {

      // Use Apache Commons Math GraggBulirschStoerIntegrator

      FirstOrderIntegrator integrator =
          new GraggBulirschStoerIntegrator(
              1.0e-8, // minimal step
              100.0, // maximal step
              ATNEquations.EXTINCT, // allowed absolute error
              1.0e-10); // allowed relative error

      // Set up the ATN equations based on the current food web and parameters
      ATNEquations ode = new ATNEquations(sztArray, ecosysRelationships, lPs);

      ATNEventHandler eventHandler = new ATNEventHandler(ode);
      // FIXME: Choose best parameter values
      integrator.addEventHandler(
          new EventFilter(eventHandler, FilterType.TRIGGER_ONLY_DECREASING_EVENTS),
          1, // maximal time interval between switching function checks (this interval prevents
          // missing sign changes in case the integration step becomes very large)
          0.0001, // convergence threshold in the event time search
          1000, // upper limit of the iteration count in the event time search
          new BisectionSolver());

      // Set up the StepHandler, which is triggered at each time step by the integrator,
      // and copies the current biomass of each species into calcBiomass[timestep].
      // See the "Continuous Output" section of
      // https://commons.apache.org/proper/commons-math/userguide/ode.html
      FixedStepHandler fixedStepHandler =
          new FixedStepHandler() {
            public void init(double t0, double[] y0, double t) {}

            private int timestep = 0;

            public void handleStep(double t, double[] y, double[] yDot, boolean isLast) {
              // Ensure we don't go past the last time step due to rounding error
              if (timestep < calcBiomass.length) {
                System.arraycopy(y, 0, calcBiomass[timestep], 0, speciesCnt);
              }
              timestep++;
            }
          };
      StepHandler stepHandler = new StepNormalizer(timeIntvl, fixedStepHandler);
      integrator.addStepHandler(stepHandler);

      // Run the integrator to compute the biomass time series
      integrator.integrate(ode, 0.0, currBiomass, timeIntvl * timesteps, currBiomass);
      if (eventHandler.integrationWasStopped()) {
        timestepsToSave = (int) (eventHandler.getTimeStopped() / timeIntvl);
      } else {
        // Check for an oscillating steady state,
        // and only save the data through the first period of the oscillation
        matchingTimesteps = findMatchingTimesteps(calcBiomass, timesteps - 1);
        System.err.println("\nmatchingTimesteps =  " + Arrays.toString(matchingTimesteps));

        // Save timesteps up through the second matching timestep,
        // or all timesteps if there was no second matching timestep.
        if (matchingTimesteps[1] != -1) {
          timestepsToSave = matchingTimesteps[1] + 1;
        } else {
          timestepsToSave = timesteps;
        }
      }

    } else {

      // Use BulirschStoerIntegration

      // create integration object
      boolean isTest = false;
      BulirschStoerIntegration bsi =
          new BulirschStoerIntegration(
              timeIntvl, speciesID, sztArray, ecosysRelationships, lPs, maxBSIErr, equationSet);

      // calculate delta-biomass and biomass "contributions" from each related
      // species
      for (int t = initTimeIdx + 1; t < timesteps; t++) {
        boolean success = bsi.performIntegration(time(initTime, t), currBiomass);
        if (!success) {
          System.out.printf("Integration failed to converge, t = %d\n", t);
          System.out.print(bsi.extrapArrayToString(biomassScale));
          break;
        }
        currBiomass = bsi.getYNew();
        System.arraycopy(currBiomass, 0, calcBiomass[t], 0, speciesCnt);

        contribsT = bsi.getContribs();
        for (int i = 0; i < speciesCnt; i++) {
          System.arraycopy(contribsT[i], 0, contribs[t - 1][i], 0, speciesCnt);
        }
      } // timestep loop
    }

    if (useHDF5) {
      saveHDF5OutputFile(
          calcBiomass, speciesID, matchingTimesteps, job.getNode_Config(), timestepsToSave);
      return null;
    }

    double[][] webServicesData = new double[speciesCnt][timesteps];
    if (Constants.useSimEngine) { // We need the webServicesData only for marginOfErrorCalculation
      // extract timestep data from CSV
      Functions.extractCSVDataRelns(job.getCsv(), ecosysTimesteps, ecosysRelationships);
      spNum = 0;
      for (NodeTimesteps nodeTimesteps : ecosysTimesteps.getTimestepMapValues()) {
        // copy nodetimestep data to local array for easier access
        System.arraycopy(nodeTimesteps.getBiomassArray(), 0, webServicesData[spNum], 0, timesteps);

        spNum++;
      }
    }
    // output data
    // A. print header
    psATN.printf("timesteps");
    for (int i = 0; i < timesteps; i++) {
      psATN.printf(",%d", i);
    }
    psATN.println();

    /* Convert to CSV String */
    String biomassCSV = "";
    biomassCSV = "Manipulation_id: " + job.getATNManipulationId() + "\n\n";

    int maxTimestep = job.getTimesteps();
    // Create Timestep Labels
    for (int j = 1; j <= maxTimestep; j++) {
      biomassCSV += "," + j;
    }
    HashMap<Integer, SpeciesZoneType> mSpecies = new HashMap<Integer, SpeciesZoneType>();
    // loop through each species
    for (int i = 0; i < speciesCnt; i++) {
      if (Constants.useSimEngine) {
        psATN.printf("i.%d.sim", speciesID[i]);
        // B. print WebServices simulation data for species
        for (int t = 0; t < timesteps; t++) {
          psATN.printf(",%9.0f", webServicesData[i][t]);
        }
        psATN.println();
      }

      // B. print combined biomass contributions (i.e. locally calculated biomass)
      // for current species.
      psATN.printf("i.%d.calc", speciesID[i]);
      for (int t = 0; t < timesteps; t++) {
        psATN.printf(",%9.0f", calcBiomass[t][i] * biomassScale);
      }
      psATN.println();

      //           //C. print individual biomass contributions from other species
      //           for (int j = 0; j < speciesCnt; j++) {
      //               psATN.printf("i.%d.j.%d.", speciesID[i], speciesID[j]);
      //               for (int t = 0; t < timesteps; t++) {
      //                   psATN.printf(",%9.0f", contribs[t][i][j] * biomassScale);
      //               }
      //               psATN.println();
      //           }

      float extinction = 1.E-15f;
      SimJobSZT sjSzt = job.getSpeciesZoneByNodeId(speciesID[i]);
      // add nodes to list in the order that they are received from infos
      String name = sjSzt.getName().replaceAll(",", " ") + " [" + sjSzt.getNodeIndex() + "]";
      String tempStr = name;
      for (int t = 0; t < maxTimestep; t++) {
        tempStr += ",";

        double biomass = calcBiomass[t][i] * biomassScale;

        if (biomass > 0) {
          tempStr += biomass > extinction ? Math.ceil(biomass) : 0;
        }

        if (t == maxTimestep - 1) {
          SpeciesZoneType szt = null;
          if (!mSpecies.containsKey(sjSzt.getNodeIndex())) {
            szt = new SpeciesZoneType(sjSzt.getName(), sjSzt.getNodeIndex(), 0, 0, biomass, null);
            mSpecies.put(sjSzt.getNodeIndex(), szt);

          } else { // update existing species current biomass
            szt = mSpecies.get(sjSzt.getNodeIndex());

            szt.setCurrentBiomass(biomass);
          }
        }
      }
      biomassCSV += "\n" + tempStr;
    }

    // Append node config to the ATN CSV
    psATN.println();
    psATN.println("\"node-config: " + job.getNode_Config() + "\"");

    biomassCSV += "\n\n";

    biomassCSV += job.getConsumeMap().toString() + "\n\n";

    biomassCSV += job.getPathTable().toString();

    job.setBiomassCsv(biomassCSV);

    // System.out.println(biomassCSV);
    return mSpecies;
  }
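The StepNormalizer and FixedStepHandler pairing used above is the standard Commons Math 3 way to sample a continuous integration at fixed intervals. A self-contained sketch of just that pattern on a trivial exponential-decay ODE (the tolerances and interval below are arbitrary, not the ATN parameters):

// Minimal Commons Math 3 sketch of fixed-interval sampling with StepNormalizer; not ATN-specific.
import org.apache.commons.math3.ode.FirstOrderDifferentialEquations;
import org.apache.commons.math3.ode.FirstOrderIntegrator;
import org.apache.commons.math3.ode.nonstiff.GraggBulirschStoerIntegrator;
import org.apache.commons.math3.ode.sampling.FixedStepHandler;
import org.apache.commons.math3.ode.sampling.StepNormalizer;

public class FixedStepSamplingSketch {
  public static void main(String[] args) {
    // dy/dt = -y, a stand-in for the ATN equations
    FirstOrderDifferentialEquations ode =
        new FirstOrderDifferentialEquations() {
          public int getDimension() { return 1; }
          public void computeDerivatives(double t, double[] y, double[] yDot) { yDot[0] = -y[0]; }
        };

    final double interval = 0.1;              // plays the role of timeIntvl
    final double[] samples = new double[101]; // one slot per sampled step

    FixedStepHandler sampler =
        new FixedStepHandler() {
          private int step = 0;
          public void init(double t0, double[] y0, double t) {}
          public void handleStep(double t, double[] y, double[] yDot, boolean isLast) {
            // Same guard as above: rounding can produce one extra call.
            if (step < samples.length) samples[step] = y[0];
            step++;
          }
        };

    FirstOrderIntegrator integrator = new GraggBulirschStoerIntegrator(1.0e-8, 1.0, 1.0e-10, 1.0e-10);
    integrator.addStepHandler(new StepNormalizer(interval, sampler));

    double[] y = {1.0};
    integrator.integrate(ode, 0.0, y, 10.0, y); // samples[] is filled every 0.1 time units
    System.out.println("y(10) integrated: " + y[0] + ", exact: " + Math.exp(-10.0));
  }
}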