@Override
  public Map<String, Results> validateAll() {
    CacheSchemaLibrary schemaLibrary = schemaLibrary(true);
    if (store.getAdvancedCache().getRpcManager() == null) {
      // This is a non-clustered cache, which cannot run Map-Reduce. In this case, just go
      // through all entries and run validation manually using the mapper ...
      DocumentValidationMapper mapper =
          new DocumentValidationMapper(schemaLibrary, defaultSchemaUri);
      ResultsCollector resultsCollector = new ResultsCollector();
      for (Map.Entry<String, SchematicEntry> entry : store.entrySet()) {
        String key = entry.getKey();
        SchematicEntry value = entry.getValue();
        mapper.map(key, value, resultsCollector);
      }
      return resultsCollector.getResultsByKey();
    }

    // It is a clustered cache, so we can run Map-Reduce ...

    // Create a copy of all of the JSON Schema documents ...
    InMemoryDocumentLibrary schemaDocs = new InMemoryDocumentLibrary(schemaLibrary.getName());
    for (Map.Entry<String, SchematicEntry> entry : schemaLibrary.store().entrySet()) {
      String key = entry.getKey();
      SchematicEntry value = entry.getValue();
      schemaDocs.put(key, value.getContentAsDocument());
    }
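    // (Presumably this standalone, in-memory copy, unlike the cache-backed
    // library, can be serialized and shipped to other nodes with the mapper.)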

    // Now create the Map-Reduce task, using the copy of the JSON Schema library ...
    MapReduceTask<String, SchematicEntry, String, Results> task =
        new MapReduceTask<String, SchematicEntry, String, Results>(this.store);
    task.mappedWith(new DocumentValidationMapper(schemaDocs, defaultSchemaUri));
    task.reducedWith(new DocumentValidationReducer());

    // Now execute ...
    return task.execute();
  }
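The reducer side of such a task folds the intermediate Results values emitted for each key into one. A minimal sketch against Infinispan's classic Map-Reduce API (org.infinispan.distexec.mapreduce) is shown below; it is not ModeShape's actual DocumentValidationReducer, and the Results.merge(...) call is an assumed helper:

import java.util.Iterator;
import org.infinispan.distexec.mapreduce.Reducer;

// Hypothetical reducer: Infinispan calls reduce(...) once per mapped key, with
// an iterator over every intermediate Results value emitted for that key.
public class MergeResultsReducer implements Reducer<String, Results> {
  private static final long serialVersionUID = 1L;

  @Override
  public Results reduce(String documentKey, Iterator<Results> iter) {
    Results merged = iter.next(); // each reduced key has at least one value
    while (iter.hasNext()) {
      merged = merged.merge(iter.next()); // merge(...) is an assumption
    }
    return merged;
  }
}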
Example #2
  @Override
  public ExperimentResults compute(Builder builder, Set<SimArgs> inputs) {
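    // The IdMaps hand out short string ids (prefixes "c", "s", "o") for the
    // configurations, scenarios and objective functions; tasks carry only the
    // ids, and the objects themselves travel via the JPPF DataProvider below.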
    final IdMap<MASConfiguration> configMap = new IdMap<>("c", MASConfiguration.class);
    final IdMap<ScenarioProvider> scenarioMap = new IdMap<>("s", ScenarioProvider.class);
    final IdMap<ObjectiveFunction> objFuncMap = new IdMap<>("o", ObjectiveFunction.class);

    final List<ResultListener> listeners = newArrayList(builder.resultListeners);

    @SuppressWarnings({"rawtypes", "unchecked"})
    final IdMap<PostProcessor<?>> ppMap = new IdMap("p", PostProcessor.class);
    final Map<String, Scenario> scenariosMap = newLinkedHashMap();

    // create tasks
    final List<SimulationTask> tasks = newArrayList();
    constructTasks(inputs, tasks, configMap, scenarioMap, objFuncMap, ppMap, scenariosMap);

    // sort tasks by scenario, configuration, objective function,
    // post-processor, and seed (in that order)
    Collections.sort(tasks);

    // determine size of batches
    final int numBatches = Math.min(tasks.size(), builder.numBatches);
    final int batchSize =
        DoubleMath.roundToInt(tasks.size() / (double) numBatches, RoundingMode.CEILING);
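    // rounding up guarantees numBatches * batchSize >= tasks.size(), so every
    // task falls into one of the batches (the final batch may be smaller)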

    final Map<Task<?>, JPPFJob> taskJobMap = newLinkedHashMap();
    final ResultsCollector res =
        new ResultsCollector(tasks.size(), scenariosMap, taskJobMap, listeners);
    final List<JPPFJob> jobs = newArrayList();
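    // one JPPF job is created per batch; the single ResultsCollector is passed
    // to every job and aggregates the task results as they come back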

    // partition the sorted task list into consecutive, non-empty batches of at
    // most batchSize tasks (Guava's Lists.partition); the last batch may be
    // smaller when batchSize does not divide tasks.size() evenly
    final List<List<SimulationTask>> batches = Lists.partition(tasks, batchSize);
    for (int i = 0; i < batches.size(); i++) {
      final JPPFJob job = new JPPFJob(new MemoryMapDataProvider(), res);
      job.setName(Joiner.on("").join(JOB_NAME, " ", i + 1, "/", batches.size()));
      jobs.add(job);
      for (final SimulationTask t : batches.get(i)) {
        try {
          final MASConfiguration config = configMap.getValue(t.getConfigurationId());
          final ScenarioProvider scenario = scenarioMap.getValue(t.getScenarioId());
          final ObjectiveFunction objFunc = objFuncMap.getValue(t.getObjectiveFunctionId());
          job.getDataProvider()
              .setParameter(t.getPostProcessorId(), ppMap.getValue(t.getPostProcessorId()));
          job.getDataProvider().setParameter(t.getConfigurationId(), config);
          job.getDataProvider().setParameter(t.getScenarioId(), scenario);
          job.getDataProvider().setParameter(t.getObjectiveFunctionId(), objFunc);
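
          // worker nodes look these parameters up by id from the job's
          // DataProvider when they execute the task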

          job.add(t);
        } catch (final JPPFException e) {
          throw new IllegalStateException(e);
        }
        taskJobMap.put(t, job);
      }
    }

    for (final ResultListener l : listeners) {
      l.startComputing(tasks.size());
    }

    checkState(!getJPPFClient().isClosed());
    try {
      for (final JPPFJob job : jobs) {
        getJPPFClient().submitJob(job);
      }
    } catch (final Exception e) {
      throw new IllegalStateException(e);
    }
    res.awaitResults();
    for (final ResultListener l : listeners) {
      l.doneComputing();
    }
    return ExperimentResults.create(builder, res.buildResults());
  }
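The ResultsCollector used above is not part of this excerpt. A simplified sketch of what such a collector could look like on top of JPPF's job listener API follows; the class name, constructor, and bookkeeping are assumptions, not RinSim's actual implementation:

import java.util.concurrent.CountDownLatch;
import org.jppf.client.event.JobEvent;
import org.jppf.client.event.JobListenerAdapter;
import org.jppf.node.protocol.Task;

// Hypothetical, simplified stand-in for the ResultsCollector above: it counts
// down once per returned task and lets the submitting thread block until the
// whole experiment has come back.
class SimpleResultsCollector extends JobListenerAdapter {
  private final CountDownLatch latch;

  SimpleResultsCollector(int expectedTasks) {
    latch = new CountDownLatch(expectedTasks);
  }

  @Override
  public void jobReturned(JobEvent event) {
    for (final Task<?> task : event.getJobTasks()) {
      // a real implementation would also record task.getResult() or
      // task.getThrowable() here before counting down
      latch.countDown();
    }
  }

  void awaitResults() {
    try {
      latch.await();
    } catch (final InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IllegalStateException(e);
    }
  }
}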