Example #1
  public CacheSchematicDb(Cache<String, SchematicEntry> store) {
    this.name = store.getName();
    this.store = store;
    // Establish the default settings for this database, which may be overridden below ...
    String defaultContentTypeForDocs = Schematic.ContentTypes.JSON;
    String defaultContentTypeForBinary = Schematic.ContentTypes.BINARY;
    String defaultSchemaUri = JsonSchema.Version.Latest.CORE_METASCHEMA_URL;
    String description = "";
    String schemaCacheName = store.getName() + "Schemas";

    // Load the database document from the cache ...
    SchematicEntry databaseDocument = store.get("");
    if (databaseDocument != null && databaseDocument.hasDocumentContent()) {
      Document dbDoc = databaseDocument.getContentAsDocument();
      defaultContentTypeForDocs =
          dbDoc.getString("defaultContentTypeForDocuments", defaultContentTypeForDocs);
      defaultContentTypeForBinary =
          dbDoc.getString("defaultContentTypeForBinary", defaultContentTypeForBinary);
      defaultSchemaUri = dbDoc.getString("defaultSchemaUri", defaultSchemaUri);
      description = dbDoc.getString("description", description);
      schemaCacheName = dbDoc.getString("schemaCacheName", schemaCacheName);
    }

    // Record the settings, using any overrides found in the database document ...
    this.defaultContentTypeForBinary = defaultContentTypeForBinary;
    this.defaultContentTypeForDocument = defaultContentTypeForDocs;
    this.defaultSchemaUri = defaultSchemaUri;
    this.description = description;
    this.schemaCacheName = schemaCacheName;
    this.context = new CacheContext(store.getAdvancedCache());
  }
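
A minimal usage sketch for this constructor is shown below. It assumes an Infinispan DefaultCacheManager with its default configuration; the cache name "documents" and the package names in the imports are illustrative assumptions, not taken from the example above.

import org.infinispan.Cache;
import org.infinispan.manager.DefaultCacheManager;
import org.infinispan.schematic.SchematicEntry;             // assumed package
import org.infinispan.schematic.internal.CacheSchematicDb;  // assumed package

public class CacheSchematicDbUsage {
  public static void main(String[] args) throws Exception {
    // Start a cache manager with the default configuration and obtain a cache;
    // the cache name "documents" is purely illustrative.
    DefaultCacheManager cacheManager = new DefaultCacheManager();
    Cache<String, SchematicEntry> store = cacheManager.getCache("documents");

    // The constructor reads the optional database document stored under the
    // empty-string key and otherwise keeps the defaults shown in Example #1.
    CacheSchematicDb db = new CacheSchematicDb(store);

    cacheManager.stop();
  }
}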
Example #2
  @Override
  public Map<String, Results> validateAll() {
    CacheSchemaLibrary schemaLibrary = schemaLibrary(true);
    if (store.getAdvancedCache().getRpcManager() == null) {
      // This is a non-clustered cache, which cannot run Map-Reduce. In this case, just go
      // through them all and run validation manually using the mapper ...
      DocumentValidationMapper mapper =
          new DocumentValidationMapper(schemaLibrary, defaultSchemaUri);
      ResultsCollector resultsCollector = new ResultsCollector();
      for (Map.Entry<String, SchematicEntry> entry : store.entrySet()) {
        String key = entry.getKey();
        SchematicEntry value = entry.getValue();
        mapper.map(key, value, resultsCollector);
      }
      return resultsCollector.getResultsByKey();
    }

    // It is a clustered cache, so we can run Map-Reduce ...

    // Create a copy of all of the JSON Schema documents ...
    InMemoryDocumentLibrary schemaDocs = new InMemoryDocumentLibrary(schemaLibrary.getName());
    for (Map.Entry<String, SchematicEntry> entry : schemaLibrary.store().entrySet()) {
      String key = entry.getKey();
      SchematicEntry value = entry.getValue();
      schemaDocs.put(key, value.getContentAsDocument());
    }

    // Now create the Map-Reduce task, using the copy of the JSON Schema library ...
    MapReduceTask<String, SchematicEntry, String, Results> task =
        new MapReduceTask<String, SchematicEntry, String, Results>(this.store);
    task.mappedWith(new DocumentValidationMapper(schemaDocs, defaultSchemaUri));
    task.reducedWith(new DocumentValidationReducer());

    // Now execute ...
    return task.execute();
  }
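
A short sketch of consuming validateAll()'s output follows; the hasProblems() accessor on Results and the import paths are assumptions about the schema-library API and may differ in the actual interface.

import java.util.Map;
import org.infinispan.schematic.SchemaLibrary.Results;  // assumed package/nesting

public class ValidateAllUsage {
  // Hypothetical helper; `db` is assumed to be the CacheSchematicDb from Example #1.
  static void reportValidationProblems(CacheSchematicDb db) {
    Map<String, Results> resultsByKey = db.validateAll();
    for (Map.Entry<String, Results> entry : resultsByKey.entrySet()) {
      Results results = entry.getValue();
      // hasProblems() is an assumed accessor; the real Results type may expose
      // errors and warnings through different methods.
      if (results.hasProblems()) {
        System.out.println("Validation problems for '" + entry.getKey() + "': " + results);
      }
    }
  }
}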