Example #1
  @Override
  public void populateEntity(
      RdfPersistenceContext persistenceContext,
      Object entity,
      Node subject,
      Graph graph,
      Consumer<Triple> outSink) {
    Model model = ModelFactory.createModelForGraph(graph);
    RDFNode root = ModelUtils.convertGraphNodeToRDFNode(subject, model);

    // Treat the entity as a raw Map<Object, Object> so decoded RDF entries can be copied into it
    Map map = createMapView.apply(entity);

    for (Statement stmt : root.asResource().listProperties(entry).toList()) {
      Resource e = stmt.getObject().asResource();

      Node kNode = e.getProperty(key).getObject().asNode();
      Node vNode = e.getProperty(value).getObject().asNode();

      // TODO: dynamically determine which entity type each node represents,
      // so that a TypedNode(rdfType, node) can be passed instead of null
      RdfType rdfType = null;
      Object k =
          persistenceContext.entityFor(
              Object.class, kNode, null); // eventually: new TypedNode(rdfType, kNode)
      Object v =
          persistenceContext.entityFor(
              Object.class, vNode, null); // eventually: new TypedNode(rdfType, vNode)

      map.put(k, v);
    }
  }
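The snippet above references a few members it never declares. A minimal sketch of what they might look like, assuming a Jena vocabulary for the map entries (the namespace URI and the map-view factory are assumptions, not part of the original; requires org.apache.jena.rdf.model.Property, ResourceFactory, java.util.Map and java.util.function.Function):

  // Hypothetical declarations for the members used above; the vocabulary URI is an assumption
  private static final String NS = "http://example.org/map#";
  private final Property entry = ResourceFactory.createProperty(NS, "entry");
  private final Property key = ResourceFactory.createProperty(NS, "key");
  private final Property value = ResourceFactory.createProperty(NS, "value");
  // Exposes the target entity as a java.util.Map so decoded entries can be copied into it
  private final Function<Object, Map<Object, Object>> createMapView =
      bean -> (Map<Object, Object>) bean; // simplest case: the entity itself is a Map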
Example #2
 /** Return a list of all tests of the given type, according to the current filters. */
 public List<Resource> findTestsOfType(Resource testType) {
   ArrayList<Resource> result = new ArrayList<>();
   StmtIterator si = testDefinitions.listStatements(null, RDF.type, testType);
   while (si.hasNext()) {
     Resource test = si.nextStatement().getSubject();
     boolean accept = true;
     // Check test status
     Literal status = (Literal) test.getProperty(RDFTest.status).getObject();
     if (approvedOnly) {
       accept = status.getString().equals(STATUS_FLAGS[0]);
     } else {
       accept = false;
        for (String statusFlag : STATUS_FLAGS) {
          if (status.getString().equals(statusFlag)) {
           accept = true;
           break;
         }
       }
     }
     // Check for blocked tests
      for (String blockedTest : BLOCKED_TESTS) {
        if (blockedTest.equals(test.toString())) {
         accept = false;
       }
     }
     // End of filter tests
     if (accept) {
       result.add(test);
     }
   }
   return result;
 }
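The filters above rely on constants that are not shown. A hedged sketch of how they might be declared (the concrete status strings and blocked-test URI are assumptions; by convention STATUS_FLAGS[0] is the approved status):

 // Hypothetical filter configuration; actual values depend on the test manifest vocabulary
 private boolean approvedOnly = true;
 private static final String[] STATUS_FLAGS = {"APPROVED", "PROPOSED"};
 private static final String[] BLOCKED_TESTS = {
   "http://example.org/manifest#knownFailingTest" // example URI, assumed
 };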
Example #3
 /**
  * Update or persist a domain object other than String, Date, and the usual primitive types. The
  * write style is set to shallow=true, which stops the recursive traversal of the object graph.
  *
  * @param subject the resource whose property is being updated
  * @param property the property to set
  * @param o the domain object to write as the new property value
  */
 private void setPropertyValue(Resource subject, Property property, Object o) {
   Statement s = subject.getProperty(property);
   Resource existing = null;
   if (s != null) {
     existing = s.getResource();
     if (existing.isAnon()) existing.removeProperties();
   }
   subject.removeAll(property).addProperty(property, _write(o, true));
 }
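For illustration only, a hedged call-site sketch (the URIs, the hasCustomer property, the customerBean object, and the Model field m are assumptions, not part of the original):

 // Hypothetical call site: re-point an order at a customer bean. Because the write is shallow,
 // only a reference to the customer's resource is attached; its fields are not re-traversed here.
 Resource order = m.createResource("http://example.org/order/42");
 Property hasCustomer = m.createProperty("http://example.org/ns#hasCustomer");
 setPropertyValue(order, hasCustomer, customerBean);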
Example #4
 private Resource toResource(Object bean) {
   String uri = instanceURI(bean);
   Resource type = getRDFSClass(bean);
   if (jpa.isEmbedded(bean) || uri == null) return m.createResource(type);
   else {
     // added by saeed to differentiate between save and update
     Resource resource = m.createResource(uri);
     if (resource.getProperty(RDF.type) == null) {
       resource.addProperty(RDF.type, type);
     }
     return resource;
   }
 }
Example #5
  public static ArrayList<RdfModel> processRDF(InputStream in) {
    Model model = ModelFactory.createDefaultModel();
    ArrayList<RdfModel> result = new ArrayList<RdfModel>();
    if (in != null) {
      model.read(in, "RDF/XML");
      // Now, I only care about these properties: has-title, year-of, full-name. All three of
      // them must exist.
      for (final ResIterator it = model.listSubjectsWithProperty(RdfPropertyList.p_hasTitle);
          it.hasNext(); ) {
        RdfModel rm = new RdfModel();
        try {
          final Resource node =
              it.next().asResource(); // node is a resource which has title property
          rm.setHasTitle(node.getProperty(RdfPropertyList.p_hasTitle).getString());

          StringBuilder authors = new StringBuilder();
          StringBuilder dates = new StringBuilder();

          for (final StmtIterator all_props = node.listProperties(); all_props.hasNext(); ) {
            try {
              Resource all_res = all_props.next().getObject().asResource();
              StmtIterator fullnames = all_res.listProperties(RdfPropertyList.p_fullName);
              StmtIterator years = all_res.listProperties(RdfPropertyList.p_year);
              // For now I may have multiple authors or dates in a String, separated by commas
              RdfProcess newprocess = new RdfProcess();

              while (fullnames.hasNext()) {
                String fullname = newprocess.getValue(fullnames.next().getObject());
                if (!fullname.equals("Invalid/Lack of Information")) {
                  authors.append(fullname + " , ");
                }
              }
              while (years.hasNext()) {
                String year = newprocess.getValue(years.next().getObject());
                if (!year.equals("Invalid/Lack of Information")) {
                  dates.append(year + " , ");
                }
              }
            } catch (Exception e) {
              // Skip objects that are not resources or lack the full-name/year properties
            }
          }
          rm.setHasDate(dates.toString());
          rm.setHasAuthor(authors.toString());
        } catch (Exception e) {
          // Skip subjects whose title or related data cannot be read
        }
        result.add(rm);
      }
    }
    return result;
  }
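A hedged usage sketch for the method above (the file name and the RdfModel getters are assumptions; only the setters appear in the original):

  // Hypothetical caller: parse a local RDF/XML file and print the recovered fields
  try (InputStream in = new FileInputStream("publications.rdf")) {
    for (RdfModel rm : processRDF(in)) {
      System.out.println(rm.getHasTitle() + " | " + rm.getHasAuthor() + " | " + rm.getHasDate());
    }
  } catch (IOException e) {
    e.printStackTrace();
  }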
Example #6
  @Override
  public EntityDefinition open(Assembler a, Resource root, Mode mode) {
    String prologue = "PREFIX : <" + NS + ">   PREFIX list: <http://jena.apache.org/ARQ/list#> ";
    Model model = root.getModel();

    String qs1 =
        StrUtils.strjoinNL(
            prologue,
            "SELECT * {",
            "  ?eMap  :entityField  ?entityField ;",
            "         :map ?map ;",
            "         :defaultField ?dftField .",
            "  OPTIONAL {",
            "    ?eMap :graphField ?graphField",
            "  }",
            "  OPTIONAL {",
            "    ?eMap :langField ?langField",
            "  }",
            "  OPTIONAL {",
            "    ?eMap :uidField ?uidField",
            "  }",
            "}");
    ParameterizedSparqlString pss = new ParameterizedSparqlString(qs1);
    pss.setIri("eMap", root.getURI());

    Query query1 = QueryFactory.create(pss.toString());
    List<QuerySolution> results;
    try (QueryExecution qexec1 = QueryExecutionFactory.create(query1, model)) {
      ResultSet rs1 = qexec1.execSelect();
      results = ResultSetFormatter.toList(rs1);
    }
    if (results.isEmpty()) {
      Log.warn(this, "Failed to find a valid EntityMap for : " + root);
      throw new TextIndexException("Failed to find a valid EntityMap for : " + root);
    }

    if (results.size() != 1) {
      Log.warn(this, "Multiple matches for EntityMap for : " + root);
      throw new TextIndexException("Multiple matches for EntityMap for : " + root);
    }

    QuerySolution qsol1 = results.get(0);
    String entityField = qsol1.getLiteral("entityField").getLexicalForm();
    String graphField =
        qsol1.contains("graphField") ? qsol1.getLiteral("graphField").getLexicalForm() : null;
    String langField =
        qsol1.contains("langField") ? qsol1.getLiteral("langField").getLexicalForm() : null;
    String defaultField =
        qsol1.contains("dftField") ? qsol1.getLiteral("dftField").getLexicalForm() : null;
    String uniqueIdField =
        qsol1.contains("uidField") ? qsol1.getLiteral("uidField").getLexicalForm() : null;

    Multimap<String, Node> mapDefs = HashMultimap.create();
    Map<String, Analyzer> analyzerDefs = new HashMap<>();

    Statement listStmt = root.getProperty(TextVocab.pMap);
    while (listStmt != null) {
      RDFNode n = listStmt.getObject();
      if (!n.isResource()) {
        throw new TextIndexException("Text list node is not a resource : " + n);
      }
      Resource listResource = n.asResource();
      if (listResource.equals(RDF.nil)) {
        break; // end of the list
      }

      Statement listEntryStmt = listResource.getProperty(RDF.first);
      if (listEntryStmt == null) {
        throw new TextIndexException("Text map list is not well formed.  No rdf:first property");
      }
      n = listEntryStmt.getObject();
      if (!n.isResource()) {
        throw new TextIndexException("Text map list entry is not a resource : " + n);
      }
      Resource listEntry = n.asResource();

      Statement fieldStatement = listEntry.getProperty(TextVocab.pField);
      if (fieldStatement == null) {
        throw new TextIndexException("Text map entry has no field property");
      }
      n = fieldStatement.getObject();
      if (!n.isLiteral()) {
        throw new TextIndexException("Text map entry field property has no literal value : " + n);
      }
      String field = n.asLiteral().getLexicalForm();

      Statement predicateStatement = listEntry.getProperty(TextVocab.pPredicate);
      if (predicateStatement == null) {
        throw new TextIndexException("Text map entry has no predicate property");
      }
      n = predicateStatement.getObject();
      if (!n.isURIResource()) {
        throw new TextIndexException(
            "Text map entry predicate property has non resource value : " + n);
      }
      mapDefs.put(field, n.asNode());

      Statement analyzerStatement = listEntry.getProperty(TextVocab.pAnalyzer);
      if (analyzerStatement != null) {
        n = analyzerStatement.getObject();
        if (!n.isResource()) {
          throw new TextIndexException("Text map entry analyzer property is not a resource : " + n);
        }
        Resource analyzerResource = n.asResource();
        Analyzer analyzer = (Analyzer) a.open(analyzerResource);
        analyzerDefs.put(field, analyzer);
      }

      // move on to the next element in the list
      listStmt = listResource.getProperty(RDF.rest);
    }

    // Primary field/predicate
    if (defaultField != null) {
      Collection<Node> c = mapDefs.get(defaultField);
      if (c.isEmpty())
        throw new TextIndexException("No definition of primary field '" + defaultField + "'");
    }

    EntityDefinition docDef = new EntityDefinition(entityField, defaultField);
    docDef.setGraphField(graphField);
    docDef.setLangField(langField);
    docDef.setUidField(uniqueIdField);
    for (String f : mapDefs.keySet()) {
      for (Node p : mapDefs.get(f)) docDef.set(f, p);
    }
    for (String f : analyzerDefs.keySet()) {
      docDef.setAnalyzer(f, analyzerDefs.get(f));
    }
    return docDef;
  }
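For reference, a hedged sketch of the entity-map configuration shape this method parses, assuming NS is the jena-text vocabulary namespace (an assumption); in practice this shape would normally live in an assembler configuration file rather than be built in code:

    // Hypothetical configuration matching the SPARQL query and rdf:List traversal above
    String ttl = String.join("\n",
        "PREFIX text: <http://jena.apache.org/text#>",
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
        "<#entMap> a text:EntityMap ;",
        "    text:entityField  \"uri\" ;",
        "    text:defaultField \"text\" ;",
        "    text:map ( [ text:field \"text\" ; text:predicate rdfs:label ] ) .");
    Model conf = ModelFactory.createDefaultModel();
    conf.read(new StringReader(ttl), "http://example.org/base", "TTL");
    // The resource passed to open(...) as 'root' would then be the <#entMap> node in this model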