Code example #1
  public MyOWLSubClassOfAxiom(OWLAxiom b, MyOWLOntology onto) throws Exception {
    super(b, onto);
    OWLClassExpression subA = ((OWLSubClassOfAxiom) b).getSubClass();
    OWLClassExpression superA = ((OWLSubClassOfAxiom) b).getSuperClass();

    subConcept = o.getOWLConcept(subA.asOWLClass().toStringID());
    switch (superA.getClassExpressionType()) {
      case OBJECT_SOME_VALUES_FROM:
        {
          OWLObjectProperty ra =
              (OWLObjectProperty) ((OWLObjectSomeValuesFrom) superA).getProperty();
          OWLClassExpression fa = ((OWLObjectSomeValuesFrom) superA).getFiller();
          rel = o.getOWLRelation(ra.toStringID());
          superConcept = o.getOWLConcept(fa.asOWLClass().toStringID());
          break;
        }
      case OWL_CLASS:
        {
          superConcept = o.getOWLConcept(superA.asOWLClass().toStringID());
          rel = null;
          break;
        }
      default:
        throw new Exception(
            "We do not know how to deal with the OWLClassExpresions \n"
                + superA.getClassExpressionType());
    }
  }
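
The constructor above dispatches on getClassExpressionType() to handle either an existential restriction or a named class as the superclass. For reference, here is a minimal, self-contained sketch of the same dispatch using only stock OWL API 3.x calls; the example.org IRIs and entity names are hypothetical.

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class SubClassOfInspectionSketch {
  public static void main(String[] args) {
    OWLDataFactory df = OWLManager.createOWLOntologyManager().getOWLDataFactory();
    // Hypothetical entities, for illustration only.
    OWLClass c = df.getOWLClass(IRI.create("http://example.org/onto#C"));
    OWLClass d = df.getOWLClass(IRI.create("http://example.org/onto#D"));
    OWLObjectProperty r = df.getOWLObjectProperty(IRI.create("http://example.org/onto#R"));

    // C SubClassOf (R some D)
    OWLSubClassOfAxiom ax = df.getOWLSubClassOfAxiom(c, df.getOWLObjectSomeValuesFrom(r, d));

    OWLClassExpression superExpr = ax.getSuperClass();
    // Same dispatch as the constructor above: existential restriction vs. named class.
    switch (superExpr.getClassExpressionType()) {
      case OBJECT_SOME_VALUES_FROM:
        OWLObjectSomeValuesFrom some = (OWLObjectSomeValuesFrom) superExpr;
        System.out.println("role: " + some.getProperty() + ", filler: " + some.getFiller());
        break;
      case OWL_CLASS:
        System.out.println("named superclass: " + superExpr.asOWLClass());
        break;
      default:
        System.out.println("unhandled expression type: " + superExpr.getClassExpressionType());
    }
  }
}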
Code example #2
File: OntologyManager.java  Project: gesteban/sened
  public boolean isDomainOrRangeOfObjectProperty(OWLClass clase, OWLObjectProperty prop) {

    for (OWLClass owlClass1 : _reasoner.getObjectPropertyDomains(prop, false).getFlattened()) {
      if (owlClass1.getIRI().equals(clase.getIRI())) {
        Log.d(
            TAG,
            "<isDomainOrRangeOfObjectProperty> "
                + clase.getIRI().getRemainder().get()
                + " dominio o rango de "
                + prop.getIRI().getRemainder().get()
                + "? SI!");
        return true;
      }
    }
    for (OWLClass owlClass1 : _reasoner.getObjectPropertyRanges(prop, false).getFlattened()) {
      if (owlClass1.getIRI().equals(clase.getIRI())) {
        Log.d(
            TAG,
            "<isDomainOrRangeOfObjectProperty> "
                + clase.getIRI().getRemainder().get()
                + " dominio o rango de "
                + prop.getIRI().getRemainder().get()
                + "? SI!");
        return true;
      }
    }
    Log.d(
        TAG,
        "<isDomainOrRangeOfObjectProperty> "
            + clase.getIRI().getRemainder().get()
            + " dominio o rango de "
            + prop.getIRI().getRemainder().get()
            + "? NO");
    return false;
  }
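
For reference, the same membership test can be written once against the generic OWLReasoner interface. Below is a minimal sketch (reasoner construction omitted; any OWL API 3.x reasoner implementation should work), relying on the fact that OWLClass equality is IRI-based.

import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.reasoner.OWLReasoner;

public class DomainRangeCheckSketch {
  /** True if cls appears among the (possibly indirect) domains or ranges of prop. */
  public static boolean isDomainOrRange(OWLReasoner reasoner, OWLClass cls, OWLObjectProperty prop) {
    // direct = false, so indirect (inferred) domain/range classes are included as well.
    return reasoner.getObjectPropertyDomains(prop, false).getFlattened().contains(cls)
        || reasoner.getObjectPropertyRanges(prop, false).getFlattened().contains(cls);
  }
}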
Code example #3
  private void runWithSeparateFiles() {
    if (owlFile == null) {
      throw new NullPointerException("You have to specify an ontology file!");
    }

    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = null;
    OBDADataFactory obdaDataFactory = OBDADataFactoryImpl.getInstance();
    try {
      ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile)));

      if (disableReasoning) {
        /*
         * when reasoning is disabled, we extract only the declaration assertions for the vocabulary
         */
        ontology = extractDeclarations(manager, ontology);
      }

      Collection<Predicate> predicates = new ArrayList<>();

      for (OWLClass owlClass : ontology.getClassesInSignature()) {
        Predicate predicate = obdaDataFactory.getClassPredicate(owlClass.getIRI().toString());
        predicates.add(predicate);
      }
      for (OWLDataProperty owlDataProperty : ontology.getDataPropertiesInSignature()) {
        Predicate predicate =
            obdaDataFactory.getDataPropertyPredicate(owlDataProperty.getIRI().toString());
        predicates.add(predicate);
      }
      for (OWLObjectProperty owlObjectProperty : ontology.getObjectPropertiesInSignature()) {
        Predicate predicate =
            obdaDataFactory.getObjectPropertyPredicate(owlObjectProperty.getIRI().toString());
        predicates.add(predicate);
      }

      OBDAModel obdaModel = loadMappingFile(mappingFile);

      Ontology inputOntology = OWLAPI3TranslatorUtility.translate(ontology);

      obdaModel.declareAll(inputOntology.getVocabulary());

      int numPredicates = predicates.size();

      int i = 1;
      for (Predicate predicate : predicates) {
        System.err.println(String.format("Materializing %s (%d/%d)", predicate, i, numPredicates));
        serializePredicate(ontology, inputOntology, obdaModel, predicate, outputFile, format);
        i++;
      }

    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
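
The OBDA factory, mapping and serialization calls above are project-specific; the signature walk that builds the predicate list can be sketched with plain OWL API calls alone. The class and method names below are illustrative, not from the project.

import java.util.LinkedHashSet;
import java.util.Set;
import org.semanticweb.owlapi.model.*;

public class SignatureIriCollectorSketch {
  /** Collects the IRIs of every class, data property and object property in the ontology signature. */
  public static Set<String> collect(OWLOntology ontology) {
    Set<String> iris = new LinkedHashSet<String>();
    for (OWLClass c : ontology.getClassesInSignature()) {
      iris.add(c.getIRI().toString());
    }
    for (OWLDataProperty p : ontology.getDataPropertiesInSignature()) {
      iris.add(p.getIRI().toString());
    }
    for (OWLObjectProperty p : ontology.getObjectPropertiesInSignature()) {
      iris.add(p.getIRI().toString());
    }
    return iris;
  }
}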
Code example #4
  /**
   * This method is used to set up a new/fresh OBDA model. This is done by replacing the instance
   * this.obdacontroller (the OBDA model) with a new object. On creation, listeners for the data
   * sources, mappings and queries are set up so that changes in these trigger an ontology change.
   *
   * <p>Additionally, this method configures all available OBDAOWLReasonerFactory objects to have a
   * reference to the newly created OBDA model and to the global preference object. This is
   * necessary so that the factories are able to pass the OBDA model to the reasoner instances when
   * they are created.
   */
  private void setupNewOBDAModel() {
    OBDAModel activeOBDAModel = getActiveOBDAModel();

    if (activeOBDAModel != null) {
      return;
    }
    activeOBDAModel = dfac.getOBDAModel();

    activeOBDAModel.addSourcesListener(dlistener);
    activeOBDAModel.addMappingsListener(mlistener);
    queryController.addListener(qlistener);

    OWLModelManager mmgr = owlEditorKit.getOWLWorkspace().getOWLModelManager();

    Set<OWLOntology> ontologies = mmgr.getOntologies();
    for (OWLOntology ontology : ontologies) {
      // Setup the entity declarations
      for (OWLClass c : ontology.getClassesInSignature()) {
        OClass pred = ofac.createClass(c.getIRI().toString());
        activeOBDAModel.declareClass(pred);
      }
      for (OWLObjectProperty r : ontology.getObjectPropertiesInSignature()) {
        ObjectPropertyExpression pred = ofac.createObjectProperty(r.getIRI().toString());
        activeOBDAModel.declareObjectProperty(pred);
      }
      for (OWLDataProperty p : ontology.getDataPropertiesInSignature()) {
        DataPropertyExpression pred = ofac.createDataProperty(p.getIRI().toString());
        activeOBDAModel.declareDataProperty(pred);
      }
    }

    // Setup the prefixes
    PrefixOWLOntologyFormat prefixManager =
        PrefixUtilities.getPrefixOWLOntologyFormat(mmgr.getActiveOntology());
    //		addOBDACommonPrefixes(prefixManager);

    PrefixManagerWrapper prefixwrapper = new PrefixManagerWrapper(prefixManager);
    activeOBDAModel.setPrefixManager(prefixwrapper);

    OWLOntology activeOntology = mmgr.getActiveOntology();

    String defaultPrefix = prefixManager.getDefaultPrefix();
    if (defaultPrefix == null) {
      OWLOntologyID ontologyID = activeOntology.getOntologyID();
      defaultPrefix = ontologyID.getOntologyIRI().toURI().toString();
    }
    activeOBDAModel.getPrefixManager().addPrefix(PrefixManager.DEFAULT_PREFIX, defaultPrefix);

    // Add the model
    URI modelUri = activeOntology.getOntologyID().getOntologyIRI().toURI();
    obdamodels.put(modelUri, activeOBDAModel);
  }
Code example #5
  private Iterable<Concept> getDomainsConcepts(OWLOntology o, OWLObjectProperty objectProperty) {
    String uri = objectProperty.getIRI().toString();
    if (objectProperty.getDomains(o).size() == 0) {
      throw new LumifyException("Invalid number of domain properties on " + uri);
    }

    List<Concept> domains = new ArrayList<Concept>();
    for (OWLClassExpression domainClassExpr : objectProperty.getDomains(o)) {
      OWLClass domainClass = domainClassExpr.asOWLClass();
      String domainClassUri = domainClass.getIRI().toString();
      Concept ontologyClass = getConceptByIRI(domainClassUri);
      checkNotNull(ontologyClass, "Could not find class with uri: " + domainClassUri);
      domains.add(ontologyClass);
    }
    return domains;
  }
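
Note that getDomains(OWLOntology) in OWL API 3.x returns the asserted domain class expressions, and asOWLClass() throws if an expression is anonymous. A slightly more defensive variant of the loop could look like this sketch (names are illustrative):

import java.util.ArrayList;
import java.util.List;
import org.semanticweb.owlapi.model.*;

public class AssertedDomainsSketch {
  public static List<String> domainClassIris(OWLOntology o, OWLObjectProperty p) {
    List<String> iris = new ArrayList<String>();
    for (OWLClassExpression domain : p.getDomains(o)) {
      if (!domain.isAnonymous()) { // only named classes can be converted with asOWLClass()
        iris.add(domain.asOWLClass().getIRI().toString());
      }
    }
    return iris;
  }
}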
Code example #6
  protected void importInverseOf(OWLOntology o, OWLObjectProperty objectProperty) {
    String iri = objectProperty.getIRI().toString();
    Relationship fromRelationship = null;

    for (OWLObjectPropertyExpression inverseOf : objectProperty.getInverses(o)) {
      if (inverseOf instanceof OWLObjectProperty) {
        if (fromRelationship == null) {
          fromRelationship = getRelationshipByIRI(iri);
        }

        OWLObjectProperty inverseOfOWLObjectProperty = (OWLObjectProperty) inverseOf;
        String inverseOfIri = inverseOfOWLObjectProperty.getIRI().toString();
        Relationship inverseOfRelationship = getRelationshipByIRI(inverseOfIri);
        getOrCreateInverseOfRelationship(fromRelationship, inverseOfRelationship);
      }
    }
  }
Code example #7
 // TODO optimize translation: (e) can be discarded for roles for which there
 // aren't assertions
 private Set<Rule> translation(QLAxiomsTranslator axiomsTranslator) {
   final Set<Rule> result = new HashSet<Rule>();
   for (final OWLClassAssertionAxiom assertion : ontologyNormalization.conceptAssertions())
     result.addAll(axiomsTranslator.assertionTranslation(assertion));
   for (final OWLObjectPropertyAssertionAxiom assertion : ontologyNormalization.roleAssertions())
     result.addAll(axiomsTranslator.assertionTranslation(assertion));
   for (final OWLDataPropertyAssertionAxiom assertion : ontologyNormalization.dataAssertions())
     result.addAll(axiomsTranslator.assertionTranslation(assertion));
   for (final OWLSubClassOfAxiom subsumption : ontologyNormalization.conceptSubsumptions())
     result.addAll(
         axiomsTranslator.subsumptionTranslation(
             subsumption.getSubClass(), subsumption.getSuperClass()));
   for (final OWLSubPropertyAxiom<?> subsumption : ontologyNormalization.roleSubsumptions())
     if (subsumption instanceof OWLSubObjectPropertyOfAxiom) {
       result.addAll(
           axiomsTranslator.subsumptionTranslation(
               subsumption.getSubProperty(), subsumption.getSuperProperty()));
       final OWLSubObjectPropertyOfAxiom axiom = (OWLSubObjectPropertyOfAxiom) subsumption;
       final OWLObjectPropertyExpression ope1 = axiom.getSubProperty();
       final OWLObjectPropertyExpression ope2 = axiom.getSuperProperty();
       final OWLObjectPropertyExpression invOpe1 = ope1.getInverseProperty().getSimplified();
       final OWLObjectPropertyExpression invOpe2 = ope2.getInverseProperty().getSimplified();
       if ((ontologyNormalization.isSuper(some(ope1)) || ontologyNormalization.isSuper(ope1))
           && (ontologyNormalization.isSub(some(ope2)) || ontologyNormalization.isSub(ope2)))
         result.add(
             axiomsTranslator.domainSubsumptionTranslation(
                 axiom.getSubProperty(), axiom.getSuperProperty()));
       if ((ontologyNormalization.isSuper(some(invOpe1)) || ontologyNormalization.isSuper(invOpe1))
           && (ontologyNormalization.isSub(some(invOpe2)) || ontologyNormalization.isSub(invOpe2)))
         result.add(axiomsTranslator.rangeSubsumptionTranslation(invOpe1, invOpe2));
     } else if (subsumption instanceof OWLSubDataPropertyOfAxiom)
       result.addAll(
           axiomsTranslator.subsumptionTranslation(
               subsumption.getSubProperty(), subsumption.getSuperProperty()));
   for (final OWLPropertyExpression ope : ontologyNormalization.getRoles())
     if (ope instanceof OWLObjectPropertyExpression) {
       final OWLObjectProperty p = ((OWLObjectPropertyExpression) ope).getNamedProperty();
       final OWLObjectPropertyExpression invP = p.getInverseProperty();
       if (ontologyNormalization.isSub(some(p)) || ontologyNormalization.isSub(p))
         result.add(axiomsTranslator.domainTranslation(p));
       if (ontologyNormalization.isSub(some(invP)) || ontologyNormalization.isSub(invP))
         result.add(axiomsTranslator.rangeTranslation(p));
     }
   return result;
 }
Code example #8
  protected void importObjectProperty(OWLOntology o, OWLObjectProperty objectProperty) {
    String iri = objectProperty.getIRI().toString();
    String label = getLabel(o, objectProperty);
    checkNotNull(label, "label cannot be null or empty for " + iri);
    LOGGER.info("Importing ontology object property " + iri + " (label: " + label + ")");

    for (Concept domain : getDomainsConcepts(o, objectProperty)) {
      for (Concept range : getRangesConcepts(o, objectProperty)) {
        getOrCreateRelationshipType(domain, range, iri, label);
      }
    }
  }
Code example #9
 public LintReport<OWLObjectProperty> detected(Collection<? extends OWLOntology> targets)
     throws LintException {
   SimpleMatchBasedLintReport<OWLObjectProperty> report =
       new SimpleMatchBasedLintReport<OWLObjectProperty>(this);
   for (OWLOntology ontology : targets) {
     for (OWLObjectProperty objectProperty : ontology.getObjectPropertiesInSignature()) {
       if (objectProperty.isTransitive(ontology)) {
         Set<OWLObjectPropertyExpression> superProperties =
             objectProperty.getSuperProperties(ontology);
         for (OWLObjectPropertyExpression objectPropertyExpression : superProperties) {
           if (objectPropertyExpression.isTransitive(ontology)) {
             report.add(
                 objectProperty,
                 ontology,
                 "The property "
                     + objectProperty.toString()
                     + " is transitive and has a transitive super property");
           }
         }
       }
     }
   }
   return report;
 }
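
The core condition checked by this lint, a transitive property with a transitive asserted super property, can be isolated into a small helper. This sketch uses the single-ontology, asserted-axiom variants of the OWL API 3.x entity methods seen above:

import org.semanticweb.owlapi.model.*;

public class TransitiveSuperPropertySketch {
  /** True if the property is transitive and at least one asserted super property is transitive too. */
  public static boolean hasTransitiveSuperProperty(OWLOntology ontology, OWLObjectProperty property) {
    if (!property.isTransitive(ontology)) {
      return false;
    }
    for (OWLObjectPropertyExpression superProperty : property.getSuperProperties(ontology)) {
      if (superProperty.isTransitive(ontology)) {
        return true;
      }
    }
    return false;
  }
}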
Code example #10
 @Override
 public void visit(OWLObjectProperty property) {
   handleObject(property);
   property.getIRI().accept(this);
 }
Code example #11
    @Override
    public void ontologiesChanged(List<? extends OWLOntologyChange> changes) throws OWLException {
      Map<OWLEntity, OWLEntity> renamings = new HashMap<OWLEntity, OWLEntity>();
      Set<OWLEntity> removals = new HashSet<OWLEntity>();

      for (int idx = 0; idx < changes.size(); idx++) {
        OWLOntologyChange change = changes.get(idx);
        if (change instanceof SetOntologyID) {
          IRI newiri = ((SetOntologyID) change).getNewOntologyID().getOntologyIRI();

          if (newiri == null) continue;

          IRI oldiri = ((SetOntologyID) change).getOriginalOntologyID().getOntologyIRI();

          log.debug("Ontology ID changed");
          log.debug("Old ID: {}", oldiri);
          log.debug("New ID: {}", newiri);

          OBDAModel model = obdamodels.get(oldiri.toURI());

          if (model == null) {
            setupNewOBDAModel();
            model = getActiveOBDAModel();
          }

          PrefixManager prefixManager = model.getPrefixManager();
          prefixManager.addPrefix(PrefixManager.DEFAULT_PREFIX, newiri.toURI().toString());

          obdamodels.remove(oldiri.toURI());
          obdamodels.put(newiri.toURI(), model);
          continue;

        } else if (change instanceof AddAxiom) {
          OWLAxiom axiom = change.getAxiom();
          if (axiom instanceof OWLDeclarationAxiom) {
            OWLEntity entity = ((OWLDeclarationAxiom) axiom).getEntity();
            OBDAModel activeOBDAModel = getActiveOBDAModel();
            if (entity instanceof OWLClass) {
              OWLClass oc = (OWLClass) entity;
              OClass c = ofac.createClass(oc.getIRI().toString());
              activeOBDAModel.declareClass(c);
            } else if (entity instanceof OWLObjectProperty) {
              OWLObjectProperty or = (OWLObjectProperty) entity;
              ObjectPropertyExpression r = ofac.createObjectProperty(or.getIRI().toString());
              activeOBDAModel.declareObjectProperty(r);
            } else if (entity instanceof OWLDataProperty) {
              OWLDataProperty op = (OWLDataProperty) entity;
              DataPropertyExpression p = ofac.createDataProperty(op.getIRI().toString());
              activeOBDAModel.declareDataProperty(p);
            }
          }

        } else if (change instanceof RemoveAxiom) {
          OWLAxiom axiom = change.getAxiom();
          if (axiom instanceof OWLDeclarationAxiom) {
            OWLEntity entity = ((OWLDeclarationAxiom) axiom).getEntity();
            OBDAModel activeOBDAModel = getActiveOBDAModel();
            if (entity instanceof OWLClass) {
              OWLClass oc = (OWLClass) entity;
              OClass c = ofac.createClass(oc.getIRI().toString());
              activeOBDAModel.unDeclareClass(c);
            } else if (entity instanceof OWLObjectProperty) {
              OWLObjectProperty or = (OWLObjectProperty) entity;
              ObjectPropertyExpression r = ofac.createObjectProperty(or.getIRI().toString());
              activeOBDAModel.unDeclareObjectProperty(r);
            } else if (entity instanceof OWLDataProperty) {
              OWLDataProperty op = (OWLDataProperty) entity;
              DataPropertyExpression p = ofac.createDataProperty(op.getIRI().toString());
              activeOBDAModel.unDeclareDataProperty(p);
            }
          }
        }

        if (idx + 1 >= changes.size()) {
          continue;
        }

        if (change instanceof RemoveAxiom && changes.get(idx + 1) instanceof AddAxiom) {
          // Found the pattern of a renaming refactoring
          RemoveAxiom remove = (RemoveAxiom) change;
          AddAxiom add = (AddAxiom) changes.get(idx + 1);

          if (!(remove.getAxiom() instanceof OWLDeclarationAxiom
              && add.getAxiom() instanceof OWLDeclarationAxiom)) {
            continue;
          }
          // Found the pattern we are looking for, a remove and add of
          // declaration axioms
          OWLEntity olde = ((OWLDeclarationAxiom) remove.getAxiom()).getEntity();
          OWLEntity newe = ((OWLDeclarationAxiom) add.getAxiom()).getEntity();
          renamings.put(olde, newe);

        } else if (change instanceof RemoveAxiom
            && ((RemoveAxiom) change).getAxiom() instanceof OWLDeclarationAxiom) {
          // Found the pattern of a deletion
          OWLDeclarationAxiom declaration = (OWLDeclarationAxiom) ((RemoveAxiom) change).getAxiom();
          OWLEntity removedEntity = declaration.getEntity();
          removals.add(removedEntity);
        }
      }

      // Applying the renaming to the OBDA model
      OBDAModel obdamodel = getActiveOBDAModel();
      for (OWLEntity olde : renamings.keySet()) {
        OWLEntity removedEntity = olde;
        OWLEntity newEntity = renamings.get(removedEntity);

        // This set of changes appears to be a "renaming" operation,
        // hence we will modify the OBDA model accordingly
        Predicate removedPredicate = getPredicate(removedEntity);
        Predicate newPredicate = getPredicate(newEntity);

        obdamodel.renamePredicate(removedPredicate, newPredicate);
      }

      // Applying the deletions to the obda model
      for (OWLEntity removede : removals) {
        Predicate removedPredicate = getPredicate(removede);
        obdamodel.deletePredicate(removedPredicate);
      }
    }
Code example #12
 public ArrowElementVis(OWLObjectProperty property) {
   this.property = property;
   this.labelElement = property.getIRI().getFragment();
   this.backgroudColor = Color.LIGHT_GRAY;
 }
Code example #13
File: ALCRenderer.java  Project: jmayaalv/mapo
 /*
  * (non-Javadoc)
  *
  * @see org.semanticweb.owlapi.model.OWLPropertyExpressionVisitor#visit(org.semanticweb.owlapi.model.OWLObjectProperty)
  */
 @Override
 public void visit(OWLObjectProperty axiom) {
   sb.append(print(axiom.toString()));
 }
Code example #14
 @Override
 public String getAsText() {
   /** Get the text value of this object - for displaying in GUIs, etc. */
   return value.toStringID();
 }
Code example #15
 @Override
 protected void writeObjectPropertyComment(OWLObjectProperty prop) {
   writeComment(prop.getIRI().toString());
 }
Code example #16
  // Main wrapping for adding non-ontology documents to GOlr.
  // Also see OntologySolrLoader.
  private void add(Bioentity e) {

    String eid = e.getId();
    String esym = e.getSymbol();
    String edb = e.getDb();
    String etype = e.getTypeCls();
    String ename = e.getFullName();
    String edbid = e.getDBID();
    // LOG.info("Adding: " + eid + " " + esym);

    SolrInputDocument bioentity_doc = new SolrInputDocument();

    // Bioentity document base.
    bioentity_doc.addField("document_category", "bioentity");
    bioentity_doc.addField("id", eid);
    bioentity_doc.addField("bioentity", eid);
    bioentity_doc.addField("bioentity_internal_id", edbid);
    bioentity_doc.addField("bioentity_label", esym);
    bioentity_doc.addField("bioentity_name", ename);
    bioentity_doc.addField("source", edb);
    bioentity_doc.addField("type", etype);

    // A little more work for the synonyms.
    List<String> esynonyms = e.getSynonyms();
    if (!esynonyms.isEmpty()) {
      bioentity_doc.addField("synonym", esynonyms);
    }

    // Various taxon and taxon closure calculations, including map.
    String etaxid = e.getNcbiTaxonId();
    TaxonDetails taxonDetails = null;
    if (etaxid != null) {
      taxonDetails = createTaxonDetails(etaxid);
      taxonDetails.addToSolrDocument(bioentity_doc);
    }

    // Optionally, pull information from the PANTHER file set.
    List<String> pantherFamilyIDs = new ArrayList<String>();
    List<String> pantherFamilyLabels = new ArrayList<String>();
    List<String> pantherTreeGraphs = new ArrayList<String>();
    // List<String> pantherTreeAnnAncestors = new ArrayList<String>();
    // List<String> pantherTreeAnnDescendants = new ArrayList<String>();
    if (pset != null && pset.getNumberOfFilesInSet() > 0) {
      Set<PANTHERTree> pTrees = pset.getAssociatedTrees(eid);
      if (pTrees != null) {
        Iterator<PANTHERTree> piter = pTrees.iterator();
        int pcnt = 0; // DEBUG
        while (piter.hasNext()) {
          pcnt++; // DEBUG
          PANTHERTree ptree = piter.next();
          pantherFamilyIDs.add(ptree.getPANTHERID());
          pantherFamilyLabels.add(StringUtils.lowerCase(ptree.getTreeLabel()));
          pantherTreeGraphs.add(ptree.getOWLShuntGraph().toJSON());
          // pantherTreeAnnAncestors = new ArrayList<String>(ptree.getAncestorAnnotations(eid));
          // pantherTreeAnnDescendants = new ArrayList<String>(ptree.getDescendantAnnotations(eid));
          if (pcnt > 1) { // DEBUG
            LOG.info(
                "Belongs to multiple families ("
                    + eid
                    + "): "
                    + StringUtils.join(pantherFamilyIDs, ", "));
          }

          // Store that we saw this for later use in the tree.
          ptree.addAssociatedGeneProduct(eid, esym);
        }
      }
    }
    // Optionally, actually /add/ the PANTHER family data to the document.
    if (!pantherFamilyIDs.isEmpty()) {
      // BUG/TODO (but probably not ours): We only store the one tree for now as we're assuming
      // that there is just one family. Unfortunately, PANTHER still produces data that says
      // something sometimes belongs to more than one family (e.g. something with fly in
      // PTHR10919 and PTHR10032), so we block it and just choose the first.
      bioentity_doc.addField("panther_family", pantherFamilyIDs.get(0));
      bioentity_doc.addField("panther_family_label", pantherFamilyLabels.get(0));
      bioentity_doc.addField("phylo_graph_json", pantherTreeGraphs.get(0));
      // if( ! pantherTreeAnnAncestors.isEmpty() ){
      //	bioentity_doc.addField("phylo_ancestor_closure", pantherTreeAnnAncestors);
      // }
      // if( ! pantherTreeAnnDescendants.isEmpty() ){
      //	bioentity_doc.addField("phylo_descendant_closure", pantherTreeAnnDescendants);
      // }
    }

    // We're also going to want to make note of the direct annotations to this bioentity.
    // This will mean getting ready and then storing all of c5 when we pass through
    // the annotation loop. We'll add to the document on the other side.
    // Collect information: ids and labels.
    Map<String, String> direct_list_map = new HashMap<String, String>();

    // Something that we'll need for the annotation evidence aggregate later.
    Map<String, SolrInputDocument> evAggDocMap = new HashMap<String, SolrInputDocument>();

    // Annotation doc.
    // We'll also need to be collecting some aggregate information, like for the GP term
    // closures, which will be added at the end of this section.
    Map<String, String> isap_map = new HashMap<String, String>();
    Map<String, String> reg_map = new HashMap<String, String>();
    for (GeneAnnotation a : gafDocument.getGeneAnnotations(e.getId())) {
      SolrInputDocument annotation_doc = new SolrInputDocument();

      String clsId = a.getCls();

      // Annotation document base from static and previous bioentity.
      annotation_doc.addField("document_category", "annotation"); // n/a
      annotation_doc.addField("source", edb); // Col. 1 (from bioentity above)
      annotation_doc.addField("bioentity", eid); // n/a, should be c1+c2.
      annotation_doc.addField("bioentity_internal_id", edbid); // Col. 2 (from bioentity above)
      annotation_doc.addField("bioentity_label", esym); // Col. 3 (from bioentity above)
      // NOTE: Col. 4 generation is below...
      annotation_doc.addField("annotation_class", clsId); // Col. 5
      addLabelField(annotation_doc, "annotation_class_label", clsId); // n/a
      // NOTE: Col. 6 generation is below...
      String a_ev_type = a.getShortEvidence();
      annotation_doc.addField("evidence_type", a_ev_type); // Col. 7
      // NOTE: Col. 8 generation is below...
      String a_aspect = a.getAspect();
      annotation_doc.addField("aspect", a_aspect); // Col. 9
      annotation_doc.addField("bioentity_name", ename); // Col. 10 (from bioentity above)
      annotation_doc.addField("synonym", esynonyms); // Col. 11 (from bioentity above)
      annotation_doc.addField("type", etype); // Col. 12 (from bioentity above)
      String adate = a.getLastUpdateDate();
      annotation_doc.addField("date", adate); // Col. 14
      String assgnb = a.getAssignedBy();
      annotation_doc.addField("assigned_by", assgnb); // Col. 15
      // NOTE: Col. generation is 16 below...
      annotation_doc.addField("bioentity_isoform", a.getGeneProductForm()); // Col. 17

      // Optionally, if there is enough taxon information for a map, add the collections to the document.
      if (taxonDetails != null) {
        taxonDetails.addToSolrDocument(annotation_doc);
      }

      // Optionally, actually /add/ the PANTHER family data to the document.
      if (!pantherFamilyIDs.isEmpty()) {
        annotation_doc.addField("panther_family", pantherFamilyIDs.get(0));
        annotation_doc.addField("panther_family_label", pantherFamilyLabels.get(0));
      }

      // Evidence type closure.
      Set<OWLClass> ecoClasses = eco.getClassesForGoCode(a_ev_type);
      Set<OWLClass> ecoSuper = eco.getAncestors(ecoClasses, true);
      List<String> ecoIDClosure = new ArrayList<String>();
      for (OWLClass es : ecoSuper) {
        String itemID = es.toStringID();
        ecoIDClosure.add(itemID);
      }
      addLabelFields(annotation_doc, "evidence_type_closure", ecoIDClosure);

      // Col 4: qualifier generation.
      String comb_aqual = "";
      if (a.hasQualifiers()) {
        if (a.isNegated()) {
          comb_aqual = comb_aqual + "not";
          annotation_doc.addField("qualifier", "not");
        }
        if (a.isContributesTo()) {
          comb_aqual = comb_aqual + "contributes_to";
          annotation_doc.addField("qualifier", "contributes_to");
        }
        if (a.isIntegralTo()) {
          comb_aqual = comb_aqual + "integral_to";
          annotation_doc.addField("qualifier", "integral_to");
        }
        if (a.isColocatesWith()) {
          comb_aqual = comb_aqual + "colocalizes_with";
          annotation_doc.addField("qualifier", "colocalizes_with");
        }
        if (a.isCut()) {
          comb_aqual = comb_aqual + "cut";
          annotation_doc.addField("qualifier", "cut");
        }
      }

      // Drag in the reference (col 6)
      List<String> refIds = a.getReferenceIds();
      String refIdList = ""; // used to help make unique ID.
      for (String refId : refIds) {
        annotation_doc.addField("reference", refId);
        refIdList = refIdList + "_" + refId;
      }

      // Drag in "with" (col 8).
      // annotation_doc.addField("evidence_with", a.getWithExpression());
      String withList = ""; // used to help make unique ID.
      for (String wi : a.getWithInfos()) {
        annotation_doc.addField("evidence_with", wi);
        withList = withList + "_" + wi;
      }

      ///
      /// isa_partof_closure
      ///

      OWLObject cls = graph.getOWLObjectByIdentifier(clsId);
      // TODO: This may be a bug workaround, or it may be the way things are.
      // getOWLObjectByIdentifier returns null on alt_ids, so skip them for now.
      if (cls != null) {
        //	System.err.println(clsId);

        // Is-a part-of closures.
        ArrayList<String> isap = new ArrayList<String>();
        isap.add("BFO:0000050");
        Map<String, String> curr_isap_map =
            addClosureToAnnAndBio(
                isap,
                "isa_partof_closure",
                "isa_partof_closure_label",
                "isa_partof_closure_map",
                cls,
                graph,
                annotation_doc,
                bioentity_doc,
                a.isNegated());
        isap_map.putAll(curr_isap_map); // add to aggregate map

        //				// Add to annotation and bioentity isa_partof closures; label and id.
        //				List<String> idClosure = graph.getRelationIDClosure(cls, isap);
        //				List<String> labelClosure = graph.getRelationLabelClosure(cls, isap);
        //				annotation_doc.addField("isa_partof_closure", idClosure);
        //				annotation_doc.addField("isa_partof_closure_label", labelClosure);
        //				for( String tlabel : labelClosure){
        //					addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
        //				}
        //				for( String tid : idClosure){
        //					addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
        //				}
        //
        //				// Compile closure maps to JSON.
        //				Map<String, String> isa_partof_map = graph.getRelationClosureMap(cls, isap);
        //				if( ! isa_partof_map.isEmpty() ){
        //					String jsonized_isa_partof_map = gson.toJson(isa_partof_map);
        //					annotation_doc.addField("isa_partof_closure_map", jsonized_isa_partof_map);
        //				}

        // Regulates closures.
        List<String> reg = RelationSets.getRelationSet(RelationSets.COMMON);
        Map<String, String> curr_reg_map =
            addClosureToAnnAndBio(
                reg,
                "regulates_closure",
                "regulates_closure_label",
                "regulates_closure_map",
                cls,
                graph,
                annotation_doc,
                bioentity_doc,
                a.isNegated());
        reg_map.putAll(curr_reg_map); // add to aggregate map

        //				///
        //				/// Next, work on the evidence aggregate...
        //				///
        //
        //				// Bug/TODO: This is a bit os a slowdown since we're not reusing our work from above
        // here anymore.
        //				List<String> idIsapClosure = graph.getRelationIDClosure(cls, isap);
        //				Map<String, String> isaPartofMap = graph.getRelationClosureMap(cls, isap);
        //
        //				// When we cycle, we'll also want to do some stuff to track all of the evidence codes
        // we see.
        //				List<String> aggEvIDClosure = new ArrayList<String>();
        //				List<String> aggEvWiths = new ArrayList<String>();
        //
        //				// Cycle through and pick up all the associated bits for the terms in the closure.
        //				SolrInputDocument ev_agg_doc = null;
        //				for( String tid : idIsapClosure ){
        //
        //					String tlabel = isaPartofMap.get(tid);
        //					//OWLObject c = graph.getOWLObjectByIdentifier(tid);
        //
        //					// Only have to do the annotation evidence aggregate base once.
        //					// Otherwise, just skip over and add the multi fields separately.
        //					String evAggId = eid + "_:ev:_" + clsId;
        //					if (evAggDocMap.containsKey(evAggId)) {
        //						ev_agg_doc = evAggDocMap.get(evAggId);
        //					} else {
        //						ev_agg_doc = new SolrInputDocument();
        //						evAggDocMap.put(evAggId, ev_agg_doc);
        //						ev_agg_doc.addField("id", evAggId);
        //						ev_agg_doc.addField("document_category", "annotation_evidence_aggregate");
        //						ev_agg_doc.addField("bioentity", eid);
        //						ev_agg_doc.addField("bioentity_label", esym);
        //						ev_agg_doc.addField("annotation_class", tid);
        //						ev_agg_doc.addField("annotation_class_label", tlabel);
        //						ev_agg_doc.addField("taxon", etaxid);
        //						addLabelField(ev_agg_doc, "taxon_label", etaxid);
        //
        //						// Optionally, if there is enough taxon for a map, add the collections to the
        // document.
        //						if( jsonized_taxon_map != null ){
        //							ev_agg_doc.addField("taxon_closure", taxIDClosure);
        //							ev_agg_doc.addField("taxon_closure_label", taxLabelClosure);
        //							ev_agg_doc.addField("taxon_closure_map", jsonized_taxon_map);
        //						}
        //
        //						// Optionally, actually /add/ the PANTHER family data to the document.
        //						if( ! pantherFamilyIDs.isEmpty() ){
        //							ev_agg_doc.addField("panther_family", pantherFamilyIDs.get(0));
        //							ev_agg_doc.addField("panther_family_label", pantherFamilyLabels.get(0));
        //						}
        //					}
        //
        //					// Drag in "with" (col 8), this time for ev_agg.
        //					for (String wi : a.getWithInfos()) {
        //						aggEvWiths.add(wi);
        //					}
        //
        //					// Make note for the evidence type closure.
        //					aggEvIDClosure.add(a.getShortEvidence());
        //				}
        //
        //				// If there was actually a doc created/there, add the cumulative fields to it.
        //				if( ev_agg_doc != null ){
        //					addLabelFields(ev_agg_doc, "evidence_type_closure", aggEvIDClosure);
        //					addLabelFields(ev_agg_doc, "evidence_with", aggEvWiths);
        //				}
      }

      // Let's piggyback on a little of the work above and cache the extra stuff that we'll be
      // adding to the bioentity at the end
      // for the direct annotations. c5 and ???.
      if (a.isNegated() == false) {
        String dlbl = graph.getLabel(cls);
        direct_list_map.put(clsId, dlbl);
      }

      //			Map<String,String> isa_partof_map = new HashMap<String,String>(); // capture labels/ids
      //			OWLObject c = graph.getOWLObjectByIdentifier(clsId);
      //			Set<OWLPropertyExpression> ps =
      // Collections.singleton((OWLPropertyExpression)getPartOfProperty());
      //			Set<OWLObject> ancs = graph.getAncestors(c, ps);
      //			for (OWLObject t : ancs) {
      //				if (! (t instanceof OWLClass))
      //					continue;
      //				String tid = graph.getIdentifier(t);
      //				//System.out.println(edge+" TGT:"+tid);
      //				String tlabel = null;
      //				if (t != null)
      //					tlabel = graph.getLabel(t);
      //				annotation_doc.addField("isa_partof_closure", tid);
      //				addFieldUnique(bioentity_doc, "isa_partof_closure", tid);
      //				if (tlabel != null) {
      //					annotation_doc.addField("isa_partof_closure_label", tlabel);
      //					addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel);
      //					// Map both ways.
      //					// TODO: collisions shouldn't be an issue here?
      //					isa_partof_map.put(tid, tlabel);
      //					isa_partof_map.put(tlabel, tid);
      //				}else{
      //					// For the time being at least, I want to ensure that the id and label closures
      //					// mirror eachother as much as possible (for facets and mapping, etc.). Without
      //					// this, in some cases there is simply nothing returned to drill on.
      //					annotation_doc.addField("isa_partof_closure_label", tid);
      //					addFieldUnique(bioentity_doc, "isa_partof_closure_label", tid);
      //					// Map just the one way I guess--see above.
      //					isa_partof_map.put(tid, tid);
      //				}
      //
      //				// Annotation evidence aggregate base.
      //				String evAggId = eid + "_:ev:_" + clsId;
      //				SolrInputDocument ev_agg_doc;
      //				if (evAggDocMap.containsKey(evAggId)) {
      //					ev_agg_doc = evAggDocMap.get(evAggId);
      //				}
      //				else {
      //					ev_agg_doc = new SolrInputDocument();
      //					evAggDocMap.put(evAggId, ev_agg_doc);
      //					ev_agg_doc.addField("id", evAggId);
      //					ev_agg_doc.addField("document_category", "annotation_evidence_aggregate");
      //					ev_agg_doc.addField("bioentity", eid);
      //					ev_agg_doc.addField("bioentity_label", esym);
      //					ev_agg_doc.addField("annotation_class", tid);
      //					ev_agg_doc.addField("annotation_class_label", tlabel);
      //					ev_agg_doc.addField("taxon", taxId);
      //					addLabelField(ev_agg_doc, "taxon_label", taxId);
      //				}
      //
      //				//evidence_type is single valued
      //				//aggDoc.addField("evidence_type", a.getEvidenceCls());
      //
      //				// Drag in "with" (col 8), this time for ev_agg.
      //				for (WithInfo wi : a.getWithInfos()) {
      //					ev_agg_doc.addField("evidence_with", wi.getWithXref());
      //				}
      //
      //				//aggDoc.getFieldValues(name)
      //				// TODO:
      //				ev_agg_doc.addField("evidence_type_closure", a.getEvidenceCls());
      //			}

      // Column 16.
      // We only want to climb the is_a/part_of parts here.
      ArrayList<String> aecc_rels = new ArrayList<String>();
      aecc_rels.add("BFO:0000050");
      // And capture the label and ID mappings for when we're done the loop.
      Map<String, String> ann_ext_map = new HashMap<String, String>(); // capture labels/ids
      for (List<ExtensionExpression> groups : a.getExtensionExpressions()) {
        // TODO handle extension expression groups
        for (ExtensionExpression ee : groups) {
          String eeid = ee.getCls();
          OWLObject eObj = graph.getOWLObjectByIdentifier(eeid);
          annotation_doc.addField("annotation_extension_class", eeid);
          String eLabel = addLabelField(annotation_doc, "annotation_extension_class_label", eeid);
          if (eLabel == null) eLabel = eeid; // ensure the label

          ///////////////
          // New
          ///////////////

          // Get the closure maps.
          if (eObj != null) {
            Map<String, String> aecc_cmap = graph.getRelationClosureMap(eObj, aecc_rels);
            if (!aecc_cmap.isEmpty()) {
              for (String aecc_id : aecc_cmap.keySet()) {
                String aecc_lbl = aecc_cmap.get(aecc_id);

                // Add all items to the document.
                annotation_doc.addField("annotation_extension_class_closure", aecc_id);
                annotation_doc.addField("annotation_extension_class_closure_label", aecc_lbl);

                // And make sure that both id and label are in the per-term map.
                ann_ext_map.put(aecc_lbl, aecc_id);
                ann_ext_map.put(aecc_id, aecc_lbl);
              }
            }
          }

          //				///////////////
          //				// Old
          //				///////////////
          //
          //				if (eObj != null) {
          //					for (OWLGraphEdge edge : graph.getOutgoingEdgesClosureReflexive(eObj)) {
          //						OWLObject t = edge.getTarget();
          //						if (!(t instanceof OWLClass))
          //							continue;
          //						String annExtID = graph.getIdentifier(t);
          //						String annExtLabel = graph.getLabel(edge.getTarget());
          //						annotation_doc.addField("annotation_extension_class_closure", annExtID);
          //						annotation_doc.addField("annotation_extension_class_closure_label", annExtLabel);
          //						ann_ext_map.put(annExtID, annExtLabel);
          //						ann_ext_map.put(annExtLabel, annExtID);
          //					}
          //				}

          // Ugly. Hand roll out the data for the c16 special handler. Have mercy on me--I'm going
          // to just do this by hand since it's a limited case and I don't want to mess with Gson
          // right now.
          String complicated_c16r = ee.getRelation();
          if (complicated_c16r != null) {
            List<OWLObjectProperty> relations = graph.getRelationOrChain(complicated_c16r);
            if (relations != null) {

              ArrayList<String> relChunk = new ArrayList<String>();
              for (OWLObjectProperty rel : relations) {
                // Use the IRI to get the BFO:0000050 as ID for the part_of OWLObjectProperty
                String rID = graph.getIdentifier(rel.getIRI());
                String rLabel = graph.getLabel(rel);
                if (rLabel == null) rLabel = rID; // ensure the label
                relChunk.add("{\"id\": \"" + rID + "\", \"label\": \"" + rLabel + "\"}");
              }
              String finalSpan = StringUtils.join(relChunk, ", ");

              // Assemble final JSON blob.
              String aeJSON =
                  "{\"relationship\": {\"relation\": ["
                      + finalSpan
                      + "], \"id\": \""
                      + eeid
                      + "\", \"label\": \""
                      + eLabel
                      + "\"}}";

              annotation_doc.addField("annotation_extension_json", aeJSON);
              // LOG.info("added complicated c16: (" + eeid + ", " + eLabel + ") " + aeJSON);
            } else {
              // The c16r is unknown to the ontology--render it as just a normal label, without the
              // link.
              annotation_doc.addField("annotation_extension_json", complicated_c16r);
              LOG.info("added unknown c16: " + complicated_c16r);
            }
          }
        }
      }

      // Add annotation ext closure map to annotation doc (needs to be outside loop since there are
      // multiple extensions).
      if (!ann_ext_map.isEmpty()) {
        String jsonized_ann_ext_map = gson.toJson(ann_ext_map);
        annotation_doc.addField("annotation_extension_class_closure_map", jsonized_ann_ext_map);
      }

      // Final doc assembly; make the ID /really/ unique.
      // WARNING: We're relying on the current truth that the toString() method returns a nice
      // concatenated version of the GAF line, which is fundamentally unique (or should be). If
      // this changes, revert to what we were doing here pre-20150930, which was assembling our
      // own unique line manually.
      annotation_doc.addField("id", a.toString());

      // Finally add doc.
      add(annotation_doc);
    }

    // Add the necessary aggregates to the bio doc. These cannot be done incrementally like the
    // multi-valued closures since there can only be a single map.
    if (!isap_map.isEmpty()) {
      String jsonized_cmap = gson.toJson(isap_map);
      bioentity_doc.addField("isa_partof_closure_map", jsonized_cmap);
    }
    if (!reg_map.isEmpty()) {
      String jsonized_cmap = gson.toJson(reg_map);
      bioentity_doc.addField("regulates_closure_map", jsonized_cmap);
    }

    // Add c5 to bioentity.
    // Compile closure map to JSON and add to the document.
    String jsonized_direct_map = null;
    if (!direct_list_map.isEmpty()) {
      jsonized_direct_map = gson.toJson(direct_list_map);
    }
    // Optionally, if there are enough direct annotations for a map, add the collections to the
    // document.
    if (jsonized_direct_map != null) {
      List<String> directIDList = new ArrayList<String>(direct_list_map.keySet());
      List<String> directLabelList = new ArrayList<String>(direct_list_map.values());
      bioentity_doc.addField("annotation_class_list", directIDList);
      bioentity_doc.addField("annotation_class_list_label", directLabelList);
      bioentity_doc.addField("annotation_class_list_map", jsonized_direct_map);
    }

    add(bioentity_doc);

    for (SolrInputDocument ev_agg_doc : evAggDocMap.values()) {
      add(ev_agg_doc);
    }

    // Now repeat some of the same to help populate the "general" index for bioentities.
    SolrInputDocument general_doc = new SolrInputDocument();
    // Watch out for "id" collision!
    general_doc.addField("id", "general_bioentity_" + eid);
    general_doc.addField("entity", eid);
    general_doc.addField("entity_label", esym);
    general_doc.addField("document_category", "general");
    general_doc.addField("category", "bioentity");
    general_doc.addField(
        "general_blob", ename + " " + edbid + " " + StringUtils.join(esynonyms, " "));
    add(general_doc);
  }
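
Underneath all the closure and PANTHER handling, the loader follows the standard SolrJ pattern: create a SolrInputDocument, add fields, and hand it to the add() helper. A stripped-down sketch with the same field names (the identifier and label values are hypothetical):

import org.apache.solr.common.SolrInputDocument;

public class BioentityDocSketch {
  public static SolrInputDocument minimalBioentityDoc() {
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("document_category", "bioentity");
    doc.addField("id", "EXAMPLE:0001");            // hypothetical identifier
    doc.addField("bioentity", "EXAMPLE:0001");
    doc.addField("bioentity_label", "exampleSymbol");
    return doc; // would then be passed to the loader's add(...) or a Solr client
  }
}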
Code example #17
File: LodeServlet.java  Project: zednis/LODE
  private OWLOntology parseWithReasoner(OWLOntologyManager manager, OWLOntology ontology) {
    try {
      PelletOptions.load(new URL("http://" + cssLocation + "pellet.properties"));
      PelletReasoner reasoner = PelletReasonerFactory.getInstance().createReasoner(ontology);
      reasoner.getKB().prepare();
      List<InferredAxiomGenerator<? extends OWLAxiom>> generators =
          new ArrayList<InferredAxiomGenerator<? extends OWLAxiom>>();
      generators.add(new InferredSubClassAxiomGenerator());
      generators.add(new InferredClassAssertionAxiomGenerator());
      generators.add(new InferredDisjointClassesAxiomGenerator());
      generators.add(new InferredEquivalentClassAxiomGenerator());
      generators.add(new InferredEquivalentDataPropertiesAxiomGenerator());
      generators.add(new InferredEquivalentObjectPropertyAxiomGenerator());
      generators.add(new InferredInverseObjectPropertiesAxiomGenerator());
      generators.add(new InferredPropertyAssertionGenerator());
      generators.add(new InferredSubDataPropertyAxiomGenerator());
      generators.add(new InferredSubObjectPropertyAxiomGenerator());

      InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner, generators);

      OWLOntologyID id = ontology.getOntologyID();
      Set<OWLImportsDeclaration> declarations = ontology.getImportsDeclarations();
      Set<OWLAnnotation> annotations = ontology.getAnnotations();

      Map<OWLEntity, Set<OWLAnnotationAssertionAxiom>> entityAnnotations =
          new HashMap<OWLEntity, Set<OWLAnnotationAssertionAxiom>>();
      for (OWLClass aEntity : ontology.getClassesInSignature()) {
        entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
      }
      for (OWLObjectProperty aEntity : ontology.getObjectPropertiesInSignature()) {
        entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
      }
      for (OWLDataProperty aEntity : ontology.getDataPropertiesInSignature()) {
        entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
      }
      for (OWLNamedIndividual aEntity : ontology.getIndividualsInSignature()) {
        entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
      }
      for (OWLAnnotationProperty aEntity : ontology.getAnnotationPropertiesInSignature()) {
        entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
      }
      for (OWLDatatype aEntity : ontology.getDatatypesInSignature()) {
        entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
      }

      manager.removeOntology(ontology);
      OWLOntology inferred = manager.createOntology(id);
      iog.fillOntology(manager, inferred);

      for (OWLImportsDeclaration decl : declarations) {
        manager.applyChange(new AddImport(inferred, decl));
      }
      for (OWLAnnotation ann : annotations) {
        manager.applyChange(new AddOntologyAnnotation(inferred, ann));
      }
      for (OWLClass aEntity : inferred.getClassesInSignature()) {
        applyAnnotations(aEntity, entityAnnotations, manager, inferred);
      }
      for (OWLObjectProperty aEntity : inferred.getObjectPropertiesInSignature()) {
        applyAnnotations(aEntity, entityAnnotations, manager, inferred);
      }
      for (OWLDataProperty aEntity : inferred.getDataPropertiesInSignature()) {
        applyAnnotations(aEntity, entityAnnotations, manager, inferred);
      }
      for (OWLNamedIndividual aEntity : inferred.getIndividualsInSignature()) {
        applyAnnotations(aEntity, entityAnnotations, manager, inferred);
      }
      for (OWLAnnotationProperty aEntity : inferred.getAnnotationPropertiesInSignature()) {
        applyAnnotations(aEntity, entityAnnotations, manager, inferred);
      }
      for (OWLDatatype aEntity : inferred.getDatatypesInSignature()) {
        applyAnnotations(aEntity, entityAnnotations, manager, inferred);
      }

      return inferred;
    } catch (FileNotFoundException e1) {
      return ontology;
    } catch (MalformedURLException e1) {
      return ontology;
    } catch (IOException e1) {
      return ontology;
    } catch (OWLOntologyCreationException e) {
      return ontology;
    }
  }
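
The central step above is the InferredOntologyGenerator. A reduced sketch of that step is shown below, using OWL API's built-in structural reasoner so it runs without Pellet (any OWLReasonerFactory can be substituted; fillOntology(manager, ontology) is the 3.x signature, later versions take an OWLDataFactory):

import java.util.ArrayList;
import java.util.List;
import org.semanticweb.owlapi.model.*;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory;
import org.semanticweb.owlapi.util.InferredAxiomGenerator;
import org.semanticweb.owlapi.util.InferredOntologyGenerator;
import org.semanticweb.owlapi.util.InferredSubClassAxiomGenerator;

public class InferredOntologySketch {
  public static OWLOntology materializeSubClassAxioms(OWLOntologyManager manager, OWLOntology ontology)
      throws OWLOntologyCreationException {
    OWLReasoner reasoner = new StructuralReasonerFactory().createReasoner(ontology);
    List<InferredAxiomGenerator<? extends OWLAxiom>> generators =
        new ArrayList<InferredAxiomGenerator<? extends OWLAxiom>>();
    generators.add(new InferredSubClassAxiomGenerator());
    InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner, generators);
    OWLOntology inferred = manager.createOntology();
    iog.fillOntology(manager, inferred); // OWL API 3.x signature
    return inferred;
  }
}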
Code example #18
  public static void main(String[] args) throws Exception {
    if (args.length == 0) {
      //			args = new String[] { "/home/yzhou/backup/20141212/univ-bench-dl-queries.owl"};
      args = new String[] {PagodaTester.onto_dir + "fly/fly-all-in-one_rolledUp.owl"};
      //			args = new String[] { PagodaTester.onto_dir +
      // "dbpedia/integratedOntology-all-in-one-minus-datatype.owl" };
      //			args = new String[] { PagodaTester.onto_dir + "npd/npd-all-minus-datatype.owl" };
      //			args = new String[] { PagodaTester.onto_dir + "bio2rdf/chembl/cco-noDPR.ttl" };
      //			args = new String[] { PagodaTester.onto_dir +
      // "bio2rdf/reactome/biopax-level3-processed.owl" };
      //			args = new String[] { PagodaTester.onto_dir + "bio2rdf/uniprot/core-processed-noDis.owl"
      // };
    }

    //		OWLOntology ontology = OWLHelper.getMergedOntology(args[0], null);
    //		OWLHelper.correctDataTypeRangeAxioms(ontology);
    OWLOntology ontology = OWLHelper.loadOntology(args[0]);

    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    OWLDataFactory factory = manager.getOWLDataFactory();
    //		manager.saveOntology(ontology, new FileOutputStream(args[0].replace(".owl",
    // "_owlapi.owl")));

    if (outputFile != null) Utility.redirectCurrentOut(outputFile);

    int queryID = 0;
    for (OWLClass cls : ontology.getClassesInSignature(true)) {
      if (cls.equals(factory.getOWLThing()) || cls.equals(factory.getOWLNothing())) continue;
      if (!cls.toStringID().contains("Query")) continue;
      System.out.println("^[Query" + ++queryID + "]");
      System.out.println(template.replace("@CLASS", cls.toStringID()));
      System.out.println();
    }

    for (OWLOntology onto : ontology.getImportsClosure())
      for (OWLObjectProperty prop : onto.getObjectPropertiesInSignature()) {
        //				if (!prop.toStringID().contains("Query")) continue;
        System.out.println("^[Query" + ++queryID + "]");
        System.out.println("SELECT ?X ?Y");
        System.out.println("WHERE {");
        System.out.println("?X <" + prop.toStringID() + "> ?Y .");
        System.out.println("}");
        System.out.println();
      }

    String[] answerVars = new String[] {"?X", "?Y"};

    for (OWLOntology onto : ontology.getImportsClosure())
      for (OWLObjectProperty prop : onto.getObjectPropertiesInSignature()) {
        //				if (!prop.toStringID().contains("Query")) continue;
        for (int i = 0; i < answerVars.length; ++i) {
          System.out.println("^[Query" + ++queryID + "]");
          System.out.println("SELECT " + answerVars[i]);
          System.out.println("WHERE {");
          System.out.println("?X <" + prop.toStringID() + "> ?Y .");
          System.out.println("}");
          System.out.println();
        }
      }

    if (outputFile != null) Utility.closeCurrentOut();
  }
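
The printed blocks are plain SPARQL templates built by string concatenation; the per-property query boils down to a helper like this sketch (no OWL API calls involved):

public class PropertyQuerySketch {
  /** Builds the two-variable retrieval query emitted for each object property above. */
  public static String propertyQuery(String propertyIri) {
    return "SELECT ?X ?Y\n"
        + "WHERE {\n"
        + "?X <" + propertyIri + "> ?Y .\n"
        + "}";
  }
}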
Code example #19
  public void writeOntology() throws OWLRendererException {
    if (ontologies.size() != 1) {
      throw new OWLRuntimeException("Can only render one ontology");
    }
    OWLOntology ontology = getOntologies().iterator().next();
    writePrefixMap();
    writeNewLine();
    writeOntologyHeader(ontology);

    for (OWLAnnotationProperty prop : ontology.getAnnotationPropertiesInSignature()) {
      write(prop);
    }
    for (OWLDatatype datatype : ontology.getDatatypesInSignature()) {
      write(datatype);
    }
    for (OWLObjectProperty prop : ontology.getObjectPropertiesInSignature()) {
      write(prop);
      OWLObjectPropertyExpression invProp = prop.getInverseProperty();
      if (!ontology.getAxioms(invProp).isEmpty()) {
        write(invProp);
      }
    }
    for (OWLDataProperty prop : ontology.getDataPropertiesInSignature()) {
      write(prop);
    }
    for (OWLClass cls : ontology.getClassesInSignature()) {
      write(cls);
    }
    for (OWLNamedIndividual ind : ontology.getIndividualsInSignature()) {
      write(ind);
    }
    for (OWLAnonymousIndividual ind : ontology.getReferencedAnonymousIndividuals()) {
      write(ind);
    }
    // Nary disjoint classes axioms
    event = new RendererEvent(this, ontology);
    for (OWLDisjointClassesAxiom ax : ontology.getAxioms(AxiomType.DISJOINT_CLASSES)) {
      if (ax.getClassExpressions().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getClassExpressions(), ax);
        writeSection(DISJOINT_CLASSES, map, ",", false, ontology);
      }
    }
    // Nary equivalent classes axioms
    for (OWLEquivalentClassesAxiom ax : ontology.getAxioms(AxiomType.EQUIVALENT_CLASSES)) {
      if (ax.getClassExpressions().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getClassExpressions(), ax);
        writeSection(EQUIVALENT_CLASSES, map, ",", false, ontology);
      }
    }
    // Nary disjoint properties
    for (OWLDisjointObjectPropertiesAxiom ax :
        ontology.getAxioms(AxiomType.DISJOINT_OBJECT_PROPERTIES)) {
      if (ax.getProperties().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getProperties(), ax);
        writeSection(DISJOINT_PROPERTIES, map, ",", false, ontology);
      }
    }
    // Nary equivalent properties
    for (OWLEquivalentObjectPropertiesAxiom ax :
        ontology.getAxioms(AxiomType.EQUIVALENT_OBJECT_PROPERTIES)) {
      if (ax.getProperties().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getProperties(), ax);
        writeSection(EQUIVALENT_PROPERTIES, map, ",", false, ontology);
      }
    }
    // Nary disjoint properties
    for (OWLDisjointDataPropertiesAxiom ax :
        ontology.getAxioms(AxiomType.DISJOINT_DATA_PROPERTIES)) {
      if (ax.getProperties().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getProperties(), ax);
        writeSection(DISJOINT_PROPERTIES, map, ",", false, ontology);
      }
    }
    // Nary equivalent properties
    for (OWLEquivalentDataPropertiesAxiom ax :
        ontology.getAxioms(AxiomType.EQUIVALENT_DATA_PROPERTIES)) {
      if (ax.getProperties().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getProperties(), ax);
        writeSection(EQUIVALENT_PROPERTIES, map, ",", false, ontology);
      }
    }
    // Nary different individuals
    for (OWLDifferentIndividualsAxiom ax : ontology.getAxioms(AxiomType.DIFFERENT_INDIVIDUALS)) {
      if (ax.getIndividuals().size() > 2) {
        SectionMap map = new SectionMap();
        map.add(ax.getIndividuals(), ax);
        writeSection(DIFFERENT_INDIVIDUALS, map, ",", false, ontology);
      }
    }
    for (SWRLRule rule : ontology.getAxioms(AxiomType.SWRL_RULE)) {
      writeSection(RULE, Collections.singleton(rule), ", ", false);
    }
    flush();
  }
Code example #20
 @Override
 public String getJavaInitializationString() {
   /** This returns the value needed to reconstitute the object from a string */
   return value.toStringID();
 }