/** * Protege won't trigger a save action unless it detects that the currently opened OWLOntology * has changed. The OBDA plugin requires that Protege also triggers a save action when only the * OBDA model has changed. To accomplish this, this method "fakes" an ontology change by inserting * and then removing a class in the OWL model. */ private void triggerOntologyChanged() { if (loadingData) { return; } OWLModelManager owlmm = owlEditorKit.getOWLModelManager(); OWLOntology ontology = owlmm.getActiveOntology(); if (ontology == null) { return; } OWLClass newClass = owlmm .getOWLDataFactory() .getOWLClass(IRI.create("http://www.unibz.it/krdb/obdaplugin#RandomClass6677841155")); OWLAxiom axiom = owlmm.getOWLDataFactory().getOWLDeclarationAxiom(newClass); try { AddAxiom addChange = new AddAxiom(ontology, axiom); owlmm.applyChange(addChange); RemoveAxiom removeChange = new RemoveAxiom(ontology, axiom); owlmm.applyChange(removeChange); } catch (Exception e) { log.warn( "Exception forcing an ontology change. Your OWL model might contain a new class that you need to remove manually: {}", newClass.getIRI()); log.warn(e.getMessage()); log.debug(e.getMessage(), e); } }
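A minimal editorial sketch (assumptions: plain OWL API outside Protege, hypothetical marker IRI) of why the add/remove pair above is safe: the ontology's axiom set is unchanged afterwards, but change events still fire, which is what marks the model dirty.

OWLOntologyManager man = OWLManager.createOWLOntologyManager();
OWLOntology ont = man.createOntology();
OWLDataFactory df = man.getOWLDataFactory();
OWLClass marker = df.getOWLClass(IRI.create("http://example.org#FakeChangeMarker")); // hypothetical IRI
OWLAxiom ax = df.getOWLDeclarationAxiom(marker);
int before = ont.getAxiomCount();
man.applyChange(new AddAxiom(ont, ax));    // listeners fire here...
man.applyChange(new RemoveAxiom(ont, ax)); // ...and here
assert ont.getAxiomCount() == before; // net effect on the axiom set is zero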
public void testIgnoreAnnotations() throws Exception { OWLOntologyManager man = getManager(); // OWLManager.createOWLOntologyManager(); OWLOntology ont = man.createOntology(); OWLDataFactory df = man.getOWLDataFactory(); OWLClass clsA = df.getOWLClass(IRI.create("http://ont.com#A")); OWLClass clsB = df.getOWLClass(IRI.create("http://ont.com#B")); OWLSubClassOfAxiom sca = df.getOWLSubClassOfAxiom(clsA, clsB); man.addAxiom(ont, sca); OWLAnnotationProperty rdfsComment = df.getRDFSComment(); OWLLiteral lit = df.getOWLLiteral("Hello world"); OWLAnnotationAssertionAxiom annoAx1 = df.getOWLAnnotationAssertionAxiom(rdfsComment, clsA.getIRI(), lit); man.addAxiom(ont, annoAx1); OWLAnnotationPropertyDomainAxiom annoAx2 = df.getOWLAnnotationPropertyDomainAxiom(rdfsComment, clsA.getIRI()); man.addAxiom(ont, annoAx2); OWLAnnotationPropertyRangeAxiom annoAx3 = df.getOWLAnnotationPropertyRangeAxiom(rdfsComment, clsB.getIRI()); man.addAxiom(ont, annoAx3); OWLAnnotationProperty myComment = df.getOWLAnnotationProperty(IRI.create("http://ont.com#myComment")); OWLSubAnnotationPropertyOfAxiom annoAx4 = df.getOWLSubAnnotationPropertyOfAxiom(myComment, rdfsComment); man.addAxiom(ont, annoAx4); reload(ont, new RDFXMLOntologyFormat()); reload(ont, new OWLXMLOntologyFormat()); reload(ont, new TurtleOntologyFormat()); reload(ont, new OWLFunctionalSyntaxOntologyFormat()); }
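The reload(...) helper used above is not shown; a plausible hedged sketch (assumptions: in-memory round-trip, logical-axiom comparison) serializes the ontology in the given format and parses it back with a fresh manager:

private void reload(OWLOntology ont, OWLOntologyFormat format) throws Exception {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  ont.getOWLOntologyManager().saveOntology(ont, format, new StreamDocumentTarget(out));
  OWLOntology ont2 = OWLManager.createOWLOntologyManager()
      .loadOntologyFromOntologyDocument(
          new StreamDocumentSource(new ByteArrayInputStream(out.toByteArray())));
  // Sketch only: the real helper presumably also checks how annotation axioms survive per format.
  assertEquals(ont.getLogicalAxioms(), ont2.getLogicalAxioms());
}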
protected Concept getParentConcept( OWLOntology o, OWLClass ontologyClass, File inDir, Authorizations authorizations) throws IOException { Set<OWLClassExpression> superClasses = ontologyClass.getSuperClasses(o); if (superClasses.size() == 0) { return getEntityConcept(); } else if (superClasses.size() == 1) { OWLClassExpression superClassExpr = superClasses.iterator().next(); OWLClass superClass = superClassExpr.asOWLClass(); String superClassUri = superClass.getIRI().toString(); Concept parent = getConceptByIRI(superClassUri); if (parent != null) { return parent; } parent = importOntologyClass(o, superClass, inDir, authorizations); if (parent == null) { throw new LumifyException("Could not find or create parent: " + superClass); } return parent; } else { throw new LumifyException( "Unhandled multiple super classes. Found " + superClasses.size() + ", expected 0 or 1."); } }
protected void importDataProperty(OWLOntology o, OWLDataProperty dataTypeProperty) { String propertyIRI = dataTypeProperty.getIRI().toString(); String propertyDisplayName = getLabel(o, dataTypeProperty); PropertyType propertyType = getPropertyType(o, dataTypeProperty); boolean userVisible = getUserVisible(o, dataTypeProperty); boolean searchable = getSearchable(o, dataTypeProperty); Boolean displayTime = getDisplayTime(o, dataTypeProperty); Double boost = getBoost(o, dataTypeProperty); if (propertyType == null) { throw new LumifyException("Could not get property type on data property " + propertyIRI); } for (OWLClassExpression domainClassExpr : dataTypeProperty.getDomains(o)) { OWLClass domainClass = domainClassExpr.asOWLClass(); String domainClassUri = domainClass.getIRI().toString(); Concept domainConcept = getConceptByIRI(domainClassUri); checkNotNull(domainConcept, "Could not find class with uri: " + domainClassUri); LOGGER.info("Adding data property " + propertyIRI + " to class " + domainConcept.getTitle()); ArrayList<PossibleValueType> possibleValues = getPossibleValues(o, dataTypeProperty); Collection<TextIndexHint> textIndexHints = getTextIndexHints(o, dataTypeProperty); addPropertyTo( domainConcept, propertyIRI, propertyDisplayName, propertyType, possibleValues, textIndexHints, userVisible, searchable, displayTime, boost); } }
public void addAncestor(OWLClass s, OWLClass t) { if (!ancestors.containsKey(s.getIRI())) { ancestors.put(s.getIRI(), t.getIRI()); } else { throw new IllegalStateException("Child with more than one parent: " + s + " " + t); } }
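An editorial usage sketch (the owning object, here called hierarchy, and the IRIs are hypothetical): the ancestors map enforces a tree shape, so registering a second parent for the same child fails fast.

OWLDataFactory df = OWLManager.getOWLDataFactory();
OWLClass child = df.getOWLClass(IRI.create("http://example.org#Child"));
hierarchy.addAncestor(child, df.getOWLClass(IRI.create("http://example.org#Parent1"))); // stored
hierarchy.addAncestor(child, df.getOWLClass(IRI.create("http://example.org#Parent2"))); // throws IllegalStateException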
/** * Builds a query fragment from the axiom's list of named classes and the given SPARQL variable * assigned to the sub-axiom. */ public String getSparqlQueryPart(ArrayList<OWLClass> clsList, String SxAxVar) { LOGGER.info(" ===getSparqlQueryPart=="); String fragmentOfQuery = ""; for (OWLClass cls : clsList) { // Look up the concept in the UPO ontology IRI conceptIRI = UPOont.getUPOont() .getConceptByIRIinAnnotationValue( IRI.create(ConstantsOntConverter.SKOS_HIDDEN_LABEL), cls.getIRI()); // Take its variable -- no, the variable must equal the // sub-axiom's variable // String var = ontUPO.getAnnotationValue(conceptIRI, // IRI.create(UPO_SPARQL_VARIABLE_LABEL) ); // Append the triple pattern to the query fragmentOfQuery = fragmentOfQuery + (SxAxVar + " " + ConstantsOntConverter.IRI_RDF_TYPE_SHORT + " <" + conceptIRI + ">" + ".\n"); LOGGER.info(" ConceptIRI:" + conceptIRI); LOGGER.info(" Var of subAx:" + SxAxVar); LOGGER.info(" Fragment:" + fragmentOfQuery); } LOGGER.info(" ===END-getSparqlQueryPart=="); return fragmentOfQuery; }
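An editorial example of the output (hypothetical values): with SxAxVar = "?x", ConstantsOntConverter.IRI_RDF_TYPE_SHORT resolving to the compact rdf:type token "a", and one class whose UPO concept IRI is http://example.org/upo#Person, the method emits one rdf:type triple pattern per class:

// ?x a <http://example.org/upo#Person> .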
private Set<OWLClassExpression> getInferredParents( OWLOntology sourceOntology, OWLClass child, Set<OWLClass> viewed) { Set<OWLClassExpression> result = new TreeSet<OWLClassExpression>(); if (!viewed.contains(child)) { viewed.add(child); Set<OWLClassExpression> parents = new TreeSet<OWLClassExpression>(); parents.addAll(child.getSuperClasses(sourceOntology)); parents.addAll(child.getEquivalentClasses(sourceOntology)); for (OWLClassExpression parent : parents) { if (parent instanceof OWLClass) { result.add(parent); result.addAll(getInferredParents(sourceOntology, (OWLClass) parent, viewed)); } else if (parent instanceof OWLObjectIntersectionOf) { Set<OWLClassExpression> inferredParents = ((OWLObjectIntersectionOf) parent).getOperands(); result.addAll(inferredParents); for (OWLClassExpression inferredParent : inferredParents) { if (inferredParent instanceof OWLClass) { result.addAll(getInferredParents(sourceOntology, (OWLClass) inferredParent, viewed)); } } } else { result.add(parent); } } } return result; }
@Test public void testNegAllValuesFrom() { OWLObjectProperty property = ObjectProperty(getIRI("p")); OWLClass filler = Class(getIRI("A")); OWLObjectAllValuesFrom allValuesFrom = ObjectAllValuesFrom(property, filler); OWLClassExpression cls = allValuesFrom.getObjectComplementOf(); OWLClassExpression nnf = ObjectSomeValuesFrom(property, filler.getObjectComplementOf()); assertEquals(cls.getNNF(), nnf); }
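The test above checks the NNF identity not(all p A) == some p (not A). A hedged companion sketch for the dual identity, not(some p A) == all p (not A), reusing the same static helpers assumed available in this test class:

@Test public void testNegSomeValuesFrom() { OWLObjectProperty property = ObjectProperty(getIRI("p")); OWLClass filler = Class(getIRI("A")); OWLObjectSomeValuesFrom someValuesFrom = ObjectSomeValuesFrom(property, filler); OWLClassExpression cls = someValuesFrom.getObjectComplementOf(); OWLClassExpression nnf = ObjectAllValuesFrom(property, filler.getObjectComplementOf()); assertEquals(cls.getNNF(), nnf); }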
private void runWithSeparateFiles() { if (owlFile == null) { throw new NullPointerException("You have to specify an ontology file!"); } OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = null; OBDADataFactory obdaDataFactory = OBDADataFactoryImpl.getInstance(); try { ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile))); if (disableReasoning) { /* * when reasoning is disabled, we extract only the declaration assertions for the vocabulary */ ontology = extractDeclarations(manager, ontology); } Collection<Predicate> predicates = new ArrayList<>(); for (OWLClass owlClass : ontology.getClassesInSignature()) { Predicate predicate = obdaDataFactory.getClassPredicate(owlClass.getIRI().toString()); predicates.add(predicate); } for (OWLDataProperty owlDataProperty : ontology.getDataPropertiesInSignature()) { Predicate predicate = obdaDataFactory.getDataPropertyPredicate(owlDataProperty.getIRI().toString()); predicates.add(predicate); } for (OWLObjectProperty owlObjectProperty : ontology.getObjectPropertiesInSignature()) { Predicate predicate = obdaDataFactory.getObjectPropertyPredicate(owlObjectProperty.getIRI().toString()); predicates.add(predicate); } OBDAModel obdaModel = loadMappingFile(mappingFile); Ontology inputOntology = OWLAPI3TranslatorUtility.translate(ontology); obdaModel.declareAll(inputOntology.getVocabulary()); int numPredicates = predicates.size(); int i = 1; for (Predicate predicate : predicates) { System.err.println(String.format("Materializing %s (%d/%d)", predicate, i, numPredicates)); serializePredicate(ontology, inputOntology, obdaModel, predicate, outputFile, format); i++; } } catch (OWLOntologyCreationException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }
public boolean isDomainOrRangeOfObjectProperty(OWLClass clase, OWLObjectProperty prop) { for (OWLClass owlClass1 : _reasoner.getObjectPropertyDomains(prop, false).getFlattened()) { if (owlClass1.getIRI().equals(clase.getIRI())) { Log.d( TAG, "<isDomainOrRangeOfObjectProperty> " + clase.getIRI().getRemainder().get() + " domain or range of " + prop.getIRI().getRemainder().get() + "? YES!"); return true; } } for (OWLClass owlClass1 : _reasoner.getObjectPropertyRanges(prop, false).getFlattened()) { if (owlClass1.getIRI().equals(clase.getIRI())) { Log.d( TAG, "<isDomainOrRangeOfObjectProperty> " + clase.getIRI().getRemainder().get() + " domain or range of " + prop.getIRI().getRemainder().get() + "? YES!"); return true; } } Log.d( TAG, "<isDomainOrRangeOfObjectProperty> " + clase.getIRI().getRemainder().get() + " domain or range of " + prop.getIRI().getRemainder().get() + "? NO"); return false; }
public Set<OWLDataProperty> getDataPropertiesByDomain(OWLClass domain) { Set<OWLDataProperty> propSet = new HashSet<OWLDataProperty>(); Set<OWLDataProperty> dataProps = _ontology.getDataPropertiesInSignature(); for (OWLDataProperty prop : dataProps) { for (OWLClass owlClass : _reasoner.getDataPropertyDomains(prop, true).getFlattened()) { if (owlClass.equals(domain)) { propSet.add(prop); } } } return propSet; }
/** * Sets up a new/fresh OBDA model. This is done by replacing the instance this.obdacontroller * (the OBDA model) with a new object. On creation, listeners for the data sources, mappings and * queries are set up so that changes in these trigger an ontology change. * * <p>Additionally, this method configures all available OBDAOWLReasonerFactory objects to have a * reference to the newly created OBDA model and to the global preference object. This is * necessary so that the factories are able to pass the OBDA model to the reasoner instances when * they are created. */ private void setupNewOBDAModel() { OBDAModel activeOBDAModel = getActiveOBDAModel(); if (activeOBDAModel != null) { return; } activeOBDAModel = dfac.getOBDAModel(); activeOBDAModel.addSourcesListener(dlistener); activeOBDAModel.addMappingsListener(mlistener); queryController.addListener(qlistener); OWLModelManager mmgr = owlEditorKit.getOWLWorkspace().getOWLModelManager(); Set<OWLOntology> ontologies = mmgr.getOntologies(); for (OWLOntology ontology : ontologies) { // Setup the entity declarations for (OWLClass c : ontology.getClassesInSignature()) { OClass pred = ofac.createClass(c.getIRI().toString()); activeOBDAModel.declareClass(pred); } for (OWLObjectProperty r : ontology.getObjectPropertiesInSignature()) { ObjectPropertyExpression pred = ofac.createObjectProperty(r.getIRI().toString()); activeOBDAModel.declareObjectProperty(pred); } for (OWLDataProperty p : ontology.getDataPropertiesInSignature()) { DataPropertyExpression pred = ofac.createDataProperty(p.getIRI().toString()); activeOBDAModel.declareDataProperty(pred); } } // Setup the prefixes PrefixOWLOntologyFormat prefixManager = PrefixUtilities.getPrefixOWLOntologyFormat(mmgr.getActiveOntology()); // addOBDACommonPrefixes(prefixManager); PrefixManagerWrapper prefixwrapper = new PrefixManagerWrapper(prefixManager); activeOBDAModel.setPrefixManager(prefixwrapper); OWLOntology activeOntology = mmgr.getActiveOntology(); String defaultPrefix = prefixManager.getDefaultPrefix(); if (defaultPrefix == null) { OWLOntologyID ontologyID = activeOntology.getOntologyID(); defaultPrefix = ontologyID.getOntologyIRI().toURI().toString(); } activeOBDAModel.getPrefixManager().addPrefix(PrefixManager.DEFAULT_PREFIX, defaultPrefix); // Add the model URI modelUri = activeOntology.getOntologyID().getOntologyIRI().toURI(); obdamodels.put(modelUri, activeOBDAModel); }
private Set<String> getSuperClasses(OWLReasoner reasoner, OWLClass cl, OWLClass owlThing) { Set<String> pset = new HashSet<String>(); Set<OWLClass> reasonerSuperClasses = reasoner.getSuperClasses(cl, false).getFlattened(); // add cl itself to S(X) computed by reasoner. That is missing // in its result. reasonerSuperClasses.add(cl); reasonerSuperClasses.add(owlThing); // adding equivalent classes -- they are not considered if asked for superclasses Iterator<OWLClass> iterator = reasoner.getEquivalentClasses(cl).iterator(); while (iterator.hasNext()) reasonerSuperClasses.add(iterator.next()); for (OWLClass scl : reasonerSuperClasses) pset.add(scl.toString()); return pset; }
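An editorial usage sketch: any OWLReasoner implementation works here; the structural reasoner bundled with the OWL API keeps the example dependency-free (the ontology variable and the class IRI are assumptions).

OWLReasoner reasoner = new StructuralReasonerFactory().createReasoner(ontology);
OWLDataFactory df = ontology.getOWLOntologyManager().getOWLDataFactory();
Set<String> supers = getSuperClasses(
    reasoner, df.getOWLClass(IRI.create("http://example.org#C")), df.getOWLThing());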
private void compareAndPrintEqualSizedClasses( OWLClass cl, Set<OWLClass> reasonerSuperClasses, Set<String> superClasses, Jedis idReader) throws Exception { // compare each element of these 2 sets boolean print = false; for (OWLClass scl : reasonerSuperClasses) { String sclID = conceptToID(scl.toString(), idReader); if (!superClasses.contains(sclID)) { print = true; System.out.print(cl.toString() + " -e- " + scl.toString()); System.out.print(" , "); } } if (print) System.out.println("\n"); }
private Iterable<Concept> getDomainsConcepts(OWLOntology o, OWLObjectProperty objectProperty) { String uri = objectProperty.getIRI().toString(); if (objectProperty.getDomains(o).size() == 0) { throw new LumifyException("No domain classes found on " + uri); } List<Concept> domains = new ArrayList<Concept>(); for (OWLClassExpression domainClassExpr : objectProperty.getDomains(o)) { OWLClass domainClass = domainClassExpr.asOWLClass(); String domainClassUri = domainClass.getIRI().toString(); Concept ontologyClass = getConceptByIRI(domainClassUri); checkNotNull(ontologyClass, "Could not find class with uri: " + domainClassUri); domains.add(ontologyClass); } return domains; }
public Set<OWLObjectProperty> getObjectPropertiesByDomainAndRange( OWLClass domain, OWLClass range) { Set<OWLObjectProperty> propSet = new HashSet<OWLObjectProperty>(); for (OWLObjectProperty prop : _ontology.getObjectPropertiesInSignature()) { for (OWLClass owlClass : _reasoner.getObjectPropertyDomains(prop, true).getFlattened()) { if (owlClass.equals(domain)) { for (OWLClass owlClass2 : _reasoner.getObjectPropertyRanges(prop, true).getFlattened()) { if (owlClass2.equals(range)) { propSet.add(prop); } } } } } return propSet; }
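An editorial usage sketch (IRIs hypothetical): collect every object property whose inferred direct domain includes Person and direct range includes Address.

OWLDataFactory df = OWLManager.getOWLDataFactory();
Set<OWLObjectProperty> props = getObjectPropertiesByDomainAndRange(
    df.getOWLClass(IRI.create("http://example.org#Person")),
    df.getOWLClass(IRI.create("http://example.org#Address")));
// Design note: getObjectPropertyDomains(prop, true) returns only the most specific named
// domains, so a property declared with a broader domain (e.g. Agent) will not match Person here.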
private void recursiveAddTuple( String termPath, OWLClass cls, OntologyLoader ontologyLoader, List<Tuple> tuples) { String label = ontologyLoader.getLabel(cls).replaceAll("[^a-zA-Z0-9 ]", " "); Set<String> synonyms = new HashSet<String>(); synonyms.add(label); synonyms.addAll(ontologyLoader.getSynonyms(cls)); // listOfChildren.addAll(model.getAssociatedClasses(cls)); StringBuilder alternativeDefinitions = new StringBuilder(); for (Set<OWLClass> alternativeDefinition : ontologyLoader.getAssociatedClasses(cls)) { StringBuilder newDefinition = new StringBuilder(); for (OWLClass associatedClass : alternativeDefinition) { if (newDefinition.length() != 0) newDefinition.append(','); newDefinition.append(associatedClass.getIRI().toString()); } if (alternativeDefinitions.length() != 0 && newDefinition.length() != 0) alternativeDefinitions.append("&&&"); alternativeDefinitions.append(newDefinition); } if (alternativeDefinitions.length() != 0) { System.out.println(alternativeDefinitions.toString()); } for (String synonym : synonyms) { KeyValueTuple tuple = new KeyValueTuple(); tuple.set(NODE_PATH, termPath); tuple.set(BOOST, false); tuple.set(ONTOLOGY_IRI, ontologyIRI); tuple.set(ONTOLOGY_NAME, ontologyName); tuple.set(ONTOLOGY_TERM, label); tuple.set(ONTOLOGY_TERM_IRI, cls.getIRI().toString()); tuple.set(ONTOLOGY_LABEL, ontologyLoader.getOntologyName()); tuple.set(ENTITY_TYPE, "ontologyTerm"); tuple.set(SYNONYMS, synonym.replaceAll("[^a-zA-Z0-9 ]", " ")); tuple.set(ALTERNATIVE_DEFINITION, alternativeDefinitions.toString()); tuples.add(tuple); } Set<OWLClass> listOfChildren = ontologyLoader.getChildClass(cls); if (listOfChildren.size() > 0) { int i = 0; for (OWLClass childClass : listOfChildren) { String childTermPath = termPath + "." + i; recursiveAddTuple(childTermPath, childClass, ontologyLoader, tuples); i++; } } }
private void compareClassificationResults( OWLOntology ontology, OWLReasoner reasoner, Jedis resultStore, Jedis idReader) throws Exception { Set<OWLClass> classes = ontology.getClassesInSignature(); Pipeline resultPipeline = resultStore.pipelined(); double classCount = 0; int multiplier = 1; double totalCount = 0; for (OWLClass cl : classes) { classCount++; double classProgress = (classCount / classes.size()) * 100; Set<OWLClass> reasonerSuperclasses = reasoner.getSuperClasses(cl, false).getFlattened(); // add cl itself to S(X) computed by reasoner. That is missing // in its result. reasonerSuperclasses.add(cl); // adding equivalent classes -- they are not considered if asked for superclasses Iterator<OWLClass> iterator = reasoner.getEquivalentClasses(cl).iterator(); while (iterator.hasNext()) reasonerSuperclasses.add(iterator.next()); String classToCheckID = conceptToID(cl.toString(), idReader); List<Response<Double>> responseList = new ArrayList<Response<Double>>(); for (OWLClass scl : reasonerSuperclasses) { String key = conceptToID(scl.toString(), idReader); responseList.add(resultPipeline.zscore(key, classToCheckID)); } resultPipeline.sync(); double hitCount = 0; for (Response<Double> response : responseList) { if (response.get() != null) hitCount++; } totalCount += (hitCount / reasonerSuperclasses.size()); if (classProgress >= (5 * multiplier)) { System.out.println( "% of no. of classes looked at: " + classProgress + "\tProgress %: " + (totalCount / classCount) * 100); multiplier++; } } double progress = totalCount / classes.size(); System.out.println("\nProgress %: " + (progress * 100)); }
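An editorial sketch of the Redis access pattern used above, in isolation (host, keys and member id are hypothetical): batch one ZSCORE per key through a Jedis pipeline, then read the responses only after sync().

Jedis jedis = new Jedis("localhost");
Pipeline pipe = jedis.pipelined();
List<Response<Double>> pending = new ArrayList<Response<Double>>();
for (String key : Arrays.asList("conceptA", "conceptB")) {
  pending.add(pipe.zscore(key, "memberId")); // queued, not yet executed
}
pipe.sync(); // flush the batch; Response.get() is only valid after this point
for (Response<Double> r : pending) {
  boolean present = (r.get() != null); // a null score means the member is not in the sorted set
}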
public String[] searchOntology(String term, String ontofilepath, String type) { OWLAccessorImpl owlapi = new OWLAccessorImpl(new File(ontofilepath)); List<OWLClass> matches = owlapi.retrieveConcept(term); Iterator<OWLClass> it = matches.iterator(); String[] result = null; while (it.hasNext()) { OWLClass c = it.next(); String label = owlapi.getLabel(c); if (label.compareToIgnoreCase(term) == 0) { result = new String[3]; result[0] = type; result[1] = c.toString() .replaceFirst("http.*?(?=(PATO|TAO)_)", "") .replaceFirst("_", ":") .replaceFirst(">$", ""); result[2] = label; return result; } } it = matches.iterator(); result = new String[] {"", "", ""}; while (it.hasNext()) { OWLClass c = it.next(); String label = owlapi.getLabel(c); result[0] = type; result[1] += c.toString() .replaceFirst(".*http.*?(?=(PATO|TAO)_)", "") .replaceFirst("_", ":") .replaceFirst(">$", "") + ";"; result[2] += label + ";"; } // result is always non-null here; just trim the trailing separators result[1] = result[1].replaceFirst(";$", ""); result[2] = result[2].replaceFirst(";$", ""); return result; }
/* * (non-Javadoc) * * @see ru.iimm.ontology.OWL2UPOConverter.SubAxiom#getNewTitle() */ @Override protected String generateTitle() { LOGGER.info("=== Generate title for SubAxiom:\n" + this.getSubAxOWL()); String namedTitle = ""; /* Append the names of the sub-axiom's classes to the title */ for (OWLClass cls : this.getClsList()) { namedTitle += cls.getIRI().getFragment() + ConstantsOntConverter.UPO_TITLE_DELIMITER; } /* Build a map: property --> array of sub-axioms [] */ HashMap<IRI, ArrayList<SubAxiom>> map = this.getPropSubaxMap(this.getPrpFromAxList()); /* From the map, build subtitles for the complex sub-axioms */ for (IRI propIRI : map.keySet()) { for (SubAxiom subax : map.get(propIRI)) // namedTitle += subax.getTitle() namedTitle += propIRI.getFragment() + ConstantsOntConverter.UPO_PROP_VAL_DELIMITER + subax.getTitle() + ConstantsOntConverter.UPO_TITLE_DELIMITER; } /* Append the name fragment derived from the datatype properties */ namedTitle = namedTitle + this.getDatatypePropNameFragment(this.getDtpPrpList()); /* Trim the surplus delimiter (-) from the end of the title */ namedTitle = namedTitle.length() > 0 ? namedTitle.substring( 0, namedTitle.lastIndexOf(ConstantsOntConverter.UPO_TITLE_DELIMITER)) : namedTitle; /* Wrap the sub-axiom in brackets */ // namedTitle = namedTitle.length()>0 ? "[" + namedTitle + "]" : null; namedTitle = namedTitle.length() > 0 ? UPOont.getSBrackedString(namedTitle) : null; return namedTitle; }
public Set<OWLDataProperty> getDataPropertiesByDomainExtended(OWLClass domain) { Log.d( TAG, "<getDataPropertiesByDomainExtended> Searching for data properties with domain " + domain.getIRI().toString()); Set<OWLDataProperty> propSet = new HashSet<OWLDataProperty>(); for (OWLDataProperty prop : _ontology.getDataPropertiesInSignature()) { if (isDomainOfDataProperty(domain, prop)) { propSet.add(prop); } } return propSet; }
private void compareAndPrintUnEqualSizedIndividuals( OWLClass cl, Set<OWLNamedIndividual> reasonerInstances, Set<String> computedInstances, Jedis idReader) throws Exception { // compare each element of these 2 sets boolean print = false; for (OWLNamedIndividual scl : reasonerInstances) { String sclID = conceptToID(scl.toString(), idReader); if (!computedInstances.contains(sclID)) { print = true; System.out.print(cl.toString() + " -ne- " + scl.toString()); System.out.print(" , "); } computedInstances.remove(sclID); } for (String s : computedInstances) System.out.println("\t -- " + Util.idToConcept(s, idReader) + "(" + s + ")"); System.out.println(); }
@Override public boolean compare(JSONObject jsonObject, OWLClass owlClass, OWLOntology owlOntology) { Set<OWLClass> allClassesInAxiomsRelated = new HashSet<OWLClass>(); JSONArray jsonAnnotationsArray = (JSONArray) jsonObject.get("annotations"); Set<OWLAxiom> owlAxiomSet = owlClass.getReferencingAxioms(owlOntology); Iterator<OWLAxiom> owlAxiomSetIterator = owlAxiomSet.iterator(); while (owlAxiomSetIterator.hasNext()) { OWLAxiom owlAxiom = owlAxiomSetIterator.next(); Set<OWLClass> owlClassesInAxiom = owlAxiom.getClassesInSignature(); allClassesInAxiomsRelated.addAll(owlClassesInAxiom); } Set<String> owlClassesIds = new HashSet<String>(); Iterator<OWLClass> allClassesInAxiomsRelatedIterator = allClassesInAxiomsRelated.iterator(); while (allClassesInAxiomsRelatedIterator.hasNext()) { OWLClass currentClass = allClassesInAxiomsRelatedIterator.next(); owlClassesIds.add( OwlDataExtrators.getAttribute("id", currentClass, owlOntology).replace(":", "_")); } Set<String> jsonAnnotationsIdsSet = new HashSet<String>(); Iterator<JSONObject> jsonAnnotationsArrayIterator = jsonAnnotationsArray.iterator(); while (jsonAnnotationsArrayIterator.hasNext()) { JSONObject jsonAnnotation = jsonAnnotationsArrayIterator.next(); String jsonAnnotationIdentifier = (String) jsonAnnotation.get("identifier"); jsonAnnotationsIdsSet.add(jsonAnnotationIdentifier); } Set<Set<String>> jsonAnnotationsIdPowerSet = SetsOperations.powerSet(jsonAnnotationsIdsSet); Iterator<Set<String>> jsonAnnotationsIdPowerSetIterator = jsonAnnotationsIdPowerSet.iterator(); while (jsonAnnotationsIdPowerSetIterator.hasNext()) { Set<String> idsSet = jsonAnnotationsIdPowerSetIterator.next(); if (idsSet.containsAll(owlClassesIds)) { return true; } } return false; }
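An editorial design note (hedged): building the full power set is O(2^n) in the number of annotation ids, and some subset of jsonAnnotationsIdsSet contains owlClassesIds if and only if the full set does. If behavior parity is confirmed, the whole power-set loop reduces to a single check:

return jsonAnnotationsIdsSet.containsAll(owlClassesIds);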
private void handleNewSplitAxioms( OWLClass newlyCreatedClass, OWLClass classThatWasSplit, EntityBasedDiff diff) { OWLOntology sourceOntology = diffMap.getSourceOntology(); Map<OWLEntity, IRI> newTargetToSplitSource = Collections.singletonMap((OWLEntity) newlyCreatedClass, classThatWasSplit.getIRI()); OWLObjectDuplicator duplicator = new OWLObjectDuplicator(newTargetToSplitSource, diffMap.getOWLDataFactory()); Set<OWLClassExpression> inferredParents = getInferredParents(sourceOntology, classThatWasSplit); for (MatchedAxiom match : new ArrayList<MatchedAxiom>(diff.getAxiomMatches())) { if (match.getDescription().equals(MatchedAxiom.AXIOM_ADDED) && cameFromSourceOntology( (OWLAxiom) duplicator.duplicateObject(match.getTargetAxiom()), sourceOntology, classThatWasSplit, inferredParents)) { MatchedAxiom modifiedBySplit = new MatchedAxiom(null, match.getTargetAxiom(), COPIED_FROM_SPLIT); changes.removeMatch(match); changes.addMatch(modifiedBySplit); } } }
public Set<OWLClass> getRootUnsatisfiableClasses() { // StructureBasedRootClassFinder srd = new StructureBasedRootClassFinder(this.baseReasoner); StructuralRootDerivedReasoner srd = new StructuralRootDerivedReasoner(this.manager, this.baseReasoner, this.reasonerFactory); Set<OWLClass> estimatedRoots = srd.getRootUnsatisfiableClasses(); this.cls2JustificationMap = new HashMap<OWLClass, Set<Explanation>>(); Set<OWLAxiom> allAxioms = new HashSet<OWLAxiom>(); for (OWLOntology ont : this.baseReasoner.getRootOntology().getImportsClosure()) { allAxioms.addAll(ont.getLogicalAxioms()); } for (OWLClass cls : estimatedRoots) { this.cls2JustificationMap.put(cls, new HashSet<Explanation>()); System.out.println("POTENTIAL ROOT: " + cls); } System.out.println("Finding real roots from " + estimatedRoots.size() + " estimated roots"); int done = 0; this.roots.addAll(estimatedRoots); for (OWLClass estimatedRoot : estimatedRoots) { try { PelletExplanationGenerator gen = new PelletExplanationGenerator(manager.createOntology(allAxioms)); OWLDataFactory df = this.manager.getOWLDataFactory(); Set<Explanation> expls = gen.getExplanations(df.getOWLSubClassOfAxiom(estimatedRoot, df.getOWLNothing())); cls2JustificationMap.get(estimatedRoot).addAll(expls); ++done; System.out.println("Done " + done); } catch (OWLOntologyCreationException e) { e.printStackTrace(); } } for (OWLClass clsA : estimatedRoots) { for (OWLClass clsB : estimatedRoots) if (!clsA.equals(clsB)) { Set<Explanation> clsAExpls = cls2JustificationMap.get(clsA); Set<Explanation> clsBExpls = cls2JustificationMap.get(clsB); boolean clsARootForClsB = false; boolean clsBRootForClsA = false; for (Explanation clsAExpl : clsAExpls) { for (Explanation clsBExpl : clsBExpls) if (isRootFor(clsAExpl, clsBExpl)) { clsARootForClsB = true; } else if (isRootFor(clsBExpl, clsAExpl)) { clsBRootForClsA = true; } } if ((!clsARootForClsB) || (!clsBRootForClsA)) if (clsARootForClsB) { this.roots.remove(clsB); } else if (clsBRootForClsA) this.roots.remove(clsA); } } return this.roots; }
// Main wrapping for adding non-ontology documents to GOlr. // Also see OntologySolrLoader. private void add(Bioentity e) { String eid = e.getId(); String esym = e.getSymbol(); String edb = e.getDb(); String etype = e.getTypeCls(); String ename = e.getFullName(); String edbid = e.getDBID(); // LOG.info("Adding: " + eid + " " + esym); SolrInputDocument bioentity_doc = new SolrInputDocument(); // Bioentity document base. bioentity_doc.addField("document_category", "bioentity"); bioentity_doc.addField("id", eid); bioentity_doc.addField("bioentity", eid); bioentity_doc.addField("bioentity_internal_id", edbid); bioentity_doc.addField("bioentity_label", esym); bioentity_doc.addField("bioentity_name", ename); bioentity_doc.addField("source", edb); bioentity_doc.addField("type", etype); // A little more work for the synonyms. List<String> esynonyms = e.getSynonyms(); if (!esynonyms.isEmpty()) { bioentity_doc.addField("synonym", esynonyms); } // Various taxon and taxon closure calculations, including map. String etaxid = e.getNcbiTaxonId(); TaxonDetails taxonDetails = null; if (etaxid != null) { taxonDetails = createTaxonDetails(etaxid); taxonDetails.addToSolrDocument(bioentity_doc); } // Optionally, pull information from the PANTHER file set. List<String> pantherFamilyIDs = new ArrayList<String>(); List<String> pantherFamilyLabels = new ArrayList<String>(); List<String> pantherTreeGraphs = new ArrayList<String>(); // List<String> pantherTreeAnnAncestors = new ArrayList<String>(); // List<String> pantherTreeAnnDescendants = new ArrayList<String>(); if (pset != null && pset.getNumberOfFilesInSet() > 0) { Set<PANTHERTree> pTrees = pset.getAssociatedTrees(eid); if (pTrees != null) { Iterator<PANTHERTree> piter = pTrees.iterator(); int pcnt = 0; // DEBUG while (piter.hasNext()) { pcnt++; // DEBUG PANTHERTree ptree = piter.next(); pantherFamilyIDs.add(ptree.getPANTHERID()); pantherFamilyLabels.add(StringUtils.lowerCase(ptree.getTreeLabel())); pantherTreeGraphs.add(ptree.getOWLShuntGraph().toJSON()); // pantherTreeAnnAncestors = new ArrayList<String>(ptree.getAncestorAnnotations(eid)); // pantherTreeAnnDescendants = new ArrayList<String>(ptree.getDescendantAnnotations(eid)); if (pcnt > 1) { // DEBUG LOG.info( "Belongs to multiple families (" + eid + "): " + StringUtils.join(pantherFamilyIDs, ", ")); } // Store that we saw this for later use in the tree. ptree.addAssociatedGeneProduct(eid, esym); } } } // Optionally, actually /add/ the PANTHER family data to the document. if (!pantherFamilyIDs.isEmpty()) { // BUG/TODO (but probably not ours): We only store the one tree for now as we're assuming that // there is just one family. // Unfortunately, PANTHER still produces data that says sometimes something belongs to more // than one // family (eg something with fly in PTHR10919 PTHR10032), so we block it and just choose the // first. bioentity_doc.addField("panther_family", pantherFamilyIDs.get(0)); bioentity_doc.addField("panther_family_label", pantherFamilyLabels.get(0)); bioentity_doc.addField("phylo_graph_json", pantherTreeGraphs.get(0)); // if( ! pantherTreeAnnAncestors.isEmpty() ){ // bioentity_doc.addField("phylo_ancestor_closure", pantherTreeAnnAncestors); // } // if( ! pantherTreeAnnDescendants.isEmpty() ){ // bioentity_doc.addField("phylo_descendant_closure", pantherTreeAnnDescendants); // } } // We're also going to want to make note of the direct annotations to this bioentity. // This will mean getting ready and then storing all of c5 when we pass through // the annotation loop. 
We'll add to the document on the other side. // Collect information: ids and labels. Map<String, String> direct_list_map = new HashMap<String, String>(); // Something that we'll need for the annotation evidence aggregate later. Map<String, SolrInputDocument> evAggDocMap = new HashMap<String, SolrInputDocument>(); // Annotation doc. // We'll also need to be collecting some aggregate information, like for the GP term closures, // which will be // added at the end of this section. Map<String, String> isap_map = new HashMap<String, String>(); Map<String, String> reg_map = new HashMap<String, String>(); for (GeneAnnotation a : gafDocument.getGeneAnnotations(e.getId())) { SolrInputDocument annotation_doc = new SolrInputDocument(); String clsId = a.getCls(); // Annotation document base from static and previous bioentity. annotation_doc.addField("document_category", "annotation"); // n/a annotation_doc.addField("source", edb); // Col. 1 (from bioentity above) annotation_doc.addField("bioentity", eid); // n/a, should be c1+c2. annotation_doc.addField("bioentity_internal_id", edbid); // Col. 2 (from bioentity above) annotation_doc.addField("bioentity_label", esym); // Col. 3 (from bioentity above) // NOTE: Col. 4 generation is below... annotation_doc.addField("annotation_class", clsId); // Col. 5 addLabelField(annotation_doc, "annotation_class_label", clsId); // n/a // NOTE: Col. 6 generation is below... String a_ev_type = a.getShortEvidence(); annotation_doc.addField("evidence_type", a_ev_type); // Col. 7 // NOTE: Col. 8 generation is below... String a_aspect = a.getAspect(); annotation_doc.addField("aspect", a_aspect); // Col. 9 annotation_doc.addField("bioentity_name", ename); // Col. 10 (from bioentity above) annotation_doc.addField("synonym", esynonyms); // Col. 11 (from bioentity above) annotation_doc.addField("type", etype); // Col. 12 (from bioentity above) String adate = a.getLastUpdateDate(); annotation_doc.addField("date", adate); // Col. 14 String assgnb = a.getAssignedBy(); annotation_doc.addField("assigned_by", assgnb); // Col. 15 // NOTE: Col. 16 generation is below... annotation_doc.addField("bioentity_isoform", a.getGeneProductForm()); // Col. 17 // Optionally, if there is enough taxon for a map, add the collections to the document. if (taxonDetails != null) { taxonDetails.addToSolrDocument(annotation_doc); } // Optionally, actually /add/ the PANTHER family data to the document. if (!pantherFamilyIDs.isEmpty()) { annotation_doc.addField("panther_family", pantherFamilyIDs.get(0)); annotation_doc.addField("panther_family_label", pantherFamilyLabels.get(0)); } // Evidence type closure. Set<OWLClass> ecoClasses = eco.getClassesForGoCode(a_ev_type); Set<OWLClass> ecoSuper = eco.getAncestors(ecoClasses, true); List<String> ecoIDClosure = new ArrayList<String>(); for (OWLClass es : ecoSuper) { String itemID = es.toStringID(); ecoIDClosure.add(itemID); } addLabelFields(annotation_doc, "evidence_type_closure", ecoIDClosure); // Col 4: qualifier generation. 
String comb_aqual = ""; if (a.hasQualifiers()) { if (a.isNegated()) { comb_aqual = comb_aqual + "not"; annotation_doc.addField("qualifier", "not"); } if (a.isContributesTo()) { comb_aqual = comb_aqual + "contributes_to"; annotation_doc.addField("qualifier", "contributes_to"); } if (a.isIntegralTo()) { comb_aqual = comb_aqual + "integral_to"; annotation_doc.addField("qualifier", "integral_to"); } if (a.isColocatesWith()) { comb_aqual = comb_aqual + "colocalizes_with"; annotation_doc.addField("qualifier", "colocalizes_with"); } if (a.isCut()) { comb_aqual = comb_aqual + "cut"; annotation_doc.addField("qualifier", "cut"); } } // Drag in the reference (col 6) List<String> refIds = a.getReferenceIds(); String refIdList = ""; // used to help make unique ID. for (String refId : refIds) { annotation_doc.addField("reference", refId); refIdList = refIdList + "_" + refId; } // Drag in "with" (col 8). // annotation_doc.addField("evidence_with", a.getWithExpression()); String withList = ""; // used to help make unique ID. for (String wi : a.getWithInfos()) { annotation_doc.addField("evidence_with", wi); withList = withList + "_" + wi; } /// /// isa_partof_closure /// OWLObject cls = graph.getOWLObjectByIdentifier(clsId); // TODO: This may be a bug workaround, or it may be the way things are. // getOWLObjectByIdentifier returns null on alt_ids, so skip them for now. if (cls != null) { // System.err.println(clsId); // Is-a part-of closures. ArrayList<String> isap = new ArrayList<String>(); isap.add("BFO:0000050"); Map<String, String> curr_isap_map = addClosureToAnnAndBio( isap, "isa_partof_closure", "isa_partof_closure_label", "isa_partof_closure_map", cls, graph, annotation_doc, bioentity_doc, a.isNegated()); isap_map.putAll(curr_isap_map); // add to aggregate map // // Add to annotation and bioentity isa_partof closures; label and id. // List<String> idClosure = graph.getRelationIDClosure(cls, isap); // List<String> labelClosure = graph.getRelationLabelClosure(cls, isap); // annotation_doc.addField("isa_partof_closure", idClosure); // annotation_doc.addField("isa_partof_closure_label", labelClosure); // for( String tlabel : labelClosure){ // addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel); // } // for( String tid : idClosure){ // addFieldUnique(bioentity_doc, "isa_partof_closure", tid); // } // // // Compile closure maps to JSON. // Map<String, String> isa_partof_map = graph.getRelationClosureMap(cls, isap); // if( ! isa_partof_map.isEmpty() ){ // String jsonized_isa_partof_map = gson.toJson(isa_partof_map); // annotation_doc.addField("isa_partof_closure_map", jsonized_isa_partof_map); // } // Regulates closures. List<String> reg = RelationSets.getRelationSet(RelationSets.COMMON); Map<String, String> curr_reg_map = addClosureToAnnAndBio( reg, "regulates_closure", "regulates_closure_label", "regulates_closure_map", cls, graph, annotation_doc, bioentity_doc, a.isNegated()); reg_map.putAll(curr_reg_map); // add to aggregate map // /// // /// Next, work on the evidence aggregate... // /// // // // Bug/TODO: This is a bit os a slowdown since we're not reusing our work from above // here anymore. // List<String> idIsapClosure = graph.getRelationIDClosure(cls, isap); // Map<String, String> isaPartofMap = graph.getRelationClosureMap(cls, isap); // // // When we cycle, we'll also want to do some stuff to track all of the evidence codes // we see. 
// List<String> aggEvIDClosure = new ArrayList<String>(); // List<String> aggEvWiths = new ArrayList<String>(); // // // Cycle through and pick up all the associated bits for the terms in the closure. // SolrInputDocument ev_agg_doc = null; // for( String tid : idIsapClosure ){ // // String tlabel = isaPartofMap.get(tid); // //OWLObject c = graph.getOWLObjectByIdentifier(tid); // // // Only have to do the annotation evidence aggregate base once. // // Otherwise, just skip over and add the multi fields separately. // String evAggId = eid + "_:ev:_" + clsId; // if (evAggDocMap.containsKey(evAggId)) { // ev_agg_doc = evAggDocMap.get(evAggId); // } else { // ev_agg_doc = new SolrInputDocument(); // evAggDocMap.put(evAggId, ev_agg_doc); // ev_agg_doc.addField("id", evAggId); // ev_agg_doc.addField("document_category", "annotation_evidence_aggregate"); // ev_agg_doc.addField("bioentity", eid); // ev_agg_doc.addField("bioentity_label", esym); // ev_agg_doc.addField("annotation_class", tid); // ev_agg_doc.addField("annotation_class_label", tlabel); // ev_agg_doc.addField("taxon", etaxid); // addLabelField(ev_agg_doc, "taxon_label", etaxid); // // // Optionally, if there is enough taxon for a map, add the collections to the // document. // if( jsonized_taxon_map != null ){ // ev_agg_doc.addField("taxon_closure", taxIDClosure); // ev_agg_doc.addField("taxon_closure_label", taxLabelClosure); // ev_agg_doc.addField("taxon_closure_map", jsonized_taxon_map); // } // // // Optionally, actually /add/ the PANTHER family data to the document. // if( ! pantherFamilyIDs.isEmpty() ){ // ev_agg_doc.addField("panther_family", pantherFamilyIDs.get(0)); // ev_agg_doc.addField("panther_family_label", pantherFamilyLabels.get(0)); // } // } // // // Drag in "with" (col 8), this time for ev_agg. // for (String wi : a.getWithInfos()) { // aggEvWiths.add(wi); // } // // // Make note for the evidence type closure. // aggEvIDClosure.add(a.getShortEvidence()); // } // // // If there was actually a doc created/there, add the cumulative fields to it. // if( ev_agg_doc != null ){ // addLabelFields(ev_agg_doc, "evidence_type_closure", aggEvIDClosure); // addLabelFields(ev_agg_doc, "evidence_with", aggEvWiths); // } } // Let's piggyback on a little of the work above and cache the extra stuff that we'll be // adding to the bioentity at the end // for the direct annotations. c5 and ???. if (a.isNegated() == false) { String dlbl = graph.getLabel(cls); direct_list_map.put(clsId, dlbl); } // Map<String,String> isa_partof_map = new HashMap<String,String>(); // capture labels/ids // OWLObject c = graph.getOWLObjectByIdentifier(clsId); // Set<OWLPropertyExpression> ps = // Collections.singleton((OWLPropertyExpression)getPartOfProperty()); // Set<OWLObject> ancs = graph.getAncestors(c, ps); // for (OWLObject t : ancs) { // if (! (t instanceof OWLClass)) // continue; // String tid = graph.getIdentifier(t); // //System.out.println(edge+" TGT:"+tid); // String tlabel = null; // if (t != null) // tlabel = graph.getLabel(t); // annotation_doc.addField("isa_partof_closure", tid); // addFieldUnique(bioentity_doc, "isa_partof_closure", tid); // if (tlabel != null) { // annotation_doc.addField("isa_partof_closure_label", tlabel); // addFieldUnique(bioentity_doc, "isa_partof_closure_label", tlabel); // // Map both ways. // // TODO: collisions shouldn't be an issue here? 
// isa_partof_map.put(tid, tlabel); // isa_partof_map.put(tlabel, tid); // }else{ // // For the time being at least, I want to ensure that the id and label closures // // mirror eachother as much as possible (for facets and mapping, etc.). Without // // this, in some cases there is simply nothing returned to drill on. // annotation_doc.addField("isa_partof_closure_label", tid); // addFieldUnique(bioentity_doc, "isa_partof_closure_label", tid); // // Map just the one way I guess--see above. // isa_partof_map.put(tid, tid); // } // // // Annotation evidence aggregate base. // String evAggId = eid + "_:ev:_" + clsId; // SolrInputDocument ev_agg_doc; // if (evAggDocMap.containsKey(evAggId)) { // ev_agg_doc = evAggDocMap.get(evAggId); // } // else { // ev_agg_doc = new SolrInputDocument(); // evAggDocMap.put(evAggId, ev_agg_doc); // ev_agg_doc.addField("id", evAggId); // ev_agg_doc.addField("document_category", "annotation_evidence_aggregate"); // ev_agg_doc.addField("bioentity", eid); // ev_agg_doc.addField("bioentity_label", esym); // ev_agg_doc.addField("annotation_class", tid); // ev_agg_doc.addField("annotation_class_label", tlabel); // ev_agg_doc.addField("taxon", taxId); // addLabelField(ev_agg_doc, "taxon_label", taxId); // } // // //evidence_type is single valued // //aggDoc.addField("evidence_type", a.getEvidenceCls()); // // // Drag in "with" (col 8), this time for ev_agg. // for (WithInfo wi : a.getWithInfos()) { // ev_agg_doc.addField("evidence_with", wi.getWithXref()); // } // // //aggDoc.getFieldValues(name) // // TODO: // ev_agg_doc.addField("evidence_type_closure", a.getEvidenceCls()); // } // Column 16. // We only want to climb the is_a/part_of parts here. ArrayList<String> aecc_rels = new ArrayList<String>(); aecc_rels.add("BFO:0000050"); // And capture the label and ID mappings for when we're done the loop. Map<String, String> ann_ext_map = new HashMap<String, String>(); // capture labels/ids for (List<ExtensionExpression> groups : a.getExtensionExpressions()) { // TODO handle extension expression groups for (ExtensionExpression ee : groups) { String eeid = ee.getCls(); OWLObject eObj = graph.getOWLObjectByIdentifier(eeid); annotation_doc.addField("annotation_extension_class", eeid); String eLabel = addLabelField(annotation_doc, "annotation_extension_class_label", eeid); if (eLabel == null) eLabel = eeid; // ensure the label /////////////// // New /////////////// // Get the closure maps. if (eObj != null) { Map<String, String> aecc_cmap = graph.getRelationClosureMap(eObj, aecc_rels); if (!aecc_cmap.isEmpty()) { for (String aecc_id : aecc_cmap.keySet()) { String aecc_lbl = aecc_cmap.get(aecc_id); // Add all items to the document. annotation_doc.addField("annotation_extension_class_closure", aecc_id); annotation_doc.addField("annotation_extension_class_closure_label", aecc_lbl); // And make sure that both id and label are in the per-term map. 
ann_ext_map.put(aecc_lbl, aecc_id); ann_ext_map.put(aecc_id, aecc_lbl); } } } // /////////////// // // Old // /////////////// // // if (eObj != null) { // for (OWLGraphEdge edge : graph.getOutgoingEdgesClosureReflexive(eObj)) { // OWLObject t = edge.getTarget(); // if (!(t instanceof OWLClass)) // continue; // String annExtID = graph.getIdentifier(t); // String annExtLabel = graph.getLabel(edge.getTarget()); // annotation_doc.addField("annotation_extension_class_closure", annExtID); // annotation_doc.addField("annotation_extension_class_closure_label", annExtLabel); // ann_ext_map.put(annExtID, annExtLabel); // ann_ext_map.put(annExtLabel, annExtID); // } // } // Ugly. Hand roll out the data for the c16 special handler. Have mercy on me--I'm going // to just do this by hand since it's a limited case and I don't want to mess with Gson // right now. String complicated_c16r = ee.getRelation(); if (complicated_c16r != null) { List<OWLObjectProperty> relations = graph.getRelationOrChain(complicated_c16r); if (relations != null) { ArrayList<String> relChunk = new ArrayList<String>(); for (OWLObjectProperty rel : relations) { // Use the IRI to get the BFO:0000050 as ID for the part_of OWLObjectProperty String rID = graph.getIdentifier(rel.getIRI()); String rLabel = graph.getLabel(rel); if (rLabel == null) rLabel = rID; // ensure the label relChunk.add("{\"id\": \"" + rID + "\", \"label\": \"" + rLabel + "\"}"); } String finalSpan = StringUtils.join(relChunk, ", "); // Assemble final JSON blob. String aeJSON = "{\"relationship\": {\"relation\": [" + finalSpan + "], \"id\": \"" + eeid + "\", \"label\": \"" + eLabel + "\"}}"; annotation_doc.addField("annotation_extension_json", aeJSON); // LOG.info("added complicated c16: (" + eeid + ", " + eLabel + ") " + aeJSON); } else { // The c16r is unknown to the ontology--render it as just a normal label, without the // link. annotation_doc.addField("annotation_extension_json", complicated_c16r); LOG.info("added unknown c16: " + complicated_c16r); } } } } // Add annotation ext closure map to annotation doc (needs to be outside loop since there are // multiple extensions). if (!ann_ext_map.isEmpty()) { String jsonized_ann_ext_map = gson.toJson(ann_ext_map); annotation_doc.addField("annotation_extension_class_closure_map", jsonized_ann_ext_map); } // Final doc assembly; make the ID /really/ unique. // WARNING: We're relying on the current truth that the toString() method returns a nice // concatenated version // of the GAF line, which is fundamentally unique (or should be). If this changes, revert to // what we were // doing here pre-20150930, which was assembling our own unique line manually. annotation_doc.addField("id", a.toString()); // Finally add doc. add(annotation_doc); } // Add the necessary aggregates to the bio doc. These cannot be done incrementally like the // multi-valued closures // since there can only be a single map. if (!isap_map.isEmpty()) { String jsonized_cmap = gson.toJson(isap_map); bioentity_doc.addField("isa_partof_closure_map", jsonized_cmap); } if (!reg_map.isEmpty()) { String jsonized_cmap = gson.toJson(reg_map); bioentity_doc.addField("regulates_closure_map", jsonized_cmap); } // Add c5 to bioentity. // Compile closure map to JSON and add to the document. String jsonized_direct_map = null; if (!direct_list_map.isEmpty()) { jsonized_direct_map = gson.toJson(direct_list_map); } // Optionally, if there are enough direct annotations for a map, add the collections to the // document. 
if (jsonized_direct_map != null) { List<String> directIDList = new ArrayList<String>(direct_list_map.keySet()); List<String> directLabelList = new ArrayList<String>(direct_list_map.values()); bioentity_doc.addField("annotation_class_list", directIDList); bioentity_doc.addField("annotation_class_list_label", directLabelList); bioentity_doc.addField("annotation_class_list_map", jsonized_direct_map); } add(bioentity_doc); for (SolrInputDocument ev_agg_doc : evAggDocMap.values()) { add(ev_agg_doc); } // Now repeat some of the same to help populate the "general" index for bioentities. SolrInputDocument general_doc = new SolrInputDocument(); // Watch out for "id" collision! general_doc.addField("id", "general_bioentity_" + eid); general_doc.addField("entity", eid); general_doc.addField("entity_label", esym); general_doc.addField("document_category", "general"); general_doc.addField("category", "bioentity"); general_doc.addField( "general_blob", ename + " " + edbid + " " + StringUtils.join(esynonyms, " ")); add(general_doc); }
@Override public void visit(OWLClass cls) { handleObject(cls); cls.getIRI().accept(this); }
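An editorial context sketch: handleObject(...) and the IRI visit belong to a larger walker whose surrounding fields are not shown. The entry point is the OWL API's double dispatch (the visitor instance name below is hypothetical):

OWLClass cls = OWLManager.getOWLDataFactory().getOWLClass(IRI.create("http://example.org#C"));
cls.accept(walker); // dispatches to visit(OWLClass) above, which in turn visits the class's IRI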
@Override public void ontologiesChanged(List<? extends OWLOntologyChange> changes) throws OWLException { Map<OWLEntity, OWLEntity> renamings = new HashMap<OWLEntity, OWLEntity>(); Set<OWLEntity> removals = new HashSet<OWLEntity>(); for (int idx = 0; idx < changes.size(); idx++) { OWLOntologyChange change = changes.get(idx); if (change instanceof SetOntologyID) { IRI newiri = ((SetOntologyID) change).getNewOntologyID().getOntologyIRI(); if (newiri == null) continue; IRI oldiri = ((SetOntologyID) change).getOriginalOntologyID().getOntologyIRI(); log.debug("Ontology ID changed"); log.debug("Old ID: {}", oldiri); log.debug("New ID: {}", newiri); OBDAModel model = obdamodels.get(oldiri.toURI()); if (model == null) { setupNewOBDAModel(); model = getActiveOBDAModel(); } PrefixManager prefixManager = model.getPrefixManager(); prefixManager.addPrefix(PrefixManager.DEFAULT_PREFIX, newiri.toURI().toString()); obdamodels.remove(oldiri.toURI()); obdamodels.put(newiri.toURI(), model); continue; } else if (change instanceof AddAxiom) { OWLAxiom axiom = change.getAxiom(); if (axiom instanceof OWLDeclarationAxiom) { OWLEntity entity = ((OWLDeclarationAxiom) axiom).getEntity(); OBDAModel activeOBDAModel = getActiveOBDAModel(); if (entity instanceof OWLClass) { OWLClass oc = (OWLClass) entity; OClass c = ofac.createClass(oc.getIRI().toString()); activeOBDAModel.declareClass(c); } else if (entity instanceof OWLObjectProperty) { OWLObjectProperty or = (OWLObjectProperty) entity; ObjectPropertyExpression r = ofac.createObjectProperty(or.getIRI().toString()); activeOBDAModel.declareObjectProperty(r); } else if (entity instanceof OWLDataProperty) { OWLDataProperty op = (OWLDataProperty) entity; DataPropertyExpression p = ofac.createDataProperty(op.getIRI().toString()); activeOBDAModel.declareDataProperty(p); } } } else if (change instanceof RemoveAxiom) { OWLAxiom axiom = change.getAxiom(); if (axiom instanceof OWLDeclarationAxiom) { OWLEntity entity = ((OWLDeclarationAxiom) axiom).getEntity(); OBDAModel activeOBDAModel = getActiveOBDAModel(); if (entity instanceof OWLClass) { OWLClass oc = (OWLClass) entity; OClass c = ofac.createClass(oc.getIRI().toString()); activeOBDAModel.unDeclareClass(c); } else if (entity instanceof OWLObjectProperty) { OWLObjectProperty or = (OWLObjectProperty) entity; ObjectPropertyExpression r = ofac.createObjectProperty(or.getIRI().toString()); activeOBDAModel.unDeclareObjectProperty(r); } else if (entity instanceof OWLDataProperty) { OWLDataProperty op = (OWLDataProperty) entity; DataPropertyExpression p = ofac.createDataProperty(op.getIRI().toString()); activeOBDAModel.unDeclareDataProperty(p); } } } if (idx + 1 >= changes.size()) { continue; } if (change instanceof RemoveAxiom && changes.get(idx + 1) instanceof AddAxiom) { // Found the pattern of a renaming refactoring RemoveAxiom remove = (RemoveAxiom) change; AddAxiom add = (AddAxiom) changes.get(idx + 1); if (!(remove.getAxiom() instanceof OWLDeclarationAxiom && add.getAxiom() instanceof OWLDeclarationAxiom)) { continue; } // Found the pattern we are looking for, a remove and add of // declaration axioms OWLEntity olde = ((OWLDeclarationAxiom) remove.getAxiom()).getEntity(); OWLEntity newe = ((OWLDeclarationAxiom) add.getAxiom()).getEntity(); renamings.put(olde, newe); } else if (change instanceof RemoveAxiom && ((RemoveAxiom) change).getAxiom() instanceof OWLDeclarationAxiom) { // Found the pattern of a deletion OWLDeclarationAxiom declaration = (OWLDeclarationAxiom) ((RemoveAxiom) change).getAxiom(); OWLEntity removedEntity = 
declaration.getEntity(); removals.add(removedEntity); } } // Applying the renaming to the OBDA model OBDAModel obdamodel = getActiveOBDAModel(); for (OWLEntity olde : renamings.keySet()) { OWLEntity removedEntity = olde; OWLEntity newEntity = renamings.get(removedEntity); // This set of changes appears to be a "renaming" operation, // hence we will modify the OBDA model accordingly Predicate removedPredicate = getPredicate(removedEntity); Predicate newPredicate = getPredicate(newEntity); obdamodel.renamePredicate(removedPredicate, newPredicate); } // Applying the deletions to the obda model for (OWLEntity removede : removals) { Predicate removedPredicate = getPredicate(removede); obdamodel.deletePredicate(removedPredicate); } }
/* * (non-Javadoc) * * @see * org.semanticweb.owlapi.model.OWLClassExpressionVisitor#visit(org.semanticweb * .owlapi.model.OWLClass) */ @Override public void visit(OWLClass ce) { sb.append(print(ce.toString())); }
public boolean isRestrictedInstantiator(OWLClass c) { return c == null || c.isTopEntity() || m_ontologyOperator.getFlatteningTransformer().isIntermediary(c) || c.equals(m_referenceClass); }