public OWLOntology getMergedOntology() {
  final IRI mergedOntologyIRI =
      IRI.create(queryOntology.getOntologyID().getDefaultDocumentIRI() + "-merged");
  final OWLOntologyManager mm = controller.getOWLOntologyManager();
  if (mm.contains(mergedOntologyIRI)) {
    return mm.getOntology(mergedOntologyIRI);
  } else {
    try {
      final OWLOntology mergedOntology = mm.createOntology(mergedOntologyIRI);
      mm.setOntologyFormat(mergedOntology, new RDFXMLOntologyFormat());
      final String mergedOntologyFileName =
          mergedOntologyIRI
                  .toURI()
                  .toString()
                  .substring(mergedOntologyIRI.toURI().toString().lastIndexOf("/") + 1)
              + ".owl";
      mm.setOntologyDocumentIRI(
          mergedOntology,
          IRI.create(
              controller.getRuleSpec().getOutputDir().toURI() + "/" + mergedOntologyFileName));
      mm.applyChange(
          new AddImport(
              mergedOntology,
              mm.getOWLDataFactory()
                  .getOWLImportsDeclaration(
                      queryOntology.getOntologyID().getDefaultDocumentIRI())));
      return mergedOntology;
    } catch (OWLOntologyCreationException e) {
      // Creating the merged ontology failed; report the error and signal failure to the caller.
      e.printStackTrace();
      return null;
    }
  }
}
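// Illustrative sketch, not part of the original class: shows how the "-merged" document IRI and
// the .owl file name used above are derived. The query ontology IRI is a hypothetical example;
// only the OWL API IRI class is assumed.
import org.semanticweb.owlapi.model.IRI;

public class MergedIriNamingSketch {
  public static void main(String[] args) {
    IRI queryIri = IRI.create("http://example.org/ontologies/query"); // hypothetical query ontology IRI
    IRI mergedIri = IRI.create(queryIri + "-merged");
    String iriString = mergedIri.toURI().toString();
    // The file name is everything after the last '/' with an ".owl" extension appended.
    String fileName = iriString.substring(iriString.lastIndexOf("/") + 1) + ".owl";
    System.out.println(mergedIri); // http://example.org/ontologies/query-merged
    System.out.println(fileName);  // query-merged.owl
  }
}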
/**
 * Gets the IconBox for the specified image IRI.
 *
 * @param iri The IRI pointing to the image.
 * @return The icon box containing the image.
 * @throws MalformedURLException if the IRI cannot be converted to a valid URL.
 */
private IconBox getImageBox(IRI iri) throws MalformedURLException {
  ImageIcon imageIcon = new ImageIcon(iri.toURI().toURL());
  imageIcon.getImageLoadStatus();
  IconBox iconBox = new IconBox(imageIcon, new HTTPLink(iri.toURI()));
  iconBox.setMaxHeight(50);
  return iconBox;
}
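// Illustrative sketch, not part of the original renderer: loading a Swing ImageIcon from an IRI,
// as getImageBox does before wrapping it in the project-specific IconBox. The image IRI is a
// hypothetical example; only the OWL API IRI class plus javax.swing/java.awt are assumed.
import java.awt.MediaTracker;
import java.net.MalformedURLException;
import javax.swing.ImageIcon;
import org.semanticweb.owlapi.model.IRI;

public class ImageIconFromIriSketch {
  public static void main(String[] args) throws MalformedURLException {
    IRI imageIri = IRI.create("http://example.org/images/diagram.png"); // hypothetical image IRI
    ImageIcon icon = new ImageIcon(imageIri.toURI().toURL());
    // MediaTracker constants report whether the load completed or failed.
    System.out.println(icon.getImageLoadStatus() == MediaTracker.COMPLETE);
  }
}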
/**
 * Renders an IRI as a full IRI rather than as an IRI that represents an entity in the signature
 * of the imports closure of the active ontology.
 *
 * @param page The page that the IRI should be rendered into.
 * @param iri The IRI to be rendered.
 * @return A list of paragraphs that represent the rendering of the annotation value.
 */
private List<Paragraph> renderExternalIRI(Page page, IRI iri) {
  Paragraph paragraph;
  if (isLinkableAddress(iri)) {
    paragraph = page.addParagraph(iri.toString(), new HTTPLink(iri.toURI()));
  } else {
    paragraph = page.addParagraph(iri.toString());
  }
  return Arrays.asList(paragraph);
}
/**
 * Renders an IRI as a full IRI rather than as an IRI that represents an entity in the signature
 * of the imports closure of the active ontology.
 *
 * @param page The page that the IRI should be rendered into.
 * @param iri The IRI to be rendered.
 * @return A list of paragraphs that represent the rendering of the annotation value.
 */
private List<Paragraph> renderExternalIRI(Page page, IRI iri) {
  List<Paragraph> paragraphs = new ArrayList<>();
  String iriString = iri.toString();
  if (isLinkableAddress(iri)) {
    if (isImageAddress(iri) && isDisplayThumbnails()) {
      try {
        IconBox iconBox = getImageBox(iri);
        page.add(iconBox);
      } catch (MalformedURLException e) {
        paragraphs.add(page.addParagraph(iriString, new HTTPLink(iri.toURI())));
      }
    } else {
      paragraphs.add(page.addParagraph(iriString, new HTTPLink(iri.toURI())));
    }
  } else {
    paragraphs.add(page.addParagraph(iriString));
  }
  return paragraphs;
}
/**
 * @param namespace The OntoNet namespace that will prefix the space ID in Web references. This
 *     implementation only allows non-null and non-empty IRIs, with no query or fragment. Hash
 *     URIs are not allowed, slash URIs are preferred. If neither, a slash will be concatenated
 *     and a warning will be logged.
 */
@Override
public void setDefaultNamespace(IRI namespace) {
  if (namespace == null)
    throw new IllegalArgumentException("Stanbol ontology namespace cannot be null.");
  if (namespace.toURI().getQuery() != null)
    throw new IllegalArgumentException("URI Query is not allowed in Stanbol ontology namespaces.");
  if (namespace.toURI().getFragment() != null)
    throw new IllegalArgumentException(
        "URI Fragment is not allowed in Stanbol ontology namespaces.");
  if (namespace.toString().endsWith("#"))
    throw new IllegalArgumentException(
        "Stanbol ontology namespaces must not end with a hash ('#') character.");
  if (!namespace.toString().endsWith("/")) {
    log.warn(
        "Namespace {} does not end with a slash ('/') character. It will be added automatically.",
        namespace);
    namespace = IRI.create(namespace + "/");
  }
  this.namespace = namespace;
}
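// Illustrative sketch, not part of the Stanbol implementation above: shows the validation and
// trailing-slash normalization that setDefaultNamespace applies. The namespace IRI is a
// hypothetical example; only the OWL API IRI class and java.net.URI are assumed.
import org.semanticweb.owlapi.model.IRI;

public class NamespaceNormalizationSketch {
  public static void main(String[] args) {
    IRI ns = IRI.create("http://example.org/ontonet"); // hypothetical, no trailing slash
    System.out.println(ns.toURI().getQuery());    // null -> passes the "no query" check
    System.out.println(ns.toURI().getFragment()); // null -> passes the "no fragment" check
    if (!ns.toString().endsWith("/")) {
      // The setter logs a warning and appends the slash automatically.
      ns = IRI.create(ns + "/");
    }
    System.out.println(ns); // http://example.org/ontonet/
  }
}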
public OBDAModel getActiveOBDAModel() {
  OWLOntology ontology = owlEditorKit.getOWLModelManager().getActiveOntology();
  if (ontology != null) {
    OWLOntologyID ontologyID = ontology.getOntologyID();
    IRI ontologyIRI = ontologyID.getOntologyIRI();
    URI uri = null;
    if (ontologyIRI != null) {
      uri = ontologyIRI.toURI();
    } else {
      uri = URI.create(ontologyID.toString());
    }
    return obdamodels.get(uri);
  }
  return null;
}
/**
 * Gets the URI of this datatype
 *
 * @return The URI that identifies the datatype
 */
public URI getURI() {
  return iri.toURI();
}
@Override
public void ontologiesChanged(List<? extends OWLOntologyChange> changes) throws OWLException {
  Map<OWLEntity, OWLEntity> renamings = new HashMap<OWLEntity, OWLEntity>();
  Set<OWLEntity> removals = new HashSet<OWLEntity>();
  for (int idx = 0; idx < changes.size(); idx++) {
    OWLOntologyChange change = changes.get(idx);
    if (change instanceof SetOntologyID) {
      IRI newiri = ((SetOntologyID) change).getNewOntologyID().getOntologyIRI();
      if (newiri == null) continue;
      IRI oldiri = ((SetOntologyID) change).getOriginalOntologyID().getOntologyIRI();
      log.debug("Ontology ID changed");
      log.debug("Old ID: {}", oldiri);
      log.debug("New ID: {}", newiri);
      OBDAModel model = obdamodels.get(oldiri.toURI());
      if (model == null) {
        setupNewOBDAModel();
        model = getActiveOBDAModel();
      }
      PrefixManager prefixManager = model.getPrefixManager();
      prefixManager.addPrefix(PrefixManager.DEFAULT_PREFIX, newiri.toURI().toString());
      obdamodels.remove(oldiri.toURI());
      obdamodels.put(newiri.toURI(), model);
      continue;
    } else if (change instanceof AddAxiom) {
      OWLAxiom axiom = change.getAxiom();
      if (axiom instanceof OWLDeclarationAxiom) {
        OWLEntity entity = ((OWLDeclarationAxiom) axiom).getEntity();
        OBDAModel activeOBDAModel = getActiveOBDAModel();
        if (entity instanceof OWLClass) {
          OWLClass oc = (OWLClass) entity;
          OClass c = ofac.createClass(oc.getIRI().toString());
          activeOBDAModel.declareClass(c);
        } else if (entity instanceof OWLObjectProperty) {
          OWLObjectProperty or = (OWLObjectProperty) entity;
          ObjectPropertyExpression r = ofac.createObjectProperty(or.getIRI().toString());
          activeOBDAModel.declareObjectProperty(r);
        } else if (entity instanceof OWLDataProperty) {
          OWLDataProperty op = (OWLDataProperty) entity;
          DataPropertyExpression p = ofac.createDataProperty(op.getIRI().toString());
          activeOBDAModel.declareDataProperty(p);
        }
      }
    } else if (change instanceof RemoveAxiom) {
      OWLAxiom axiom = change.getAxiom();
      if (axiom instanceof OWLDeclarationAxiom) {
        OWLEntity entity = ((OWLDeclarationAxiom) axiom).getEntity();
        OBDAModel activeOBDAModel = getActiveOBDAModel();
        if (entity instanceof OWLClass) {
          OWLClass oc = (OWLClass) entity;
          OClass c = ofac.createClass(oc.getIRI().toString());
          activeOBDAModel.unDeclareClass(c);
        } else if (entity instanceof OWLObjectProperty) {
          OWLObjectProperty or = (OWLObjectProperty) entity;
          ObjectPropertyExpression r = ofac.createObjectProperty(or.getIRI().toString());
          activeOBDAModel.unDeclareObjectProperty(r);
        } else if (entity instanceof OWLDataProperty) {
          OWLDataProperty op = (OWLDataProperty) entity;
          DataPropertyExpression p = ofac.createDataProperty(op.getIRI().toString());
          activeOBDAModel.unDeclareDataProperty(p);
        }
      }
    }
    if (idx + 1 >= changes.size()) {
      continue;
    }
    if (change instanceof RemoveAxiom && changes.get(idx + 1) instanceof AddAxiom) {
      // Found the pattern of a renaming refactoring
      RemoveAxiom remove = (RemoveAxiom) change;
      AddAxiom add = (AddAxiom) changes.get(idx + 1);
      if (!(remove.getAxiom() instanceof OWLDeclarationAxiom
          && add.getAxiom() instanceof OWLDeclarationAxiom)) {
        continue;
      }
      // Found the pattern we are looking for: a remove and an add of declaration axioms
      OWLEntity olde = ((OWLDeclarationAxiom) remove.getAxiom()).getEntity();
      OWLEntity newe = ((OWLDeclarationAxiom) add.getAxiom()).getEntity();
      renamings.put(olde, newe);
    } else if (change instanceof RemoveAxiom
        && ((RemoveAxiom) change).getAxiom() instanceof OWLDeclarationAxiom) {
      // Found the pattern of a deletion
      OWLDeclarationAxiom declaration = (OWLDeclarationAxiom) ((RemoveAxiom) change).getAxiom();
      OWLEntity removedEntity = declaration.getEntity();
      removals.add(removedEntity);
    }
  }
  // Applying the renaming to the OBDA model
  OBDAModel obdamodel = getActiveOBDAModel();
  for (OWLEntity olde : renamings.keySet()) {
    OWLEntity removedEntity = olde;
    OWLEntity newEntity = renamings.get(removedEntity);
    // This set of changes appears to be a "renaming" operation,
    // hence we will modify the OBDA model accordingly
    Predicate removedPredicate = getPredicate(removedEntity);
    Predicate newPredicate = getPredicate(newEntity);
    obdamodel.renamePredicate(removedPredicate, newPredicate);
  }
  // Applying the deletions to the OBDA model
  for (OWLEntity removede : removals) {
    Predicate removedPredicate = getPredicate(removede);
    obdamodel.deletePredicate(removedPredicate);
  }
}
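// Illustrative sketch, not part of the plugin above: builds the change pattern that
// ontologiesChanged treats as a rename (a RemoveAxiom of one declaration immediately followed by
// an AddAxiom of another). Uses only standard OWL API 3.x calls; the IRIs are hypothetical.
import java.util.Arrays;
import java.util.List;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class RenamePatternSketch {
  public static void main(String[] args) throws OWLOntologyCreationException {
    OWLOntologyManager m = OWLManager.createOWLOntologyManager();
    OWLDataFactory df = m.getOWLDataFactory();
    OWLOntology onto = m.createOntology(IRI.create("http://example.org/onto")); // hypothetical
    OWLClass oldCls = df.getOWLClass(IRI.create("http://example.org/onto#OldName"));
    OWLClass newCls = df.getOWLClass(IRI.create("http://example.org/onto#NewName"));
    List<OWLOntologyChange> changes =
        Arrays.<OWLOntologyChange>asList(
            new RemoveAxiom(onto, df.getOWLDeclarationAxiom(oldCls)),
            new AddAxiom(onto, df.getOWLDeclarationAxiom(newCls)));
    // The listener scans for this adjacency: remove-declaration followed by add-declaration.
    boolean looksLikeRename =
        changes.get(0) instanceof RemoveAxiom
            && changes.get(1) instanceof AddAxiom
            && changes.get(0).getAxiom() instanceof OWLDeclarationAxiom
            && changes.get(1).getAxiom() instanceof OWLDeclarationAxiom;
    System.out.println(looksLikeRename); // true
  }
}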
protected boolean hghandleSaveAs(OWLOntology ont, OWLOntologyFormat format) throws Exception {
  PHGDBOntologyManagerImpl man =
      (PHGDBOntologyManagerImpl) getModelManager().getOWLOntologyManager();
  OWLOntologyFormat oldFormat = man.getOntologyFormat(ont);
  IRI oldDocumentIRI = man.getOntologyDocumentIRI(ont);
  HGDBOntologyRepository repo = man.getOntologyRepository();
  if (oldFormat instanceof PrefixOWLOntologyFormat && format instanceof PrefixOWLOntologyFormat) {
    PrefixOWLOntologyFormat oldPrefixes = (PrefixOWLOntologyFormat) oldFormat;
    for (String name : oldPrefixes.getPrefixNames()) {
      ((PrefixOWLOntologyFormat) format).setPrefix(name, oldPrefixes.getPrefix(name));
    }
  }
  if (format instanceof HGDBOntologyFormat) {
    // Case A1) OntologyHGDB -> Repository, same name: already in repository
    // Case A2) OntologyHGDB -> Repository: copy ontology in repository
    // NOT CURRENTLY SUPPORTED DIALOG
    if (ont instanceof HGDBOntology) {
      String message =
          "This ontology is database backed and does not need to be saved to the database again.\n"
              + "All changes to it are instantly persisted in the Hypergraph Ontology Repository.\n"
              + "A copy operation to a different name in the repository is currently not supported.";
      System.err.println(message);
      // logger.warn(message);
      JOptionPane.showMessageDialog(
          getWorkspace(), message, "Hypergraph Database Backed Ontology", JOptionPane.ERROR_MESSAGE);
      return false;
    } else {
      // IN-MEMORY ONTOLOGY
      // Case B) OntologyImpl -> Repository: import
      String message =
          "This in-memory ontology will be imported into the Hypergraph Ontology Repository.\n"
              + "This process is estimated to take one minute per 35000 Axioms. \n"
              + ont.getOntologyID().toString()
              + " has "
              + ont.getAxiomCount()
              + " Axioms. \n"
              + "Please be patient. A Success Dialog will pop up when the process is finished.";
      System.err.println(message);
      // logger.info(message);
      JOptionPane.showMessageDialog(
          getWorkspace(), message, "Hypergraph Database Import", JOptionPane.INFORMATION_MESSAGE);
      System.out.println("IMPORTING INTO HYPERGRAPH " + ont.getOntologyID());
      // logger.info("IMPORTING INTO HYPERGRAPH " + ont.getOntologyID());
      long startTime = System.currentTimeMillis();
      man.setOntologyFormat(ont, format);
      // TODO OPEN A DIALOG FOR SELECTING A documentIRI
      IRI documentIri;
      if (ont.getOntologyID().isAnonymous()) {
        int i = 0;
        do {
          documentIri = IRI.create("hgdb://" + "anonymous-" + i);
          i++;
        } while (repo.existsOntologyByDocumentIRI(documentIri));
      } else {
        // HGDBOntologyFormat.convertToHGDBDocumentIRI(ontologyIRI)
        // IRI defaultIri = ont.getOntologyID().getDefaultDocumentIRI();
        // String defaultIriStr = defaultIri.toString();
        // int schemaLength = defaultIri.getScheme().length();
        // String hgdbIRIStr = "hgdb" + defaultIriStr.toString().substring(schemaLength);
        documentIri =
            HGDBOntologyFormat.convertToHGDBDocumentIRI(
                ont.getOntologyID().getDefaultDocumentIRI());
        // Check if an ontology already exists with this ID or document IRI.
        if (repo.existsOntology(ont.getOntologyID())) {
          JOptionPane.showMessageDialog(
              getWorkspace(),
              "An ontology with the same ID already exists in the hypergraph repository."
                  + "\r\n " + ont.getOntologyID()
                  + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
              "Hypergraph Database Import - Failed",
              JOptionPane.ERROR_MESSAGE);
          return false;
        } else if (repo.existsOntologyByDocumentIRI(documentIri)) {
          JOptionPane.showMessageDialog(
              getWorkspace(),
              "An ontology with the same documentIRI already exists in the hypergraph repository."
                  + "\r\n " + documentIri
                  + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
              "Hypergraph Database Import - Failed",
              JOptionPane.ERROR_MESSAGE);
          return false;
        }
        // else continue import
      }
      System.out.println("Saving with documentIRI: " + documentIri);
      // logger.info("Saving with documentIRI: " + documentIri);
      // + ont.getOntologyID().getOntologyIRI().getFragment());
      man.setOntologyDocumentIRI(ont, documentIri);
      getModelManager().save(ont);
      int durationSecs = (int) (System.currentTimeMillis() - startTime) / 1000;
      message =
          "Hypergraph Database Import Success.\n"
              + "Saving took " + durationSecs + " seconds for " + ont.getAxiomCount() + " Axioms. \n"
              + "You are still working with the in-memory ontology. \n "
              + "Do you wish to use the database backed ontology now?";
      int useHGOnto =
          JOptionPane.showConfirmDialog(
              getWorkspace(), message, "Hypergraph Database Import Success", JOptionPane.YES_NO_OPTION);
      addRecent(documentIri.toURI());
      if (useHGOnto == JOptionPane.YES_OPTION) {
        // Load the ontology from hypergraph and close the in-memory copy.
        getModelManager().reload(ont);
      } else {
        man.setOntologyFormat(ont, oldFormat);
        man.setOntologyDocumentIRI(ont, oldDocumentIRI);
      }
      return true;
    }
  } else {
    // FILE-BASED FORMAT
    File file = getSaveAsOWLFile(ont);
    if (file != null) {
      // Do the following only if not database backed.
      man.setOntologyFormat(ont, format);
      man.setOntologyDocumentIRI(ont, IRI.create(file));
      try {
        getModelManager().save(ont);
        addRecent(file.toURI());
      } finally {
        if (ont instanceof HGDBOntology) {
          man.setOntologyFormat(ont, oldFormat);
          man.setOntologyDocumentIRI(ont, oldDocumentIRI);
        }
      }
      return true;
    } else {
      System.err.println("No valid file specified for the save as operation - quitting");
      // logger.warn("No valid file specified for the save as operation - quitting");
      return false;
    }
  }
}