/**
 * Creates a new ontology, persists it in the hypergraph database and registers
 * the database-backed ontology with this manager.
 *
 * @param ontologyIRI the IRI of the ontology to create; converted to an HGDB
 *                    document IRI for storage.
 * @return the database-backed {@link HGDBOntology}, already registered with
 *         this manager under the new ontology's ID.
 * @throws OWLOntologyCreationException if the underlying ontology creation fails.
 * @throws RuntimeException if persisting or registering the ontology fails for
 *                          any other reason (cause is preserved).
 */
public HGDBOntology createOntologyInDatabase(IRI ontologyIRI) throws OWLOntologyCreationException {
    try {
        HGDBOntologyFormat format = new HGDBOntologyFormat();
        IRI hgdbDocumentIRI = HGDBOntologyFormat.convertToHGDBDocumentIRI(ontologyIRI);
        OWLOntology o = super.createOntology(ontologyIRI);
        setOntologyFormat(o, format);
        setOntologyDocumentIRI(o, hgdbDocumentIRI);
        // Persisting under the HGDB document IRI makes it retrievable from the repository below.
        saveOntology(o, format, hgdbDocumentIRI);
        HGDBOntology result = ontologyRepository.getOntologyByDocumentIRI(hgdbDocumentIRI);
        result.setOWLOntologyManager(this);
        // Replace the in-memory ontology with the database-backed one in the manager's registry.
        this.ontologiesByID.put(o.getOntologyID(), result);
        return result;
    } catch (OWLOntologyCreationException ex) {
        // Declared checked exception: propagate as-is instead of masking it as unchecked.
        throw ex;
    } catch (Exception ex) {
        // Unexpected failure while persisting/registering; keep the cause and add context.
        throw new RuntimeException("Failed to create ontology in database: " + ontologyIRI, ex);
    }
}
public void storeFromTo(OWLOntology from, HGDBOntology to) { final Set<OWLAxiom> axioms = from.getAxioms(); int i = 0; for (OWLAxiom axiom : axioms) { to.applyChange(new AddAxiom(to, axiom)); i++; if (i % 5000 == 0) { System.out.println("storeFromTo: Axioms: " + i); } } System.out.println("storeFromTo: Axioms: " + i); // manager.addAxioms(newOnto, axioms); // Add Ontology Annotations for (OWLAnnotation a : from.getAnnotations()) { to.applyChange(new AddOntologyAnnotation(to, a)); } // Add Import Declarations for (OWLImportsDeclaration im : from.getImportsDeclarations()) { to.applyChange(new AddImport(to, im)); } }
/**
 * Round-trip test: imports a sample ontology into a temp-directory repository,
 * creates a series of revisions by removing batches of axioms, renders the
 * versioned ontology to VOWLXML, wipes the repository, then parses the
 * rendered file back and re-stores the ontology data.
 *
 * NOTE(review): failures inside this method are only printed via
 * printStackTrace(), so later stages run even if earlier stages failed.
 */
public void testRenderAndParse() {
    List<File> renderedFiles = new ArrayList<File>();
    // Repository lives under java.io.tmpdir so the test is self-contained.
    String dblocation = System.getProperty("java.io.tmpdir") + File.separator + "hgdbtest";
    HGDBOntologyManager manager = HGOntologyManagerFactory.getOntologyManager(dblocation);
    HGDBOntologyRepository repo = new HGDBOntologyRepository(dblocation);
    VersionManager versionManager = manager.getVersionManager();
    HyperGraph graph = manager.getOntologyRepository().getHyperGraph();
    //
    // IMPORT AND RENDER
    //
    try {
        // Start from a clean repository.
        // repo.dropHypergraph();
        repo.deleteAllOntologies();
        // System.out.println("Running GC");
        // CANNOT RUN GC nullHANDLE problem !!! repo.runGarbageCollector();
        URL ontologyUrl = this.getClass().getResource("/sampleOntology.owl");
        IRI targetIRI = ImportOntologies.importOntology(ontologyUrl, manager);
        // IRI targetIRI = ImportOntologies.importOntology(f2, manager);
        HGDBOntology o = (HGDBOntology) manager.loadOntologyFromOntologyDocument(targetIRI);
        // Put the loaded ontology under version control.
        VersionedOntology vo = versionManager.versioned(o.getAtomHandle());
        // MANIPULATE REMOVE CHANGED
        Object[] axioms = o.getAxioms().toArray();
        // Remove axioms in batches of ~1% of the total, committing a revision
        // after each batch, until ~10% of the axioms are gone. Note the
        // non-obvious index math: the outer i is advanced to j after each
        // inner batch, so the batches do not overlap.
        for (int i = 0; i < axioms.length / 10; i++) {
            int j = i;
            for (; j < i + axioms.length / 100; j++) {
                if (j < axioms.length) {
                    manager.applyChange(new RemoveAxiom(o, (OWLAxiom) axioms[j]));
                }
            }
            i = j;
            vo.commit("SameUser", " commit no " + i);
        }
        // RENDER VERSIONED ONTOLOGY, includes data
        // TODO...
        VOWLXMLRenderConfiguration c = new VOWLXMLRenderConfiguration();
        VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
        // NOTE(review): this produces a file literally named
        // "... Revision-.xml" (no revision index). The parse stage below
        // expects "... Revision-10.xml", which only the commented-out
        // per-revision render loop would have produced — likely stale code
        // after the loop was disabled; confirm which filename is intended.
        File fx = new File(TESTFILE.getAbsolutePath() + " Revision-" + ".xml");
        renderedFiles.add(fx);
        Writer fwriter = new OutputStreamWriter(new FileOutputStream(fx), Charset.forName("UTF-8"));
        r.render(vo, null, fwriter, c);
        // (Disabled) per-revision render loop: rendered one file per revision
        // index i as "... Revision-<i>.xml" via
        // new VOWLXMLRenderConfiguration() / r.render(vo, fwriter, c).
        // Wipe the repository so the parse stage below starts from scratch.
        System.out.println("DELETE ALL ONTOLOGIES");
        repo.deleteAllOntologies();
        GarbageCollector gc = new GarbageCollector(repo);
        gc.runGarbageCollection(GarbageCollector.MODE_DELETED_ONTOLOGIES);
    } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (OWLRendererException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    //
    // PARSE
    //
    File f = new File(TESTFILE.getAbsolutePath() + " Revision-" + 10 + ".xml");
    System.out.println("PARSING: " + f + " length: " + (f.length() / 1024) + " kB");
    OWLOntologyDocumentSource source = new FileDocumentSource(f);
    VOWLXMLParser parser = new VOWLXMLParser();
    // Temporary in-memory ontology acts as the parse target.
    OWLOntologyEx onto = new OWLTempOntologyImpl(manager, new OWLOntologyID());
    // must have onto for manager in super class
    VOWLXMLDocument versionedOntologyRoot = new VOWLXMLDocument(onto);
    //
    // Create VersionedOntology Revision 10
    try {
        parser.parse(graph, source, versionedOntologyRoot, new OWLOntologyLoaderConfiguration());
        System.out.println("PARSING FINISHED.");
    } catch (OWLOntologyChangeException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (UnloadableImportException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (OWLParserException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    // Proceed only when the parsed document looks like a complete history:
    // no dangling heads and the first revision is among the roots.
    if (versionedOntologyRoot.getRenderConfig().heads().isEmpty() && versionedOntologyRoot
            .getRenderConfig()
            .roots()
            .contains(versionedOntologyRoot.getRenderConfig().firstRevision())) {
        OWLOntologyID ontologyID = versionedOntologyRoot.getRevisionData().getOntologyID();
        // Rewrite the document IRI scheme to hgdb:// (substring(7) strips the
        // original scheme prefix — presumably "hgdb://"-length; TODO confirm).
        IRI documentIRI = IRI.create("hgdb://" + ontologyID.getDefaultDocumentIRI().toString().substring(7));
        HGPersistentHandle ontologyUUID =
                repo.getHyperGraph().getHandleFactory().makeHandle(versionedOntologyRoot.getOntologyID());
        try {
            System.out.println("Storing ontology data for : " + ontologyUUID);
            HGDBOntology o = manager
                    .getOntologyRepository()
                    .createOWLOntology(ontologyID, documentIRI, ontologyUUID);
            storeFromTo(versionedOntologyRoot.getRevisionData(), o);
        } catch (HGDBOntologyAlreadyExistsByDocumentIRIException e) {
            e.printStackTrace();
        } catch (HGDBOntologyAlreadyExistsByOntologyIDException e) {
            e.printStackTrace();
        } catch (HGDBOntologyAlreadyExistsByOntologyUUIDException e) {
            e.printStackTrace();
        }
        // Add version control with full matching history.
        System.out.println("Adding version control history to : " + ontologyUUID);
        // (Disabled) reconstruction of the VersionedOntology from the parsed
        // revisions/changesets, listing of the versioned repository contents,
        // and a full re-render to "...FULL-afterParse.xml" for comparison.
    } else {
        System.out.println("ERROR: EXPECTING COMPLETE VERSIONED ONTOLOGY");
    }
    // }
}