public void testRenderAndParse() {
    List<File> renderedFiles = new ArrayList<File>();
    String dblocation = System.getProperty("java.io.tmpdir") + File.separator + "hgdbtest";
    HGDBOntologyManager manager = HGOntologyManagerFactory.getOntologyManager(dblocation);
    HGDBOntologyRepository repo = new HGDBOntologyRepository(dblocation);
    VersionManager versionManager = manager.getVersionManager();
    HyperGraph graph = manager.getOntologyRepository().getHyperGraph();
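    // Fixture: a HyperGraphDB-backed ontology manager and repository under
    // java.io.tmpdir, plus the version manager and the underlying hypergraph
    // instance used by the parser in the PARSE section below.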

    //
    // IMPORT AND RENDER
    //
    try {
      // Start from a clean repository.
      repo.deleteAllOntologies();
      // Note: repo.runGarbageCollector() cannot be run here (known nullHANDLE problem).
      URL ontologyUrl = this.getClass().getResource("/sampleOntology.owl");
      IRI targetIRI = ImportOntologies.importOntology(ontologyUrl, manager);
      HGDBOntology o = (HGDBOntology) manager.loadOntologyFromOntologyDocument(targetIRI);
      VersionedOntology vo = versionManager.versioned(o.getAtomHandle());
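      // Place the ontology under version control; changes are committed as
      // revisions through the returned VersionedOntology.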
      // MANIPULATE: remove axioms in batches, committing one revision per batch
      Object[] axioms = o.getAxioms().toArray();
      // Remove roughly the first tenth of the axioms, about one percent at a
      // time; each batch is committed as its own revision.
      for (int i = 0; i < axioms.length / 10; i++) {
        int j = i;
        for (; j < i + axioms.length / 100; j++) {
          if (j < axioms.length) {
            manager.applyChange(new RemoveAxiom(o, (OWLAxiom) axioms[j]));
          }
        }
        i = j;
        vo.commit("SameUser", "commit no " + i);
      }
      // RENDER VERSIONED ONTOLOGY (includes revision data)
      VOWLXMLRenderConfiguration c = new VOWLXMLRenderConfiguration();
      VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
      // The file name must match the one the PARSE section below reads back.
      File fx = new File(TESTFILE.getAbsolutePath() + " Revision-" + 10 + ".xml");
      renderedFiles.add(fx);
      Writer fwriter = new OutputStreamWriter(new FileOutputStream(fx), Charset.forName("UTF-8"));
      r.render(vo, null, fwriter, c);
      // (An earlier, commented-out variant rendered each revision i to its own
      // "Revision-<i>.xml" file, one renderer and configuration per revision.)
      System.out.println("DELETE ALL ONTOLOGIES");
      repo.deleteAllOntologies();
      GarbageCollector gc = new GarbageCollector(repo);
      gc.runGarbageCollection(GarbageCollector.MODE_DELETED_ONTOLOGIES);
    } catch (OWLOntologyCreationException | IOException | OWLRendererException e) {
      e.printStackTrace();
    }

    //
    // PARSE
    //
    File f = new File(TESTFILE.getAbsolutePath() + " Revision-" + 10 + ".xml");
    System.out.println("PARSING: " + f + " length: " + (f.length() / 1024) + " kB");
    OWLOntologyDocumentSource source = new FileDocumentSource(f);
    VOWLXMLParser parser = new VOWLXMLParser();
    OWLOntologyEx onto = new OWLTempOntologyImpl(manager, new OWLOntologyID());
    // The parser's superclass needs an ontology bound to the manager; the
    // parsed revision data is collected into the VOWLXMLDocument.
    VOWLXMLDocument versionedOntologyRoot = new VOWLXMLDocument(onto);
    // Parse the rendered document, reconstructing the versioned ontology (revision 10).
    try {
      parser.parse(graph, source, versionedOntologyRoot, new OWLOntologyLoaderConfiguration());
      System.out.println("PARSING FINISHED.");
    } catch (OWLOntologyChangeException
        | UnloadableImportException
        | OWLParserException
        | IOException e) {
      e.printStackTrace();
    }
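    // As this test reads the render configuration: the parse produced a
    // complete versioned ontology when no heads remain and the roots contain
    // the first revision.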
    if (versionedOntologyRoot.getRenderConfig().heads().isEmpty()
        && versionedOntologyRoot
            .getRenderConfig()
            .roots()
            .contains(versionedOntologyRoot.getRenderConfig().firstRevision())) {
      OWLOntologyID ontologyID = versionedOntologyRoot.getRevisionData().getOntologyID();
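      // Derive the hgdb:// document IRI from the default document IRI;
      // substring(7) strips the 7-character "http://" prefix (assumes an http IRI).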
      IRI documentIRI =
          IRI.create("hgdb://" + ontologyID.getDefaultDocumentIRI().toString().substring(7));
      HGPersistentHandle ontologyUUID =
          repo.getHyperGraph().getHandleFactory().makeHandle(versionedOntologyRoot.getOntologyID());
      try {
        System.out.println("Storing ontology data for : " + ontologyUUID);
        HGDBOntology o =
            manager
                .getOntologyRepository()
                .createOWLOntology(ontologyID, documentIRI, ontologyUUID);
        storeFromTo(versionedOntologyRoot.getRevisionData(), o);
      } catch (HGDBOntologyAlreadyExistsByDocumentIRIException
          | HGDBOntologyAlreadyExistsByOntologyIDException
          | HGDBOntologyAlreadyExistsByOntologyUUIDException e) {
        e.printStackTrace();
      }
      // Add version control with full matching history.
      System.out.println("Adding version control history to : " + ontologyUUID);
      // (A commented-out continuation reconstructed a VersionedOntology from
      // the parsed revisions and change sets, listed the versioned repository
      // contents, and re-rendered the full versioned ontology to
      // "FULL-afterParse.xml" for comparison.)
    } else {
      System.out.println("ERROR: EXPECTING COMPLETE VERSIONED ONTOLOGY");
    }
  }
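  // storeFromTo(...) is called above but not shown in this example. Below is a
  // minimal sketch of what it plausibly does, assuming OWLOntologyEx exposes
  // the usual OWLOntology accessors and HGDBOntology accepts OWLOntologyChange
  // objects; these are assumptions for illustration, not the repository's
  // actual implementation.
  private void storeFromTo(OWLOntologyEx from, HGDBOntology to) {
    // Copy every axiom from the parsed in-memory ontology into the
    // database-backed ontology.
    for (OWLAxiom axiom : from.getAxioms()) {
      to.applyChange(new AddAxiom(to, axiom));
    }
    // Copy ontology-level annotations as well.
    for (OWLAnnotation annotation : from.getAnnotations()) {
      to.applyChange(new AddOntologyAnnotation(to, annotation));
    }
  }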
Example 2
  protected boolean hghandleSaveAs(OWLOntology ont, OWLOntologyFormat format) throws Exception {
    PHGDBOntologyManagerImpl man =
        (PHGDBOntologyManagerImpl) getModelManager().getOWLOntologyManager();
    OWLOntologyFormat oldFormat = man.getOntologyFormat(ont);
    IRI oldDocumentIRI = man.getOntologyDocumentIRI(ont);
    HGDBOntologyRepository repo = man.getOntologyRepository();
    if (oldFormat instanceof PrefixOWLOntologyFormat && format instanceof PrefixOWLOntologyFormat) {
      PrefixOWLOntologyFormat oldPrefixes = (PrefixOWLOntologyFormat) oldFormat;
      for (String name : oldPrefixes.getPrefixNames()) {
        ((PrefixOWLOntologyFormat) format).setPrefix(name, oldPrefixes.getPrefix(name));
      }
    }
    if (format instanceof HGDBOntologyFormat) {
      // Case A1) HGDB-backed ontology -> repository, same name: already in
      // the repository.
      // Case A2) HGDB-backed ontology -> repository: copy the ontology within
      // the repository. NOT CURRENTLY SUPPORTED; no dialog exists for this.
      if (ont instanceof HGDBOntology) {
        String message =
            "This ontology is database backed and does not need to be saved to the database again.\n"
                + "All changes to it are instantly persisted in the Hypergraph Ontology Repository.\n"
                + "A copy operation to a different name in the repository is currently not supported.";
        System.err.println(message);
        JOptionPane.showMessageDialog(
            getWorkspace(),
            message,
            "Hypergraph Database Backed Ontology",
            JOptionPane.ERROR_MESSAGE);

        return false;
      } else {
        // IN-MEMORY ONTOLOGY
        // Case B) in-memory ontology -> repository: import
        String message =
            "This in-memory ontology will be imported into the Hypergraph Ontology Repository.\n"
                + "This process is estimated to take one minute per 35000 Axioms. \n"
                + ont.getOntologyID().toString()
                + " has "
                + ont.getAxiomCount()
                + " Axioms. \n"
                + "Please be patient. A Success Dialog will pop up when the process is finished.";
        System.err.println(message);
        JOptionPane.showMessageDialog(
            getWorkspace(), message, "Hypergraph Database Import", JOptionPane.INFORMATION_MESSAGE);
        System.out.println("IMPORTING INTO HYPERGRAPH " + ont.getOntologyID());
        // logger.info("IMPORTING INTO HYPERGRAPH " +
        // ont.getOntologyID());
        long startTime = System.currentTimeMillis();
        man.setOntologyFormat(ont, format);
        // TODO OPEN A DIALOG FOR SELECTING A documentIRI
        IRI documentIri;
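        // For anonymous ontologies, probe hgdb://anonymous-<i> document IRIs
        // until an unused one is found.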
        if (ont.getOntologyID().isAnonymous()) {
          int i = 0;
          do {
            documentIri = IRI.create("hgdb://" + "anonymous-" + i);
            i++;
          } while (repo.existsOntologyByDocumentIRI(documentIri));

        } else {
          documentIri =
              HGDBOntologyFormat.convertToHGDBDocumentIRI(
                  ont.getOntologyID().getDefaultDocumentIRI());
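          // convertToHGDBDocumentIRI presumably just swaps the scheme, e.g.
          // http://example.org/onto -> hgdb://example.org/onto (assumption).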
          //
          // Check if exists by ID or Document IRI
          //
          if (repo.existsOntology(ont.getOntologyID())) {
            JOptionPane.showMessageDialog(
                getWorkspace(),
                "An ontology with the same ID already exists in the hypergraph repository."
                    + "\r\n "
                    + ont.getOntologyID()
                    + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
                "Hypergraph Database Import - Failed",
                JOptionPane.ERROR_MESSAGE);
            return false;
          } else if (repo.existsOntologyByDocumentIRI(documentIri)) {
            JOptionPane.showMessageDialog(
                getWorkspace(),
                "An ontology with the same documentIRI already exists in the hypergraph repository."
                    + "\r\n "
                    + documentIri
                    + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
                "Hypergraph Database Import - Failed",
                JOptionPane.ERROR_MESSAGE);
            return false;
          } // else continue import
        }
        System.out.println("Saving with documentIRI: " + documentIri);
        // logger.info("Saving with documentIRI: " + documentIri);
        // + ont.getOntologyID().getOntologyIRI().getFragment());

        man.setOntologyDocumentIRI(ont, documentIri);
        getModelManager().save(ont);
        int durationSecs = (int) ((System.currentTimeMillis() - startTime) / 1000);
        message =
            "Hypergraph Database Import Success.\n"
                + "Saving took "
                + durationSecs
                + " seconds for "
                + ont.getAxiomCount()
                + " Axioms. \n"
                + "You are still working with the in-memory ontology. \n "
                + "Do you wish to use the database backed ontology now?";
        int useHGOnto =
            JOptionPane.showConfirmDialog(
                getWorkspace(),
                message,
                "Hypergraph Database Import Success",
                JOptionPane.YES_NO_OPTION);
        addRecent(documentIri.toURI());
        if (useHGOnto == JOptionPane.YES_OPTION) {
          // Reload so the database-backed ontology replaces the in-memory one.
          getModelManager().reload(ont);
        } else {
          man.setOntologyFormat(ont, oldFormat);
          man.setOntologyDocumentIRI(ont, oldDocumentIRI);
        }
        return true;
      }
    } else {
      // FILE BASED FORMAT
      File file = getSaveAsOWLFile(ont);
      if (file != null) {
        // Temporarily switch format and document IRI; the finally block
        // restores both for database-backed ontologies.
        man.setOntologyFormat(ont, format);
        man.setOntologyDocumentIRI(ont, IRI.create(file));
        try {
          getModelManager().save(ont);
          addRecent(file.toURI());
        } finally {
          if (ont instanceof HGDBOntology) {
            man.setOntologyFormat(ont, oldFormat);
            man.setOntologyDocumentIRI(ont, oldDocumentIRI);
          }
        }
        return true;
      } else {
        System.err.println("No valid file specified for the save as operation - quitting");
        // logger.warn("No valid file specified for the save as operation - quitting");
        return false;
      }
    }
  }
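  // Hypothetical caller sketch (not part of the original source): how a
  // Protégé-style "Save As" action might invoke hghandleSaveAs. Creating the
  // HGDBOntologyFormat directly is an assumption for illustration.
  protected void saveAsToRepository(OWLOntology activeOntology) {
    try {
      // An HGDBOntologyFormat routes into the repository-import branch above;
      // any other format takes the file-based branch instead.
      boolean ok = hghandleSaveAs(activeOntology, new HGDBOntologyFormat());
      System.out.println(ok ? "Save As completed." : "Save As cancelled or failed.");
    } catch (Exception e) {
      e.printStackTrace();
    }
  }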