  public HGDBOntology importOntology(IRI documentIRI, HGDBImportConfig config) {
    try {
      // Parse the source document with the standard OWL API loader, then persist it
      // into the HypergraphDB repository by saving it in HGDBOntologyFormat under the
      // same document IRI.
      OWLOntology o = loadOntologyFromOntologyDocument(documentIRI);
      HGDBOntologyFormat format = new HGDBOntologyFormat();
      setOntologyFormat(o, format);
      setOntologyDocumentIRI(o, documentIRI);
      saveOntology(o, format, documentIRI);
      // Return the repository-backed ontology rather than the transient in-memory copy.
      return ontologyRepository.getOntologyByDocumentIRI(documentIRI);
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
  }
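A minimal usage sketch for importOntology, assuming an already-initialized PHGDBOntologyManagerImpl is available as manager and that HGDBImportConfig offers a no-argument constructor (both the variable name and the constructor are assumptions for illustration, not confirmed by the snippet):

  // Hypothetical caller: import an on-disk OWL document into the HypergraphDB repository.
  // 'manager' and the no-arg HGDBImportConfig() constructor are assumptions.
  IRI documentIRI = IRI.create(new File("pizza.owl"));
  HGDBOntology imported = manager.importOntology(documentIRI, new HGDBImportConfig());
  System.out.println("Now database-backed: " + imported.getOntologyID());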
  public HGDBOntology createOntologyInDatabase(IRI ontologyIRI)
      throws OWLOntologyCreationException {
    try {
      // Create a fresh ontology, derive a hgdb:// document IRI from the ontology IRI,
      // and persist it straight into the repository.
      HGDBOntologyFormat format = new HGDBOntologyFormat();
      IRI hgdbDocumentIRI = HGDBOntologyFormat.convertToHGDBDocumentIRI(ontologyIRI);
      OWLOntology o = super.createOntology(ontologyIRI);
      setOntologyFormat(o, format);
      setOntologyDocumentIRI(o, hgdbDocumentIRI);
      saveOntology(o, format, hgdbDocumentIRI);
      // Hand back the database-backed ontology and register it with this manager.
      HGDBOntology result = ontologyRepository.getOntologyByDocumentIRI(hgdbDocumentIRI);
      result.setOWLOntologyManager(this);
      this.ontologiesByID.put(o.getOntologyID(), result);
      return result;
    } catch (Exception ex) {
      throw new RuntimeException(ex);
    }
  }
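createOntologyInDatabase can be exercised the same way; it creates an empty, database-backed ontology instead of importing an existing document (manager is again an assumed, already-configured PHGDBOntologyManagerImpl):

  // Hypothetical caller: create an empty ontology directly in the repository.
  IRI ontologyIRI = IRI.create("http://example.org/ontologies/demo");
  HGDBOntology demo = manager.createOntologyInDatabase(ontologyIRI);
  // Axiom changes applied through the manager from here on are persisted by HypergraphDB.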
Example #3
  protected boolean hghandleSaveAs(OWLOntology ont, OWLOntologyFormat format) throws Exception {
    PHGDBOntologyManagerImpl man =
        (PHGDBOntologyManagerImpl) getModelManager().getOWLOntologyManager();
    OWLOntologyFormat oldFormat = man.getOntologyFormat(ont);
    IRI oldDocumentIRI = man.getOntologyDocumentIRI(ont);
    HGDBOntologyRepository repo = man.getOntologyRepository();
    if (oldFormat instanceof PrefixOWLOntologyFormat && format instanceof PrefixOWLOntologyFormat) {
      PrefixOWLOntologyFormat oldPrefixes = (PrefixOWLOntologyFormat) oldFormat;
      for (String name : oldPrefixes.getPrefixNames()) {
        ((PrefixOWLOntologyFormat) format).setPrefix(name, oldPrefixes.getPrefix(name));
      }
    }
    if (format instanceof HGDBOntologyFormat) {
      // Case A1) OntologyHGDB -> Repository, same name: already in repository
      // Case A2) OntologyHGDB -> Repository: copy ontology in repository
      //          NOT CURRENTLY SUPPORTED DIALOG
      if (ont instanceof HGDBOntology) {
        String message =
            "This ontology is database-backed and does not need to be saved to the database again.\n"
                + "All changes to it are instantly persisted in the Hypergraph Ontology Repository.\n"
                + "A copy operation to a different name in the repository is currently not supported.";
        System.err.println(message);
        // logger.warn(message);
        JOptionPane.showMessageDialog(
            getWorkspace(),
            message,
            "Hypergraph Database Backed Ontology",
            JOptionPane.ERROR_MESSAGE);

        return false;
      } else {
        // IN-MEMORY ONTOLOGY
        // Case B) OntologyImpl -> Repository: Import
        String message =
            "This in-memory ontology will be imported into the Hypergraph Ontology Repository.\n"
                + "This process is estimated to take about one minute per 35,000 axioms.\n"
                + ont.getOntologyID().toString()
                + " has "
                + ont.getAxiomCount()
                + " axioms.\n"
                + "Please be patient. A success dialog will pop up when the process is finished.";
        System.err.println(message);
        // logger.info(message);
        JOptionPane.showMessageDialog(
            getWorkspace(), message, "Hypergraph Database Import", JOptionPane.INFORMATION_MESSAGE);
        System.out.println("IMPORTING INTO HYPERGRAPH " + ont.getOntologyID());
        // logger.info("IMPORTING INTO HYPERGRAPH " +
        // ont.getOntologyID());
        long startTime = System.currentTimeMillis();
        man.setOntologyFormat(ont, format);
        // TODO OPEN A DIALOG FOR SELECTING A documentIRI
        IRI documentIri;
        if (ont.getOntologyID().isAnonymous()) {
          int i = 0;
          do {
            documentIri = IRI.create("hgdb://" + "anonymous-" + i);
            i++;
          } while (repo.existsOntologyByDocumentIRI(documentIri));

        } else {
          // HGDBOntologyFormat.convertToHGDBDocumentIRI(ontologyIRI)
          // IRI defaultIri =
          // ont.getOntologyID().getDefaultDocumentIRI();
          // String defaultIriStr = defaultIri.toString();
          // int schemaLength = defaultIri.getScheme().length();
          // String hgdbIRIStr = "hgdb" +
          // defaultIriStr.toString().substring(schemaLength);
          documentIri =
              HGDBOntologyFormat.convertToHGDBDocumentIRI(
                  ont.getOntologyID().getDefaultDocumentIRI());
          //
          // Check if exists by ID or Document IRI
          //
          if (repo.existsOntology(ont.getOntologyID())) {
            JOptionPane.showMessageDialog(
                getWorkspace(),
                "An ontology with the same ID already exists in the hypergraph repository."
                    + "\r\n "
                    + ont.getOntologyID()
                    + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
                "Hypergraph Database Import - Failed",
                JOptionPane.ERROR_MESSAGE);
            return false;
          } else if (repo.existsOntologyByDocumentIRI(documentIri)) {
            JOptionPane.showMessageDialog(
                getWorkspace(),
                "An ontology with the same documentIRI already exists in the hypergraph repository."
                    + "\r\n "
                    + documentIri
                    + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
                "Hypergraph Database Import - Failed",
                JOptionPane.ERROR_MESSAGE);
            return false;
          } // else continue import
        }
        System.out.println("Saving with documentIRI: " + documentIri);
        // logger.info("Saving with documentIRI: " + documentIri);
        // + ont.getOntologyID().getOntologyIRI().getFragment());

        man.setOntologyDocumentIRI(ont, documentIri);
        getModelManager().save(ont);
        int durationSecs = (int) ((System.currentTimeMillis() - startTime) / 1000);
        message =
            "Hypergraph Database Import Success.\n"
                + "Saving took "
                + durationSecs
                + " seconds for "
                + ont.getAxiomCount()
                + " axioms.\n"
                + "You are still working with the in-memory ontology.\n"
                + "Do you wish to use the database-backed ontology now?";
        int useHGOnto =
            JOptionPane.showConfirmDialog(
                getWorkspace(),
                message,
                "Hypergraph Database Import Success",
                JOptionPane.YES_NO_OPTION);
        addRecent(documentIri.toURI());
        if (useHGOnto == JOptionPane.YES_OPTION) {
          // load the ontology from hypergraph and close
          getModelManager().reload(ont);
        } else {
          man.setOntologyFormat(ont, oldFormat);
          man.setOntologyDocumentIRI(ont, oldDocumentIRI);
        }
        return true;
      }
    } else {
      // FILE BASED FORMAT
      File file = getSaveAsOWLFile(ont);
      if (file != null) {
        // Do the following only if not database backed.
        man.setOntologyFormat(ont, format);
        man.setOntologyDocumentIRI(ont, IRI.create(file));
        try {
          getModelManager().save(ont);
          addRecent(file.toURI());
        } finally {
          if (ont instanceof HGDBOntology) {
            man.setOntologyFormat(ont, oldFormat);
            man.setOntologyDocumentIRI(ont, oldDocumentIRI);
          }
        }
        return true;
      } else {
        System.err.println("No valid file specified for the save as operation - quitting");
        // logger.warn("No valid file specified for the save as operation - quitting");
        return false;
      }
    }
  }
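The document-IRI selection inside hghandleSaveAs (probe hgdb://anonymous-N for anonymous ontologies, otherwise convert the ontology's default document IRI to the hgdb scheme) can be read in isolation as the sketch below; pickDocumentIRI is a hypothetical helper extracted for illustration, not part of the plugin:

  // Hypothetical helper mirroring the document-IRI selection logic in hghandleSaveAs.
  static IRI pickDocumentIRI(OWLOntology ont, HGDBOntologyRepository repo) {
    if (ont.getOntologyID().isAnonymous()) {
      // Probe hgdb://anonymous-0, hgdb://anonymous-1, ... until an unused IRI is found.
      int i = 0;
      IRI candidate;
      do {
        candidate = IRI.create("hgdb://anonymous-" + i++);
      } while (repo.existsOntologyByDocumentIRI(candidate));
      return candidate;
    }
    // Named ontology: reuse its default document IRI under the hgdb:// scheme.
    return HGDBOntologyFormat.convertToHGDBDocumentIRI(
        ont.getOntologyID().getDefaultDocumentIRI());
  }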