Example #1
  @Override
  public void evaluate(QueryRecord queryRecord) {
    OWLOntology knowledgebase = relevantPart(queryRecord);

    if (knowledgebase == null) {
      Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
      return;
    }

    int aboxcount = knowledgebase.getABoxAxioms(true).size();
    Utility.logDebug(
        "ABox axioms: "
            + aboxcount
            + " TBox axioms: "
            + (knowledgebase.getAxiomCount() - aboxcount));
    // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl");

    Timer t = new Timer();
    Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
    // The return value of check() (validNumber) was originally used to guard markAsProcessed()
    // (only when validNumber == 0); that guard is disabled, so the query is always marked as processed.
    summarisedChecker.check(queryRecord.getGapAnswers());
    summarisedChecker.dispose();
    Utility.logDebug("Total time for full reasoner: " + t.duration());
    queryRecord.markAsProcessed();
    Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
  }
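A minimal, self-contained sketch of the same ABox/TBox size report, assuming OWL API 3.x (where getABoxAxioms(boolean) is available) and a placeholder ontology path:

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class AxiomCountReport {
  public static void main(String[] args) throws OWLOntologyCreationException {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    // Placeholder path, not taken from the example above.
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File("ontology.owl"));
    // As in the example: count ABox axioms (including imports) and treat the remainder as TBox.
    int aboxCount = ontology.getABoxAxioms(true).size();
    System.out.println(
        "ABox axioms: " + aboxCount + " TBox axioms: " + (ontology.getAxiomCount() - aboxCount));
  }
}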
Example #2
 public void affiche() {
   OWLOntology ontology = getOntology();
   PelletReasoner reasoner = (PelletReasoner) this.reasoner;
   System.out.println("Ontologie : " + getNomFichier());
   System.out.println("Nombre d'axiomes : " + ontology.getAxiomCount());
   System.out.println("Nombre de classes : " + reasoner.getKB().getClasses().size());
   System.out.println("Nombre de propriétés : " + reasoner.getKB().getProperties().size());
   System.out.println("Nombre d'instances : " + reasoner.getKB().getIndividuals().size());
   System.out.println("Expressivité : " + reasoner.getKB().getExpressivity());
   System.out.println("Consistante : " + (reasoner.isConsistent() ? "OUI" : "NON"));
   System.out.println(
       "Nombre de classes insatisfiables : " + reasoner.getUnsatisfiableClasses().getSize());
 }
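The affiche() method above assumes this.reasoner already holds a Pellet reasoner. A minimal sketch of that setup, assuming the Pellet OWL API v3 bindings (com.clarkparsia.pellet.owlapiv3) and a placeholder ontology path:

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

public class PelletSummary {
  public static void main(String[] args) throws Exception {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    // Placeholder path; substitute the file reported by getNomFichier() above.
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File("ontology.owl"));
    PelletReasoner reasoner = PelletReasonerFactory.getInstance().createReasoner(ontology);
    System.out.println("Number of axioms: " + ontology.getAxiomCount());
    System.out.println("Number of classes: " + reasoner.getKB().getClasses().size());
    System.out.println("Consistent: " + (reasoner.isConsistent() ? "YES" : "NO"));
  }
}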
Example #3
 @Test
 public void testNamedOntologyToString() throws Exception {
   OWLOntologyManager man = Factory.getManager();
   IRI ontIRI = IRI("http://owlapi.sourceforge.net/ont");
   OWLOntology ont = man.createOntology(ontIRI);
   String s = ont.toString();
   String expected =
       "Ontology("
           + ont.getOntologyID().toString()
           + ") [Axioms: "
           + ont.getAxiomCount()
           + " Logical Axioms: "
           + ont.getLogicalAxiomCount()
           + "]";
   assertEquals(expected, s);
 }
Example #4
  @Override
  public boolean test() {
    // Print how many axioms the ontology contains
    System.out.println("Number of axioms: " + ontology.getAxiomCount());
    // Every ontology has a unique ID.
    System.out.println("Current Ontology ID: " + ontology.getOntologyID());
    // test of CRUD
    // test of Create
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    System.out.println("Create a new child ");
    factory.createChild("Nicola");
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    // test of Read
    Child c = factory.getChild("Nicola");
    System.out.println(c.getOwlIndividual());
    // TODO: test of Update

    // test of Delete
    c.delete();
    System.out.println("Number of children: " + factory.getAllChildInstances().size());

    // Save the ABox, TBox and RBox to separate files; fail the test if the split
    // ontologies cannot be created or cannot be saved.
    try {
      ontlgAbox = manager.createOntology(ontology.getABoxAxioms(true));
      ontlgTbox = manager.createOntology(ontology.getTBoxAxioms(true));
      ontlgRbox = manager.createOntology(ontology.getRBoxAxioms(true));
      manager.saveOntology(ontlgAbox, IRI.create(new File("individual/Abox.owl")));
      manager.saveOntology(ontlgTbox, IRI.create(new File("individual/Tbox.owl")));
      manager.saveOntology(ontlgRbox, IRI.create(new File("individual/Rbox.owl")));
    } catch (OWLOntologyCreationException | OWLOntologyStorageException e) {
      e.printStackTrace();
      return false;
    }
    return true;
  }
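The same ABox/TBox/RBox export can be factored into a reusable helper that makes sure the output directory exists before saving; a sketch under the same OWL API 3.x assumptions, reusing the file names from the example above:

import java.io.File;
import java.io.IOException;

import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;

public class BoxExporter {
  // Splits an ontology into its ABox, TBox and RBox and saves each part to its own file.
  public static void saveBoxes(OWLOntologyManager manager, OWLOntology ontology)
      throws IOException, OWLOntologyCreationException, OWLOntologyStorageException {
    File dir = new File("individual");
    if (!dir.exists() && !dir.mkdirs()) {
      throw new IOException("Could not create output directory: " + dir);
    }
    OWLOntology abox = manager.createOntology(ontology.getABoxAxioms(true));
    OWLOntology tbox = manager.createOntology(ontology.getTBoxAxioms(true));
    OWLOntology rbox = manager.createOntology(ontology.getRBoxAxioms(true));
    manager.saveOntology(abox, IRI.create(new File(dir, "Abox.owl")));
    manager.saveOntology(tbox, IRI.create(new File(dir, "Tbox.owl")));
    manager.saveOntology(rbox, IRI.create(new File(dir, "Rbox.owl")));
  }
}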
Example #5
  protected boolean hghandleSaveAs(OWLOntology ont, OWLOntologyFormat format) throws Exception {
    PHGDBOntologyManagerImpl man =
        (PHGDBOntologyManagerImpl) getModelManager().getOWLOntologyManager();
    OWLOntologyFormat oldFormat = man.getOntologyFormat(ont);
    IRI oldDocumentIRI = man.getOntologyDocumentIRI(ont);
    HGDBOntologyRepository repo = man.getOntologyRepository();
    if (oldFormat instanceof PrefixOWLOntologyFormat && format instanceof PrefixOWLOntologyFormat) {
      PrefixOWLOntologyFormat oldPrefixes = (PrefixOWLOntologyFormat) oldFormat;
      for (String name : oldPrefixes.getPrefixNames()) {
        ((PrefixOWLOntologyFormat) format).setPrefix(name, oldPrefixes.getPrefix(name));
      }
    }
    if (format instanceof HGDBOntologyFormat) {
      // Case A1) OntologyHGDB -> Repository, same name: already in the repository
      // Case A2) OntologyHGDB -> Repository: copy ontology in the repository
      // NOT CURRENTLY SUPPORTED DIALOG
      if (ont instanceof HGDBOntology) {
        String message =
            "This ontology is database backed and does not need to be saved to the database again.\n"
                + "All changes to it are instantly persisted in the Hypergraph Ontology Repository.\n"
                + "A copy operation to a different name in the repository is currently not supported.";
        System.err.println(message);
        // logger.warn(message);
        JOptionPane.showMessageDialog(
            getWorkspace(),
            message,
            "Hypergraph Database Backed Ontology",
            JOptionPane.ERROR_MESSAGE);

        return false;
      } else {
        // IN MEMORY ONTOLOGY
        // Case B) OntologyImpl -> Repository: Import
        String message =
            "This in-memory ontology will be imported into the Hypergraph Ontology Repository.\n"
                + "This process is estimated to take one minute per 35000 Axioms. \n"
                + ont.getOntologyID().toString()
                + " has "
                + ont.getAxiomCount()
                + " Axioms. \n"
                + "Please be patient. A Success Dialog will pop up when the process is finished.";
        System.err.println(message);
        // logger.info(message);
        JOptionPane.showMessageDialog(
            getWorkspace(), message, "Hypergraph Database Import", JOptionPane.INFORMATION_MESSAGE);
        System.out.println("IMPORTING INTO HYPERGRAPH " + ont.getOntologyID());
        // logger.info("IMPORTING INTO HYPERGRAPH " +
        // ont.getOntologyID());
        long startTime = System.currentTimeMillis();
        man.setOntologyFormat(ont, format);
        // TODO OPEN A DIALOG FOR SELECTING A documentIRI
        IRI documentIri;
        if (ont.getOntologyID().isAnonymous()) {
          int i = 0;
          do {
            documentIri = IRI.create("hgdb://" + "anonymous-" + i);
            i++;
          } while (repo.existsOntologyByDocumentIRI(documentIri));

        } else {
          // HGDBOntologyFormat.convertToHGDBDocumentIRI(ontologyIRI)
          // IRI defaultIri =
          // ont.getOntologyID().getDefaultDocumentIRI();
          // String defaultIriStr = defaultIri.toString();
          // int schemaLength = defaultIri.getScheme().length();
          // String hgdbIRIStr = "hgdb" +
          // defaultIriStr.toString().substring(schemaLength);
          documentIri =
              HGDBOntologyFormat.convertToHGDBDocumentIRI(
                  ont.getOntologyID().getDefaultDocumentIRI());
          //
          // Check if exists by ID or Document IRI
          //
          if (repo.existsOntology(ont.getOntologyID())) {
            JOptionPane.showMessageDialog(
                getWorkspace(),
                "An ontology with the same ID already exists in the hypergraph repository."
                    + "\r\n "
                    + ont.getOntologyID()
                    + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
                "Hypergraph Database Import - Failed",
                JOptionPane.ERROR_MESSAGE);
            return false;
          } else if (repo.existsOntologyByDocumentIRI(documentIri)) {
            JOptionPane.showMessageDialog(
                getWorkspace(),
                "An ontology with the same documentIRI already exists in the hypergraph repository."
                    + "\r\n "
                    + documentIri
                    + "\r\n If you wish to replace, delete the old one now using: HypergraphDB/Delete",
                "Hypergraph Database Import - Failed",
                JOptionPane.ERROR_MESSAGE);
            return false;
          } // else continue import
        }
        System.out.println("Saving with documentIRI: " + documentIri);
        // logger.info("Saving with documentIRI: " + documentIri);
        // + ont.getOntologyID().getOntologyIRI().getFragment());

        man.setOntologyDocumentIRI(ont, documentIri);
        getModelManager().save(ont);
        int durationSecs = (int) ((System.currentTimeMillis() - startTime) / 1000);
        message =
            "Hypergraph Database Import Success.\n"
                + "Saving took "
                + durationSecs
                + " seconds for "
                + ont.getAxiomCount()
                + " Axioms. \n"
                + "You are still working with the in-memory ontology. \n "
                + "Do you wish to use the database backed ontology now?";
        int useHGOnto =
            JOptionPane.showConfirmDialog(
                getWorkspace(),
                message,
                "Hypergraph Database Import Success",
                JOptionPane.YES_NO_OPTION);
        addRecent(documentIri.toURI());
        if (useHGOnto == JOptionPane.YES_OPTION) {
          // load the ontology from hypergraph and close
          getModelManager().reload(ont);
        } else {
          man.setOntologyFormat(ont, oldFormat);
          man.setOntologyDocumentIRI(ont, oldDocumentIRI);
        }
        return true;
      }
    } else {
      // FILE BASED FORMAT
      File file = getSaveAsOWLFile(ont);
      if (file != null) {
        // Do the following only if not database backed.
        man.setOntologyFormat(ont, format);
        man.setOntologyDocumentIRI(ont, IRI.create(file));
        try {
          getModelManager().save(ont);
          addRecent(file.toURI());
        } finally {
          if (ont instanceof HGDBOntology) {
            man.setOntologyFormat(ont, oldFormat);
            man.setOntologyDocumentIRI(ont, oldDocumentIRI);
          }
        }
        return true;
      } else {
        System.err.println("No valid file specified for the save as operation - quitting");
        // logger.warn("No valid file specified for the save as operation - quitting");
        return false;
      }
    }
  }
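The prefix-copying step at the top of hghandleSaveAs can be factored into a small helper; a sketch using only the PrefixOWLOntologyFormat calls already present above (package names assume OWL API 3.x):

import org.semanticweb.owlapi.model.OWLOntologyFormat;
import org.semanticweb.owlapi.vocab.PrefixOWLOntologyFormat;

public class PrefixCopier {
  // Copies every prefix declaration from the old format to the new one, mirroring
  // the loop in hghandleSaveAs; does nothing if either format does not support prefixes.
  public static void copyPrefixes(OWLOntologyFormat oldFormat, OWLOntologyFormat newFormat) {
    if (oldFormat instanceof PrefixOWLOntologyFormat
        && newFormat instanceof PrefixOWLOntologyFormat) {
      PrefixOWLOntologyFormat oldPrefixes = (PrefixOWLOntologyFormat) oldFormat;
      PrefixOWLOntologyFormat newPrefixes = (PrefixOWLOntologyFormat) newFormat;
      for (String name : oldPrefixes.getPrefixNames()) {
        newPrefixes.setPrefix(name, oldPrefixes.getPrefix(name));
      }
    }
  }
}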
  @Test
  public void testAttGroup() {

    try {

      Xsd2Owl converter = Xsd2OwlImpl.getInstance();

      URL url = converter.getSchemaURL("org/drools/shapes/xsd/attrGroup.xsd");
      Schema x = converter.parse(url);
      String tns = x.getTargetNamespace() + "#";

      OWLOntology onto = converter.transform(x, url, true, true);

      OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
      OWLDataFactory factory = manager.getOWLDataFactory();

      OWLClass ivl = factory.getOWLClass(IRI.create(tns, "IVL_RTO"));
      OWLClass xyz = factory.getOWLClass(IRI.create(tns, "XYZ.ABC"));
      OWLDatatype dec = factory.getOWLDatatype(IRI.create(tns, "Decimal"));
      OWLDatatype cd = factory.getOWLDatatype(IRI.create(tns, "Code"));
      OWLDatatype bool = OWL2DatatypeImpl.getDatatype(OWL2Datatype.XSD_BOOLEAN);

      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(ivl)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(cd)));

      OWLDataProperty lowIsInclusive =
          factory.getOWLDataProperty(IRI.create(tns, "lowIsInclusive"));
      OWLDataProperty highIsInclusive =
          factory.getOWLDataProperty(IRI.create(tns, "highIsInclusive"));
      OWLDataProperty highValue = factory.getOWLDataProperty(IRI.create(tns, "highValue"));
      OWLDataProperty highUnit = factory.getOWLDataProperty(IRI.create(tns, "highUnit"));
      OWLDataProperty numerator = factory.getOWLDataProperty(IRI.create(tns, "numerator"));
      OWLDataProperty denominator = factory.getOWLDataProperty(IRI.create(tns, "denominator"));
      OWLDataProperty lowNumerator = factory.getOWLDataProperty(IRI.create(tns, "lowNumerator"));
      OWLDataProperty lowDenominator =
          factory.getOWLDataProperty(IRI.create(tns, "lowDenominator"));
      OWLDataProperty highNumerator = factory.getOWLDataProperty(IRI.create(tns, "highNumerator"));
      OWLDataProperty highDenominator =
          factory.getOWLDataProperty(IRI.create(tns, "highDenominator"));

      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(lowIsInclusive)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(highIsInclusive)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(highValue)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(highUnit)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(numerator)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(denominator)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(lowNumerator)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(lowDenominator)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(highNumerator)));
      assertTrue(onto.containsAxiom(factory.getOWLDeclarationAxiom(highDenominator)));

      assertEquals(6, onto.getAxiomCount(AxiomType.DATA_PROPERTY_DOMAIN));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyDomainAxiom(lowIsInclusive, ivl)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyDomainAxiom(highIsInclusive, ivl)));
      assertTrue(
          onto.containsAxiom(
              factory.getOWLDataPropertyDomainAxiom(
                  lowNumerator, factory.getOWLObjectUnionOf(ivl, xyz))));
      assertTrue(
          onto.containsAxiom(
              factory.getOWLDataPropertyDomainAxiom(
                  lowDenominator, factory.getOWLObjectUnionOf(ivl, xyz))));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyDomainAxiom(highNumerator, ivl)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyDomainAxiom(highDenominator, ivl)));

      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(lowIsInclusive, bool)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(highIsInclusive, bool)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(highValue, dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(highUnit, cd)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(numerator, dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(denominator, dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(lowNumerator, dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(lowDenominator, dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(highNumerator, dec)));
      assertTrue(onto.containsAxiom(factory.getOWLDataPropertyRangeAxiom(highDenominator, dec)));

      assertEquals(6 + 1, onto.getSubClassAxiomsForSubClass(ivl).size());
      assertEquals(2 + 1, onto.getSubClassAxiomsForSubClass(xyz).size());

    } catch (Exception e) {
      fail(e.getMessage());
    }
  }
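A minimal, self-contained illustration of the containsAxiom checks used in this test, building a tiny in-memory ontology instead of converting an XSD; the IRI and entity names here are hypothetical:

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class ContainsAxiomDemo {
  public static void main(String[] args) throws OWLOntologyCreationException {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLDataFactory factory = manager.getOWLDataFactory();
    OWLOntology onto = manager.createOntology(IRI.create("http://example.org/demo"));

    // Hypothetical entities, for illustration only.
    String tns = "http://example.org/demo#";
    OWLClass ivl = factory.getOWLClass(IRI.create(tns + "IVL_RTO"));
    OWLDataProperty lowIsInclusive = factory.getOWLDataProperty(IRI.create(tns + "lowIsInclusive"));

    manager.addAxiom(onto, factory.getOWLDeclarationAxiom(ivl));
    manager.addAxiom(onto, factory.getOWLDataPropertyDomainAxiom(lowIsInclusive, ivl));

    // The same kind of checks the test performs against the generated ontology.
    System.out.println(onto.containsAxiom(factory.getOWLDeclarationAxiom(ivl)));
    System.out.println(
        onto.containsAxiom(factory.getOWLDataPropertyDomainAxiom(lowIsInclusive, ivl)));
  }
}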