Example No. 1
  @Test
  public void test03() {
    File file = new File("TestData/EliminateTransTest03.owl");
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ontology;
    try {
      ontology = man.loadOntologyFromOntologyDocument(file);

      HornSHIQNormalizer normalizer = new HornSHIQNormalizer();
      OWLOntology normalizedOnt = normalizer.normalize(ontology);

      HornALCHIQTransNormalizer normalizer1 = new HornALCHIQTransNormalizer();
      OWLOntology normalizedOnt1 = normalizer1.normalize(normalizedOnt);

      HornALCHIQNormalizer normalizer2 = new HornALCHIQNormalizer();
      OWLOntology normalizedOnt2 = normalizer2.normalize(normalizedOnt1);
      man.saveOntology(
          normalizedOnt2, IRI.create(new File("TestData/EliminateTransTest03Norm.owl")));
    } catch (OWLOntologyCreationException e) {
      // The input ontology could not be loaded or parsed.
      e.printStackTrace();
    } catch (OWLOntologyStorageException e) {
      // The normalized ontology could not be written to disk.
      e.printStackTrace();
    }
  }
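The three normalization steps above (HornSHIQNormalizer, then HornALCHIQTransNormalizer, then HornALCHIQNormalizer) always run in the same order, so they can be factored into a small helper when several tests need them. The sketch below only rearranges the calls shown in the test; the NormalizationHelper class and the normalizeAndSave name are illustrative, and imports for the three normalizer classes are omitted because their package does not appear in the snippet.

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;

public final class NormalizationHelper {

  /** Loads an ontology, applies the three normalization steps from the test, and saves the result. */
  public static void normalizeAndSave(File input, File output)
      throws OWLOntologyCreationException, OWLOntologyStorageException {
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = man.loadOntologyFromOntologyDocument(input);

    // Step 1: normalize the Horn-SHIQ ontology.
    OWLOntology step1 = new HornSHIQNormalizer().normalize(ontology);
    // Step 2: eliminate transitivity axioms.
    OWLOntology step2 = new HornALCHIQTransNormalizer().normalize(step1);
    // Step 3: final Horn-ALCHIQ normalization.
    OWLOntology step3 = new HornALCHIQNormalizer().normalize(step2);

    // Mirrors the test: the manager that loaded the ontology also saves the final result.
    man.saveOntology(step3, IRI.create(output));
  }
}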
Example No. 2
  public void saveOntology() {
    try {
      // Saves the ontology back to the document it was originally loaded from.
      manager.saveOntology(topIxOnt);
    } catch (OWLOntologyStorageException ose) {
      System.err.println("Could not save ontology: " + ose.getMessage());
    }
  }
Example No. 3
  @Override
  public boolean test() {
    // Check if the ontology contains any axioms
    System.out.println("Number of axioms: " + ontology.getAxiomCount());
    // Every ontology has a unique ID.
    System.out.println("Current Ontology ID: " + ontology.getOntologyID());
    // test of CRUD
    // test of Create
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    System.out.println("Create a new child ");
    factory.createChild("Nicola");
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    // test of Read
    Child c = factory.getChild("Nicola");
    System.out.println(c.getOwlIndividual());
    // TODO: test of Update (see the hedged sketch after this method)

    // test of Delete
    c.delete();
    System.out.println("Number of children: " + factory.getAllChildInstances().size());

    // save ABox, TBox, RBox to separate files.
    try {
      ontlgAbox = manager.createOntology(ontology.getABoxAxioms(true));
      ontlgTbox = manager.createOntology(ontology.getTBoxAxioms(true));
      ontlgRbox = manager.createOntology(ontology.getRBoxAxioms(true));
    } catch (OWLOntologyCreationException e) {
      // Splitting the ontology into ABox/TBox/RBox ontologies failed; abort the test.
      e.printStackTrace();
      return false;
    }
    try {
      manager.saveOntology(ontlgAbox, IRI.create(new File("individual/Abox.owl")));
      manager.saveOntology(ontlgTbox, IRI.create(new File("individual/Tbox.owl")));
      manager.saveOntology(ontlgRbox, IRI.create(new File("individual/Rbox.owl")));
    } catch (OWLOntologyStorageException e) {
      // Writing one of the box ontologies to disk failed; abort the test.
      e.printStackTrace();
      return false;
    }
    return true;
  }
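The Update step is left as a TODO in the test above. A minimal sketch of one way to exercise it with the plain OWL API, bypassing the generated Child factory: it assumes getOwlIndividual() returns an OWLNamedIndividual and uses a hypothetical hasAge data property in the ontology's namespace (the OWLDataFactory, OWLNamedIndividual, OWLDataProperty, and OWLAxiom types are from org.semanticweb.owlapi.model).

    // Update sketch: assert a data property value on the child created above.
    // Assumptions: getOwlIndividual() returns an OWLNamedIndividual, and "hasAge" is a
    // hypothetical data property in the ontology's namespace.
    OWLDataFactory df = manager.getOWLDataFactory();
    OWLNamedIndividual nicola = (OWLNamedIndividual) c.getOwlIndividual();
    IRI base = ontology.getOntologyID().getOntologyIRI();
    OWLDataProperty hasAge = df.getOWLDataProperty(IRI.create(base + "#hasAge"));
    OWLAxiom update = df.getOWLDataPropertyAssertionAxiom(hasAge, nicola, 3);
    // addAxiom applies the change through the manager so the ontology is updated in place.
    manager.addAxiom(ontology, update);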
  private IRI executeRule(final Rule r, final IRI inputIRI) {
    try {
      PelletOptions.USE_ANNOTATION_SUPPORT = true;

      PelletOptions.TREAT_ALL_VARS_DISTINGUISHED = controller.isTreatAllVariablesDistinguished();

      QueryEngineType type = (QueryEngineType) controller.getQueryEngineType();

      final QueryExecution qe;
      final ByteArrayOutputStream w = new ByteArrayOutputStream();

      final Query qSelect = getSelectExampleQuery(r.getQuery());

      // The input ontology is needed by both the Pellet branch and the plain
      // Jena branch below, so load it before selecting the query engine.
      final OWLOntology queryOntology = getInputOntologyForRule(inputIRI);

      if (type.toPellet() != null) {

        final PelletReasoner reasoner =
            PelletReasonerFactory.getInstance().createReasoner(queryOntology);

        log.info("Ontology size: " + reasoner.getKB().getInfo());

        final Dataset ds = kb2ds(reasoner.getKB());

        final QueryExecution qeSelect =
            SparqlDLExecutionFactory.create(qSelect, ds, null, type.toPellet());

        final ResultSet rs = qeSelect.execSelect();
        controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

        qe =
            SparqlDLExecutionFactory.create(
                r.getQuery(), kb2ds(reasoner.getKB()), null, type.toPellet());
        qe.execConstruct().write(w);
      } else {
        final ByteArrayOutputStream w2 = new ByteArrayOutputStream();
        final Model model = ModelFactory.createDefaultModel();
        try {
          controller
              .getOWLOntologyManager()
              .saveOntology(queryOntology, new TurtleOntologyFormat(), w2);
          model.read(new ByteArrayInputStream(w2.toByteArray()), "", "TURTLE");

          final QueryExecution qeSelect = QueryExecutionFactory.create(qSelect, model);

          final ResultSet rs = qeSelect.execSelect();
          controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

          qe = QueryExecutionFactory.create(r.getQuery(), model);
          qe.execConstruct().write(w);
        } catch (OWLOntologyStorageException e) {
          // Serializing the query ontology to Turtle failed; report and abort this rule.
          controller.setStatus(e.getMessage());
          return null;
        }
      }

      final IRI outputIRI = getOntologyIRIForRuleName(r.getName());

      // Load the ontology generated by the CONSTRUCT query.
      final OWLOntology generatedOntology =
          controller
              .getOWLOntologyManager()
              .loadOntologyFromOntologyDocument(new ByteArrayInputStream(w.toByteArray()));
      controller.updateOntology(
          generatedOntology,
          outputIRI,
          inputIRI,
          controller.getRuleSpec().getResultFile(r).toURI());
      controller.setStatus("Rule " + r.getName() + " successfully executed");
      return outputIRI;
    } catch (OWLOntologyCreationException e1) {
      controller.setStatus(e1.getMessage());
      return null;
    }
  }