Example #1
0
 @Override
 protected boolean exec() {
   Timer t = new Timer();
   if (ontology == null) {
     lSigExtractor = null;
     return true;
   }
   if (integrateRangesFirst) {
     // replace the ontology with its normalised version so that range axioms are
     // integrated before the L-signature is extracted
     OWLNormalization4MORe normalization =
         new OWLNormalization4MORe(ontology, true, false, false);
     Set<OWLAxiom> axioms = normalization.getNormalizedOntology();
     try {
       OWLOntologyManager manager = ontology.getOWLOntologyManager();
       ontology = manager.createOntology();
       manager.addAxioms(ontology, axioms);
     } catch (OWLOntologyCreationException e) {
       e.printStackTrace();
       lSigExtractor = null;
       return true;
     }
   }
   lSigExtractor.findLsignature(ontology, fragment);
   if (!integrateRangesFirst) stats.updateNelkAxioms(lSigExtractor.nAxiomsInFragment());
   Logger_MORe.logDebug(
       t.duration() + "s to find Lsignature with integrateRangesFirst=" + integrateRangesFirst);
   return true;
 }
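The Timer type used throughout these snippets is never shown. A minimal sketch consistent with its call sites (reset(), and duration() reported in seconds) might look like the following; the implementation details are assumptions inferred from usage, not the actual MORe/PAGOdA utility class.

public class Timer {
  // assumed implementation: wall-clock elapsed time since construction or the last reset()
  private long start = System.nanoTime();

  public void reset() {
    start = System.nanoTime();
  }

  // elapsed seconds, matching log lines such as "...s to find Lsignature"
  public double duration() {
    return (System.nanoTime() - start) / 1e9;
  }
}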
Example #2
0
  @Override
  public void evaluate(QueryRecord queryRecord) {
    OWLOntology knowledgebase = relevantPart(queryRecord);

    if (knowledgebase == null) {
      Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
      return;
    }

    int aboxCount = knowledgebase.getABoxAxioms(true).size();
    Utility.logDebug(
        "ABox axioms: "
            + aboxCount
            + " TBox axioms: "
            + (knowledgebase.getAxiomCount() - aboxCount));

    Timer t = new Timer();
    Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
    try {
      summarisedChecker.check(queryRecord.getGapAnswers());
    } finally {
      // dispose the checker even if check(...) throws
      summarisedChecker.dispose();
    }
    Utility.logDebug("Total time for full reasoner: " + t.duration());
    queryRecord.markAsProcessed();
    Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
  }
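Because the checker must be disposed even when check(...) fails, a try-with-resources adapter is one way to make that guarantee explicit. The sketch below assumes only the Checker calls visible above (check(AnswerTuples), which the original code shows returning an int, and dispose()); the adapter class itself is hypothetical.

final class ClosableChecker implements AutoCloseable {
  private final Checker delegate;

  ClosableChecker(Checker delegate) {
    this.delegate = delegate;
  }

  int check(AnswerTuples answers) {
    return delegate.check(answers);
  }

  @Override
  public void close() {
    delegate.dispose(); // runs automatically at the end of a try-with-resources block
  }
}

With this adapter, the block above becomes try (ClosableChecker checker = new ClosableChecker(new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()))) { checker.check(queryRecord.getGapAnswers()); } and disposal can no longer be skipped.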
Example #3
0
 private void queryUpperBound(
     BasicQueryEngine upperStore,
     QueryRecord queryRecord,
     String queryText,
     String[] answerVariables) {
   AnswerTuples rlAnswer = null;
   try {
     Utility.logDebug(queryText);
     rlAnswer = upperStore.evaluate(queryText, answerVariables);
     // 't' is a shared Timer field of the enclosing class, reset by the caller
     // (relevantPart) before the block of upper-bound queries
     Utility.logDebug(t.duration());
     queryRecord.updateUpperBoundAnswers(rlAnswer);
   } finally {
     if (rlAnswer != null) rlAnswer.dispose();
   }
 }
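The evaluate-then-dispose pattern above recurs in several of these methods. A helper like the following would centralise it; it assumes only the signatures visible in these snippets (BasicQueryEngine.evaluate(String, String[]) and AnswerTuples.dispose()), and the helper itself is hypothetical.

static void withAnswers(
    BasicQueryEngine store,
    String queryText,
    String[] answerVariables,
    java.util.function.Consumer<AnswerTuples> consumer) {
  AnswerTuples answers = null;
  try {
    answers = store.evaluate(queryText, answerVariables);
    consumer.accept(answers);
  } finally {
    // guarantee disposal whether or not the consumer throws
    if (answers != null) answers.dispose();
  }
}

The body of queryUpperBound then reduces to withAnswers(upperStore, queryText, answerVariables, queryRecord::updateUpperBoundAnswers);.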
Example #4
0
  public OWLOntology findLsignature(
      OWLOntology ontology, LogicFragment fragment, Statistics stats) {
    Timer t = new Timer();
    this.stats = stats;
    Logger_MORe.logInfo("extracting " + fragment.toString() + "-signature");
    OWLOntology ret = null;
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    try {
      ret = manager.createOntology();
      manager.addAxioms(ret, ontology.getAxioms());
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    lSignatureClasses = new HashSet<OWLClass>();
    lSignatureOther = new HashSet<OWLEntity>();
    compSignatureClasses = new HashSet<OWLClass>();
    compSignatureOther = new HashSet<OWLEntity>();

    LsignatureExtractorLauncher elkSignatureExtractorLauncher = null;
    LsignatureExtractorLauncher elkSignatureExtractorIntegratingRangesLauncher = null;
    LsignatureExtractorViaInverseRewritingLauncher elkSignatureExtractorRewritingInversesLauncher =
        null;

    ForkJoinPool executor = new ForkJoinPool();
    elkSignatureExtractorLauncher =
        new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, false);
    executor.execute(elkSignatureExtractorLauncher);

    if (ret != null) {
      // otherwise we have nowhere to return the axioms in the normalised ontologies necessary to
      // really classify all the extra classes in the lSignature
      if (rewriteInverses) {
        elkSignatureExtractorRewritingInversesLauncher =
            new LsignatureExtractorViaInverseRewritingLauncher(ontology, LogicFragment.ELK);
        executor.execute(elkSignatureExtractorRewritingInversesLauncher);
      }
      if (integrateRanges) {
        elkSignatureExtractorIntegratingRangesLauncher =
            new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, true);
        executor.execute(elkSignatureExtractorIntegratingRangesLauncher);
      }

      // check the output of the normal ELKsignature and cancel the other threads if the lSig is the
      // whole signature
      initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());

      if (compSignatureClasses.isEmpty())
        cancelTasks(
            elkSignatureExtractorIntegratingRangesLauncher,
            elkSignatureExtractorRewritingInversesLauncher);
      else {
        if (elkSignatureExtractorRewritingInversesLauncher != null
            && extendLsignature(
                    (LsignatureExtractor) elkSignatureExtractorRewritingInversesLauncher.join())
                > 0) {
          manager.addAxioms(
              ret,
              ((LsignatureExtractorViaInverseRewritingLauncher)
                      elkSignatureExtractorRewritingInversesLauncher)
                  .getOntology()
                  .getAxioms());
        }
        if (compSignatureClasses.isEmpty())
          cancelTasks(elkSignatureExtractorRewritingInversesLauncher);
        else if (elkSignatureExtractorIntegratingRangesLauncher != null
            && extendLsignature(
                    (LsignatureExtractor) elkSignatureExtractorIntegratingRangesLauncher.join())
                > 0) {
          manager.addAxioms(
              ret,
              ((LsignatureExtractorLauncher) elkSignatureExtractorIntegratingRangesLauncher)
                  .getOntology()
                  .getAxioms());
        }
      }
      stats.updateLsignatureSize(lSignatureClasses.size(), true);
    } else {
      ret = ontology;
      initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
    }

    Logger_MORe.logInfo(lSignatureClasses.size() + " classes in lSignature");
    Logger_MORe.logDebug(lSignatureClasses.toString());
    Logger_MORe.logInfo(compSignatureClasses.size() + " classes in compSignature");

    // might be a good idea to try to isolate extra axioms in the normalisation/rewriting - is this
    // possible/worth the effort?
    // check the order in which we try to extend the lSignature with each of the rewritten
    // ontologies and consider whether one may be better than the other
    Logger_MORe.logDebug(t.duration() + "s to find Lsignature");

    executor.shutdown(); // release the pool's worker threads; all tasks are joined or cancelled

    return ret;
  }
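findLsignature races one mandatory extractor task against two optional ones and cancels whatever is no longer needed. Stripped of the OWL machinery, the control flow looks like the sketch below; the task payloads and class name are placeholders, not the real launchers.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

class RaceSketch {
  static void run() {
    ForkJoinPool executor = new ForkJoinPool();
    RecursiveTask<Integer> primary = task(1);
    RecursiveTask<Integer> fallback = task(2);
    executor.execute(primary);
    executor.execute(fallback);

    int primaryResult = primary.join(); // block until the mandatory task finishes
    if (primaryResult > 0) {
      fallback.cancel(true); // the primary result suffices: stop the optional task
    } else {
      System.out.println("fallback = " + fallback.join());
    }
    executor.shutdown();
  }

  private static RecursiveTask<Integer> task(final int id) {
    return new RecursiveTask<Integer>() {
      @Override
      protected Integer compute() {
        return id; // placeholder work
      }
    };
  }
}

Note that a cancelled ForkJoinTask must not be joined afterwards (join would throw CancellationException), which is why the original only joins the launchers it has not cancelled.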
Example #5
0
    @Override
    protected boolean exec() {
      Timer t = new Timer();
      if (ontology == null) {
        extractor = null;
        return true;
      }
      IRI iri =
          IRI.create("http://www.cs.ox.ac.uk/isg/tools/MORe/ontologies/inverseRewritingModule.owl");
      extractor.findLsignature(ontology, LogicFragment.SHOIQ);
      if (containsNonInternalClasses(extractor.getCompSignature())) {
        // the ontology goes beyond SHOIQ, so we need to work with a SHOIQ module
        // rather than the whole ontology
        Set<OWLEntity> aux = getNonInternalClasses(extractor.getLsignature());
        if (aux.isEmpty()) {
          extractor = null;
          Logger_MORe.logDebug(
              t.duration()
                  + "s to find Lsignature with inverseRewriting (failed - empty SHOIQ-signature)");
          return true;
        }
        SyntacticLocalityModuleExtractor moduleExtractor =
            new SyntacticLocalityModuleExtractor(manager, ontology, ModuleType.BOT);
        try {
          ontology = moduleExtractor.extractAsOntology(aux, iri);
        } catch (OWLOntologyCreationException e1) {
          extractor = null;
          e1.printStackTrace();
          Logger_MORe.logDebug(
              t.duration()
                  + "s to find Lsignature with inverseRewriting (failed - exception creating a SHOIQ module)");
          return true;
        }
      }

      // if we get this far then we have a nonempty ontology (maybe a module) that we need to
      // normalise and then rewrite
      OWLNormalization4MORe normalization = new OWLNormalization4MORe(ontology, true, true, true);
      Rewriter rewriter =
          new Rewriter(normalization.getNormalizedOntology(), normalization.getSortedGCIs());
      if (manager.contains(iri)) manager.removeOntology(ontology);
      Set<OWLAxiom> rewrittenAxioms = rewriter.getRewrittenOntology();
      if (!rewriter.anyRewrittenRoles()) {
        extractor = null;
        Logger_MORe.logDebug(
            t.duration()
                + "s to find Lsignature with inverseRewriting (failed - could not rewrite any roles)");
        return true;
      }
      try {
        ontology = manager.createOntology();
        manager.addAxioms(ontology, rewrittenAxioms);
        extractor = new LsignatureExtractor_reducedGreedyness();
        extractor.findLsignature(ontology, fragment);
      } catch (OWLOntologyCreationException e1) {
        extractor = null;
        e1.printStackTrace();
        Logger_MORe.logDebug(
            t.duration()
                + "s to find Lsignature with inverseRewriting (failed - exception creating ontology for rewritten axioms)");
        return true;
      }
      Logger_MORe.logDebug(t.duration() + "s to find Lsignature with inverseRewriting");
      return true;
    }
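The module-extraction step above is the standard OWL API syntactic-locality mechanism. A minimal, self-contained sketch of that one step, assuming the OWL API 3.x modularity tools and using only the calls that appear in exec() (the three-argument constructor and extractAsOntology), is shown below; the wrapper method and seed signature are illustrative.

import java.util.Set;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import uk.ac.manchester.cs.owlapi.modularity.ModuleType;
import uk.ac.manchester.cs.owlapi.modularity.SyntacticLocalityModuleExtractor;

class ModuleSketch {
  // extract a bottom-locality (BOT) module for a seed signature into a fresh ontology;
  // the module preserves all entailments of the original ontology over the seed entities
  static OWLOntology extractBotModule(
      OWLOntologyManager manager, OWLOntology ontology, Set<OWLEntity> seed, IRI moduleIri)
      throws OWLOntologyCreationException {
    SyntacticLocalityModuleExtractor moduleExtractor =
        new SyntacticLocalityModuleExtractor(manager, ontology, ModuleType.BOT);
    return moduleExtractor.extractAsOntology(seed, moduleIri);
  }
}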
Example #6
0
  private void materialiseMultiStage(
      MultiStageUpperProgram program,
      Treatment4Classification treatment,
      GapByStore4ID_registerInfoAboutInstantiationIndividualsOnly gap) {
    String programName = "multi-stage upper program";
    Logger_MORe.logInfo(name + " store is materialising " + programName + " ...");
    Timer t = new Timer();

    long tripleCountBeforeMat = 0;

    program.saveDatalogRules(Utility_PAGOdA.TempDirectory + "/multi_datalog.dlog");

    Collection<Violation> violations = null;
    int iteration = 0;
    Timer subTimer = new Timer();
    boolean incrementally = false;
    try {
      while (true) {
        long oldTripleCount = store.getTriplesCount();

        subTimer.reset();
        Logger_MORe.logInfo("Iteration " + ++iteration + ": ");

        incrementally = (iteration != 1);

        if (incrementally) gap.compileFromFile(null);
        else {
          tripleCountBeforeMat = oldTripleCount;
          gap.compileFromFile(new File(Utility_PAGOdA.TempDirectory + "/multi_datalog.dlog"));
        }
        Utility.printAllTriples(getDataStore()); // debugging aid: dump the store each iteration
        // PAGOdA does not add the gap when doing multi-stage because the multi-stage store
        // cannot be used for tracking, but we still want to register which instantiation
        // individuals have a gap, to detect classes that may already be fully classified.
        // This is an alternative to addGapBackTo that records the gap information without
        // adding gap tuples to the store.
        gap.registerGapTuples();

        long tripleCount = store.getTriplesCount();
        Logger_MORe.logDebug(
            name
                + " store after materialising datalog-rules: "
                + tripleCount
                + " ("
                + (tripleCount - oldTripleCount)
                + " new)");
        Logger_MORe.logDebug("Time to materialise datalog-rules: " + subTimer.duration());

        subTimer.reset();
        if ((violations = program.isIntegrated(this, incrementally)) == null
            || violations.size() == 0) {
          store.clearRulesAndMakeFactsExplicit();
          Logger_MORe.logDebug(
              name
                  + " store after materialising "
                  + programName
                  + ": "
                  + tripleCount
                  + " ("
                  + (tripleCount - tripleCountBeforeMat)
                  + " new)");
          Logger_MORe.logInfo(
              name
                  + " store is DONE for multi-stage materialising in "
                  + t.duration()
                  + " seconds.");
          return;
        }
        Logger_MORe.logDebug("Time to detect violations: " + subTimer.duration());

        store.makeFactsExplicit();

        subTimer.reset();
        oldTripleCount = store.getTriplesCount();
        for (Violation v : violations) {
          Timer localTimer = new Timer();
          int number = v.size();
          long vOldCounter = store.getTriplesCount();

          treatment.makeSatisfied(v, gap);

          Logger_MORe.logDebug(
              "Time to make the constraint satisfied: "
                  + localTimer.duration()
                  + " "
                  + number
                  + " tuples for "
                  + v.getConstraint());
          Logger_MORe.logDebug(
              "tuple number: "
                  + v.size()
                  + " before: "
                  + vOldCounter
                  + " after: "
                  + store.getTriplesCount()
                  + " ("
                  + (store.getTriplesCount() - vOldCounter)
                  + " new) .");
        }
        Logger_MORe.logDebug(
            name
                + " store after adding facts for violations: "
                + (tripleCount = store.getTriplesCount())
                + " ("
                + (tripleCount - oldTripleCount)
                + " new)");
        Logger_MORe.logDebug("Time to add triples for violations: " + subTimer.duration());
      }
    } catch (JRDFStoreException e) {
      e.printStackTrace();
    }
  }
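Reduced to its control flow, materialiseMultiStage is a repair-to-fixpoint loop: materialise, look for constraint violations, repair each one, and repeat until none remain. The skeleton below captures that shape; the interface and all names are placeholders for the MORe/PAGOdA types used above, not real APIs.

import java.util.Collection;

interface RepairableProgram {
  void materialise(boolean incrementally);
  Collection<String> findViolations(boolean incrementally); // null or empty means integrated
  void repair(String violation);
}

class FixpointSketch {
  static void runToFixpoint(RepairableProgram program) {
    int iteration = 0;
    while (true) {
      boolean incrementally = (++iteration != 1); // only the first round is a full pass
      program.materialise(incrementally);
      Collection<String> violations = program.findViolations(incrementally);
      if (violations == null || violations.isEmpty()) return; // fixpoint reached
      for (String v : violations) program.repair(v);
    }
  }
}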
Example #7
0
  protected Set<?>[] materialise(
      MultiStageUpperProgram4Classification program,
      Treatment treatment,
      boolean registerPredicatesWithGap) {
    // based on materialise(MultiStageUpperProgram, Treatment, GapByStore4ID)

    Set<?>[] ret = new Set<?>[3];
    if (registerPredicatesWithGap) {
      ret[0] = new HashSet<OWLClass>(); // classes with candidate subsumers
      ret[1] = new HashSet<String>(); // potentiallyUnsatClasses
      ret[2] = new HashSet<String>(); // candidate subsumers // do I even need to store these?
    }

    String programName = "multi-stage upper program";
    Logger_MORe.logInfo(name + " store is materialising " + programName + " ...");
    Timer t = new Timer();

    long tripleCountBeforeMat = 0;

    Collection<Violation> violations = null;
    int iteration = 0;
    Timer subTimer = new Timer();
    boolean incrementally = false;
    TupleIterator iter = null;
    try {
      while (true) {
        long oldTripleCount = store.getTriplesCount();

        subTimer.reset();
        Logger_MORe.logInfo("Iteration " + ++iteration + ": ");

        incrementally = (iteration != 1);

        if (incrementally) store.setNumberOfThreads(1);
        else {
          tripleCountBeforeMat = oldTripleCount;
          store.importFiles(
              new File[] {new File(program.getOutputPath())}, new Prefixes(), UpdateType.Add, true);
        }
        store.applyReasoning(incrementally);
        store.setNumberOfThreads(matNoOfThreads);

        if (registerPredicatesWithGap) {
          // here we are basically imitating GapByStore4ID; couldn't we just add the relevant
          // methods to that class and use that class here?

          try {
            iter =
                internal_evaluateAgainstIDBs(
                    "select ?x ?z where { ?x "
                        + MyPrefixes.PAGOdAPrefixes.expandText("rdf:type")
                        + " ?z . }");
            for (long multi = iter.open(); multi != 0; multi = iter.getNext()) {
              OWLClass c =
                  indManager.getClass4Individual(
                      RDFoxTripleManager.getRawTerm(iter.getResource(0)));
              if (c != null) {
                String s = RDFoxTripleManager.getRawTerm(iter.getResource(1));
                if (s.equals(MyPrefixes.PAGOdAPrefixes.expandText("owl:Nothing"))) {
                  ((Set<String>) ret[1]).add(RDFoxTripleManager.getRawTerm(iter.getResource(0)));
                  ((Set<OWLClass>) ret[0]).add(c);
                } else {
                  ((Set<OWLClass>) ret[0]).add(c);
                  ((Set<String>) ret[2]).add(RDFoxTripleManager.getRawTerm(iter.getResource(1)));
                }
              }
            }
          } catch (JRDFStoreException e) {
            e.printStackTrace();
          } finally {
            // dispose exactly once; the finally block also runs after the catch
            if (iter != null) iter.dispose();
          }
        }

        long tripleCount = store.getTriplesCount();
        Logger_MORe.logDebug(
            name
                + " store after materialising datalog-rules: "
                + tripleCount
                + " ("
                + (tripleCount - oldTripleCount)
                + " new)");
        Logger_MORe.logDebug("Time to materialise datalog-rules: " + subTimer.duration());

        subTimer.reset();
        // TODO revise this chunk to make sure inconsistencies do not make us stop materialising
        // FIXME
        if ((violations = program.isIntegrated(this, incrementally)) == null
            || violations.size() == 0) {
          store.clearRulesAndMakeFactsExplicit();
          Logger_MORe.logDebug(
              name
                  + " store after materialising "
                  + programName
                  + ": "
                  + tripleCount
                  + " ("
                  + (tripleCount - tripleCountBeforeMat)
                  + " new)");
          Logger_MORe.logInfo(
              name
                  + " store is DONE for multi-stage materialising in "
                  + t.duration()
                  + " seconds.");
          return ret;
        }
        Logger_MORe.logDebug("Time to detect violations: " + subTimer.duration());

        store.makeFactsExplicit();

        subTimer.reset();
        oldTripleCount = store.getTriplesCount();
        for (Violation v : violations) {
          Timer localTimer = new Timer();
          int number = v.size();
          long vOldCounter = store.getTriplesCount();

          if (registerPredicatesWithGap) {
            for (AnswerTuple tuple :
                ((Treatment4Classification) treatment).makeSatisfiedAndReturnAddedTuples(v)) {
              OWLClass c = indManager.getClass4Individual(tuple.getRawTerm(0));
              if (c != null) {
                String s = tuple.getRawTerm(1);
                if (s.equals(MyPrefixes.PAGOdAPrefixes.expandText("owl:Nothing"))) {
                  ((Set<String>) ret[1]).add(tuple.getRawTerm(0));
                  ((Set<OWLClass>) ret[0]).add(c);
                } else {
                  ((Set<OWLClass>) ret[0]).add(c);
                  ((Set<String>) ret[2]).add(tuple.getRawTerm(1));
                }
              }
            }
          } else {
            if (!treatment.makeSatisfied(v)) {
              Logger_MORe.logInfo(
                  name
                      + " store could not make a violation satisfied during multi-stage materialisation, but we'll keep going!");
            }
          }

          Logger_MORe.logDebug(
              "Time to make the constraint satisfied: "
                  + localTimer.duration()
                  + " "
                  + number
                  + " tuples for "
                  + v.getConstraint());
          Logger_MORe.logDebug(
              "tuple number: "
                  + v.size()
                  + " before: "
                  + vOldCounter
                  + " after: "
                  + store.getTriplesCount()
                  + " ("
                  + (store.getTriplesCount() - vOldCounter)
                  + " new) .");
        }
        Logger_MORe.logDebug(
            name
                + " store after adding facts for violations: "
                + (tripleCount = store.getTriplesCount())
                + " ("
                + (tripleCount - oldTripleCount)
                + " new)");
        Logger_MORe.logDebug("Time to add triples for violations: " + subTimer.duration());
      }
    } catch (JRDFStoreException e) {
      e.printStackTrace();
    }
    return ret;
  }
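The Set<?>[] result is what forces the unchecked casts on ret[0]..ret[2] above. A small typed holder is one way to avoid them; the class below is hypothetical, with fields named after the comments in the original array setup.

import java.util.HashSet;
import java.util.Set;
import org.semanticweb.owlapi.model.OWLClass;

final class ClassificationGapResult {
  // mirrors ret[0]: classes with candidate subsumers
  final Set<OWLClass> classesWithCandidateSubsumers = new HashSet<OWLClass>();
  // mirrors ret[1]: potentially unsatisfiable classes
  final Set<String> potentiallyUnsatClasses = new HashSet<String>();
  // mirrors ret[2]: candidate subsumers
  final Set<String> candidateSubsumers = new HashSet<String>();
}

materialise(...) could then fill and return one instance instead of the array, and the two branches that currently cast ret[i] would write to the named fields directly.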
Example #8
0
  private OWLOntology relevantPart(QueryRecord queryRecord) {
    AnswerTuples rlAnswer = null, elAnswer = null;

    t.reset();
    try {
      rlAnswer =
          rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
      Utility.logDebug(t.duration());
      queryRecord.updateLowerBoundAnswers(rlAnswer);
    } finally {
      if (rlAnswer != null) rlAnswer.dispose();
    }
    queryRecord.addProcessingTime(Step.LowerBound, t.duration());

    t.reset();
    BasicQueryEngine upperStore =
        queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore;

    String[] extendedQuery = queryRecord.getExtendedQueryText();

    queryUpperBound(
        upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
    if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0]))
      queryUpperBound(upperStore, queryRecord, extendedQuery[0], queryRecord.getAnswerVariables());
    if (!queryRecord.processed() && queryRecord.hasNonAnsDistinguishedVariables())
      queryUpperBound(
          upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables());

    queryRecord.addProcessingTime(Step.UpperBound, t.duration());
    if (queryRecord.processed()) {
      queryRecord.setDifficulty(Step.UpperBound);
      return null;
    }

    t.reset();
    try {
      elAnswer =
          elLowerStore.evaluate(
              extendedQuery[0],
              queryRecord.getAnswerVariables(),
              queryRecord.getLowerBoundAnswers());
      Utility.logDebug(t.duration());
      queryRecord.updateLowerBoundAnswers(elAnswer);
    } finally {
      if (elAnswer != null) elAnswer.dispose();
    }
    queryRecord.addProcessingTime(Step.ELLowerBound, t.duration());

    if (queryRecord.processed()) {
      queryRecord.setDifficulty(Step.ELLowerBound);
      return null;
    }

    t.reset();

    QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);

    // a disabled Horn-specific branch used to extract against lazyUpperStore here;
    // the tracking store is now used for all queries
    OWLOntology knowledgebase =
        tracker.extract(trackingStore, consistency.getQueryRecords(), true);
    queryRecord.addProcessingTime(Step.Fragment, t.duration());

    if (knowledgebase.isEmpty() || queryRecord.isBottom()) return knowledgebase;

    if (program.getGeneral().isHorn()) return knowledgebase;

    // a disabled refinement step used to follow here: for non-Horn queries of arity < 3,
    // an IterativeRefinement over the tracking store could shrink the fragment further
    // and, on failure, mark the query with Step.FragmentRefinement

    return knowledgebase;
  }
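Each bound computation in relevantPart follows the same reset/run/record rhythm. A helper like this would centralise it; Timer, Step, and QueryRecord.addProcessingTime are the types used above, while the helper itself and its Runnable shape are assumptions.

static void timeStep(Timer t, QueryRecord queryRecord, Step step, Runnable work) {
  t.reset();
  try {
    work.run();
  } finally {
    // record the elapsed time even if the step fails
    queryRecord.addProcessingTime(step, t.duration());
  }
}

The lower-bound block, for example, would become timeStep(t, queryRecord, Step.LowerBound, () -> { ... evaluate and update answers ... });.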