Code example #1
File: TestRuleEngine.java  Project: szarnyasg/corese
  @BeforeClass
  public static void init() throws EngineException {
    // Graph.setCompareIndex(true);
    QuerySolver.definePrefix("c", "http://www.inria.fr/acacia/comma#");

    graph = createGraph(true);
    Load load = Load.create(graph);

    load.load(data + "engine/ontology/test.rdfs");
    load.load(data + "engine/data/test.rdf");

    try {
      load.loadWE(data + "engine/rule/test2.brul");
      load.load(new FileInputStream(data + "engine/rule/meta.brul"), "meta.brul");
    } catch (LoadException e) {
      // the rule file could not be parsed; the tests that rely on it will fail
      e.printStackTrace();
    } catch (FileNotFoundException e) {
      // meta.brul is missing from the test data directory
      e.printStackTrace();
    }

    fengine = load.getRuleEngine();
    fengine.setSpeedUp(true);

    QueryProcess exec = QueryProcess.create(graph);
    rengine = Engine.create(exec);

    rengine.load(data + "engine/rule/test2.brul");
    rengine.load(data + "engine/rule/meta.brul");
  }
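The @BeforeClass block above only prepares the shared graph and the two rule engines; a test body would then trigger inference and query the enriched graph. A minimal sketch of such a test, reusing only fields and calls that appear in this listing (the method name testForwardChaining is an assumption, the query string is borrowed from code example #2 below, and no result count is asserted because it depends on the loaded data):

  @Test
  public void testForwardChaining() throws EngineException {
    // apply the rules loaded in init(); setSpeedUp(true) was enabled there
    fengine.process();

    String query =
        "prefix c: <http://www.inria.fr/acacia/comma#>"
            + "select * where { ?x c:hasGrandParent c:Pierre }";

    QueryProcess exec = QueryProcess.create(graph);
    Mappings map = exec.query(query);
    // inspect the solutions produced after forward chaining
    System.out.println(map.size());
  }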
Code example #2
File: TestRuleEngine.java  Project: szarnyasg/corese
  /** Rule engine with QueryExec on two graphs */
  @Test
  public void test6() {
    QuerySolver.definePrefix("c", "http://www.inria.fr/acacia/comma#");

    Graph g1 = createGraph(true);
    Graph g2 = createGraph(true);

    Load load1 = Load.create(g1);
    Load load2 = Load.create(g2);

    load1.load(data + "engine/ontology/test.rdfs");
    load2.load(data + "engine/data/test.rdf");

    QueryProcess exec = QueryProcess.create(g1);
    exec.add(g2);
    RuleEngine re = RuleEngine.create(g2, exec);
    // re.setOptimize(true);

    load2.setEngine(re);

    try {
      load2.loadWE(data + "engine/rule/test2.brul");
      load2.load(new FileInputStream(data + "engine/rule/meta.brul"), "meta.brul");
    } catch (LoadException e) {
      // the rule file could not be parsed; the assertions below will fail
      e.printStackTrace();
    } catch (FileNotFoundException e) {
      // meta.brul is missing from the test data directory
      e.printStackTrace();
    }

    Engine rengine = Engine.create(exec);

    rengine.load(data + "engine/rule/test2.brul");
    rengine.load(data + "engine/rule/meta.brul");

    String query =
        "prefix c: <http://www.inria.fr/acacia/comma#>"
            + "select     * where {"
            + "?x c:hasGrandParent c:Pierre "
            + "}";

    LBind bind = rengine.SPARQLProve(query);
    assertEquals("Result", 4, bind.size());
    // System.out.println(bind);

    re.process();

    try {
      Mappings map = exec.query(query);
      assertEquals("Result", 4, map.size());
      // System.out.println(map);
    } catch (EngineException e) {
      // the query is expected to succeed; an exception here means the test is broken
      fail(e.getMessage());
    }
  }
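The two-graph setup is the point of this test: g1 holds the ontology and g2 the data, the QueryProcess is created on g1 and extended with g2 via exec.add(g2), and RuleEngine.create(g2, exec) attaches the engine to g2 (presumably the graph that receives the inferred triples). The same query is then answered twice, once by the backward-chaining Engine through SPARQLProve and once by running re.process() and evaluating it as ordinary SPARQL, and both paths are expected to return the same four solutions.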
Code example #3
File: TestRuleEngine.java  Project: szarnyasg/corese
  RuleEngine testRules() throws LoadException {
    Graph g = createGraph();
    Load ld = Load.create(g);
    ld.loadWE(data + "comma/comma.rdfs");
    ld.loadWE(data + "comma/data");
    ld.loadWE(data + "comma/data2");
    try {
      ld.loadWE(data + "owlrule/owlrllite-junit.rul");
    } catch (LoadException e) {
      e.printStackTrace();
    }
    RuleEngine re = ld.getRuleEngine();
    return re;
  }
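testRules() only builds the engine from the comma test data and the OWL RL Lite rule file; running the rules is left to the caller. A hypothetical caller could look like this (the method name testOwlRlLite is an assumption, not taken from the project):

  @Test
  public void testOwlRlLite() throws LoadException {
    RuleEngine re = testRules();
    // apply the rules loaded from owlrllite-junit.rul
    re.process();
  }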
Code example #4
  /**
   * Transforms an EDGE request into a simple SPARQL query pushed to the remote producer. Results
   * are returned through the standard web service protocol.
   *
   * @param gNode graph variable if it exists, null otherwise
   * @param from "from named <g>" list
   * @param qEdge edge searched for
   * @param env query execution context (current variable values, etc.)
   * @return an iterator over graph entities
   */
  @Override
  public Iterable<Entity> getEdges(Node gNode, List<Node> from, Edge qEdge, Environment env) {
    // if gNode != null and from is not empty, this is a "from named" clause
    // if gNode == null and from is not empty, this is a plain "from" clause

    String query = getSparqlQuery(qEdge, env);
    Graph resGraph = Graph.create();
    Graph g = Graph.create();

    StopWatch sw = new StopWatch();
    sw.start();

    InputStream is = null;
    try {
      QueryProcess exec = QueryProcess.create(resGraph);

      if (query != null) {
        Mappings map = exec.query(query);

        //            logger.info("Received results in " + sw.getTime());

        String sparqlRes = RDFFormat.create(map).toString();
        //            System.out.println(XMLFormat.create(map));

        if (sparqlRes != null) {
          Load l = Load.create(g);
          is = new ByteArrayInputStream(sparqlRes.getBytes());
          l.load(is);
          //                logger.info("Results (cardinality " + g.size() + ") merged in  " +
          // sw.getTime() + " ms.");
        }
      }

    } catch (LoadException ex) {
      ex.printStackTrace();
    } catch (EngineException ex) {
      ex.printStackTrace();
    }
    //        for (Iterator<Entity> it = g.getEdges().iterator(); it.hasNext();) {
    //            Edge e = (Edge) it.next();
    //            System.out.println(e);
    //        }
    //
    return g.getEdges();
  }
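As written, the snippet evaluates the generated SPARQL query with a local QueryProcess over the empty resGraph, serializes the resulting Mappings to RDF via RDFFormat, reloads that serialization into the scratch graph g, and returns g.getEdges() to the caller; the StopWatch and the commented-out logger calls only trace that round trip. Note that the InputStream wrapping the serialized results is never closed, which is a small resource leak.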