@BeforeClass
  public static void setup() {
    // Shared dataset for the implicit-join tests: an in-memory transactional dataset.
    implJoin = DatasetFactory.createTxnMem();

    Node a = NodeFactory.createURI("http://a");
    Node b = NodeFactory.createURI("http://b");
    Node c = NodeFactory.createURI("http://c");
    Node p1 = NodeFactory.createURI("http://p1");
    Node p2 = NodeFactory.createURI("http://p2");
    Node o = NodeFactory.createLiteral("object");

    // a and b carry both predicates; c only p1 — gives asymmetric join/left-join results.
    DatasetGraph dsg = implJoin.asDatasetGraph();
    dsg.add(Quad.defaultGraphNodeGenerated, a, p1, o);
    dsg.add(Quad.defaultGraphNodeGenerated, a, p2, o);
    dsg.add(Quad.defaultGraphNodeGenerated, b, p1, o);
    dsg.add(Quad.defaultGraphNodeGenerated, b, p2, o);
    dsg.add(Quad.defaultGraphNodeGenerated, c, p1, o);

    // Sanity check: neither optimization is explicitly disabled in the global context.
    Assert.assertFalse(ARQ.isFalse(ARQ.optFilterImplicitJoin));
    Assert.assertFalse(ARQ.isFalse(ARQ.optImplicitLeftJoin));
  }
  @AfterClass
  public static void teardown() {
    if (implJoin != null) {
      implJoin.close();
      implJoin = null;
    }

    // Currently these optimizations are off by default
    Assert.assertFalse(ARQ.isFalse(ARQ.optFilterImplicitJoin));
    Assert.assertFalse(ARQ.isFalse(ARQ.optImplicitLeftJoin));
  }
示例#3
0
 /**
  * Evaluates an expression against a binding in a fresh, self-contained execution context.
  *
  * @param expr expression to evaluate
  * @param binding variable binding supplying values for the expression
  * @return the evaluated value
  */
 public static NodeValue eval(Expr expr, Binding binding) {
   // Copy the global context so this evaluation cannot mutate shared state.
   Context cxt = ARQ.getContext().copy();
   // Fix the query "now" timestamp for time-dependent functions.
   cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime());
   FunctionEnv functionEnv = new ExecutionContext(cxt, null, null, null);
   return expr.eval(binding, functionEnv);
 }
  /**
   * Tests whether a query gives the same results when run both with and without a given optimizer
   *
   * <p>The optimizer symbol is restored to its prior state (enabled, disabled, or unset) on exit,
   * even if an assertion fails.
   *
   * @param queryStr Query
   * @param ds Dataset
   * @param opt Optimizer
   * @param expected Expected number of results
   */
  public static void test(String queryStr, Dataset ds, Symbol opt, int expected) {
    Query q = QueryFactory.create(queryStr);

    if (!q.isSelectType()) Assert.fail("Only SELECT queries are testable with this method");

    // Track current state so it can be restored in the finally block.
    // (Removed an unused "Op op = Algebra.compile(q);" local - the Op was never read.)
    boolean isEnabled = ARQ.isTrue(opt);
    boolean isDisabled = ARQ.isFalse(opt);

    try {
      // Run first without optimization
      ARQ.set(opt, false);
      ResultSetRewindable rs;
      try (QueryExecution qe = QueryExecutionFactory.create(q, ds)) {
        rs = ResultSetFactory.makeRewindable(qe.execSelect());
        if (expected != rs.size()) {
          // Dump the actual results for diagnosis before the assertion fails;
          // reset() rewinds so the assertion below still sees the full result set.
          System.err.println("Non-optimized results not as expected");
          TextOutput output = new TextOutput((SerializationContext) null);
          output.format(System.out, rs);
          rs.reset();
        }
        Assert.assertEquals(expected, rs.size());
      }

      // Run with optimization
      ARQ.set(opt, true);
      ResultSetRewindable rsOpt;
      try (QueryExecution qeOpt = QueryExecutionFactory.create(q, ds)) {
        rsOpt = ResultSetFactory.makeRewindable(qeOpt.execSelect());
        if (expected != rsOpt.size()) {
          System.err.println("Optimized results not as expected");
          TextOutput output = new TextOutput((SerializationContext) null);
          output.format(System.out, rsOpt);
          rsOpt.reset();
        }
        Assert.assertEquals(expected, rsOpt.size());
      }
      // Both runs must produce the same solutions (up to blank node relabelling).
      Assert.assertTrue(ResultSetCompare.isomorphic(rs, rsOpt));
    } finally {
      // Restore previous state
      if (isEnabled) {
        ARQ.set(opt, true);
      } else if (isDisabled) {
        ARQ.set(opt, false);
      } else {
        ARQ.unset(opt);
      }
    }
  }
  /**
   * Tests whether an algebra expression gives the same results when run both with and without a
   * given optimizer
   *
   * <p>The optimizer symbol is restored to its prior state (enabled, disabled, or unset) on exit.
   *
   * @param algStr Algebra
   * @param ds Dataset
   * @param opt Optimizer
   * @param expected Expected number of results
   */
  public static void testAsAlgebra(String algStr, Dataset ds, Symbol opt, int expected) {
    Op op = SSE.parseOp(algStr);
    // Collect the visible variable names so the raw QueryIterator can be
    // wrapped as a ResultSet.
    List<String> vars = new ArrayList<>();
    for (Var v : OpVars.visibleVars(op)) {
      vars.add(v.getName());
    }

    // Track current state
    boolean isEnabled = ARQ.isTrue(opt);
    boolean isDisabled = ARQ.isFalse(opt);

    try {
      // Run first without optimization
      ARQ.set(opt, false);
      // NOTE(review): the engine is constructed with the current global context,
      // so the ARQ.set above must happen before this line.
      QueryEngineMain engine =
          new QueryEngineMain(op, ds.asDatasetGraph(), BindingFactory.binding(), ARQ.getContext());
      QueryIterator iter =
          engine.eval(op, ds.asDatasetGraph(), BindingFactory.binding(), ARQ.getContext());
      ResultSetRewindable rs =
          ResultSetFactory.makeRewindable(
              new ResultSetStream(vars, ModelFactory.createDefaultModel(), iter));
      if (expected != rs.size()) {
        // Dump the actual results for diagnosis before the assertion fails;
        // reset() rewinds so the isomorphism check below sees the full result set.
        System.err.println("Non-optimized results not as expected");
        TextOutput output = new TextOutput((SerializationContext) null);
        output.format(System.out, rs);
        rs.reset();
      }
      Assert.assertEquals(expected, rs.size());
      iter.close();

      // Run with optimization
      ARQ.set(opt, true);
      engine =
          new QueryEngineMain(op, ds.asDatasetGraph(), BindingFactory.binding(), ARQ.getContext());
      QueryIterator iterOpt =
          engine.eval(op, ds.asDatasetGraph(), BindingFactory.binding(), ARQ.getContext());
      ResultSetRewindable rsOpt =
          ResultSetFactory.makeRewindable(
              new ResultSetStream(vars, ModelFactory.createDefaultModel(), iterOpt));
      if (expected != rsOpt.size()) {
        System.err.println("Optimized results not as expected");
        TextOutput output = new TextOutput((SerializationContext) null);
        output.format(System.out, rsOpt);
        rsOpt.reset();
      }
      Assert.assertEquals(expected, rsOpt.size());
      iterOpt.close();

      // Both runs must produce the same solutions (up to blank node relabelling).
      Assert.assertTrue(ResultSetCompare.isomorphic(rs, rsOpt));
    } finally {
      // Restore previous state
      if (isEnabled) {
        ARQ.set(opt, true);
      } else if (isDisabled) {
        ARQ.set(opt, false);
      } else {
        ARQ.unset(opt);
      }
    }
  }
示例#6
0
/** Misc RIOT code */
public class RiotLib {
  private static final String bNodeLabelStart = "_:";
  // True when <_:...> IRIs should be decoded back into blank nodes
  // (controlled by ARQ.constantBNodeLabels; read once at class load).
  private static final boolean skolemizedBNodes = ARQ.isTrue(ARQ.constantBNodeLabels);

  /**
   * Implement <_:....> as a "Node IRI" that is, use the given label as the BNode internal label.
   * Use with care.
   *
   * @param iri IRI string, possibly of the form {@code _:label}
   * @return a blank node for {@code _:label} forms (when enabled), otherwise a URI node
   */
  public static Node createIRIorBNode(String iri) {
    // Is it a bNode label? i.e. <_:xyz>
    if (isBNodeIRI(iri)) {
      // Strip the "_:" and use the remainder as the blank node's internal label.
      String s = iri.substring(bNodeLabelStart.length());
      Node n = NodeFactory.createBlankNode(s);
      return n;
    }
    return NodeFactory.createURI(iri);
  }

  /** Test whether the string is a bNode-encoded IRI, i.e. it starts with {@code _:}. */
  public static boolean isBNodeIRI(String iri) {
    return skolemizedBNodes && iri.startsWith(bNodeLabelStart);
  }

  // Shared Turtle parser profile used by parse(String); prefixes are installed below.
  private static ParserProfile profile = profile(RDFLanguages.TURTLE, null, null);

  static {
    // Preload well-known prefixes so parse(String) accepts common prefixed names.
    PrefixMap pmap = profile.getPrologue().getPrefixMap();
    pmap.add("rdf", ARQConstants.rdfPrefix);
    pmap.add("rdfs", ARQConstants.rdfsPrefix);
    pmap.add("xsd", ARQConstants.xsdPrefix);
    pmap.add("owl", ARQConstants.owlPrefix);
    pmap.add("fn", ARQConstants.fnPrefix);
    // NOTE(review): "op" is mapped to the fn namespace - confirm this is intended.
    pmap.add("op", ARQConstants.fnPrefix);
    pmap.add("ex", "http://example/ns#");
    pmap.add("", "http://example/");
  }

  /**
   * Parse a string to get one Node (the first token in the string)
   *
   * @param string string containing at least one RDF token
   * @return the node for the first token, or null if the string has no tokens
   */
  public static Node parse(String string) {
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerString(string);
    if (!tokenizer.hasNext()) return null;
    Token t = tokenizer.next();
    Node n = profile.create(null, t);
    // Extra tokens are ignored, but warn - the caller probably passed too much.
    if (tokenizer.hasNext())
      Log.warn(RiotLib.class, "String has more than one token in it: " + string);
    return n;
  }

  /** Create a parser profile for the language, using the default error handler. */
  public static ParserProfile profile(Lang lang, String baseIRI) {
    return profile(lang, baseIRI, ErrorHandlerFactory.getDefaultErrorHandler());
  }

  /**
   * Create a parser profile for the language: N-Triples/N-Quads do not resolve IRIs and only
   * check in strict mode; RDF/JSON checks but does not resolve; everything else resolves and
   * checks.
   */
  public static ParserProfile profile(Lang lang, String baseIRI, ErrorHandler handler) {
    if (sameLang(NTRIPLES, lang) || sameLang(NQUADS, lang)) {
      boolean checking = SysRIOT.strictMode;
      // If strict mode, do checking e.g. URIs
      return profile(baseIRI, false, checking, handler);
    }
    if (sameLang(RDFJSON, lang)) return profile(baseIRI, false, true, handler);
    return profile(baseIRI, true, true, handler);
  }

  /**
   * Create a parser profile for the given setup
   *
   * @param baseIRI Base IRI
   * @param resolveIRIs Whether to resolve IRIs
   * @param checking Whether to check
   * @param handler Error handler
   * @return ParserProfile
   * @see #profile for per-language setup
   */
  public static ParserProfile profile(
      String baseIRI, boolean resolveIRIs, boolean checking, ErrorHandler handler) {
    // Was "true ? SyntaxLabels.createLabelToNode() : LabelToNode.createUseLabelEncoded()" -
    // a constant ternary whose second branch was dead code.
    LabelToNode labelToNode = SyntaxLabels.createLabelToNode();

    Prologue prologue;
    if (resolveIRIs)
      prologue = new Prologue(PrefixMapFactory.createForInput(), IRIResolver.create(baseIRI));
    else prologue = new Prologue(PrefixMapFactory.createForInput(), IRIResolver.createNoResolve());

    if (checking) return new ParserProfileChecker(prologue, handler, labelToNode);
    else return new ParserProfileBase(prologue, handler, labelToNode);
  }

  /** Get triples with the same subject */
  public static Collection<Triple> triplesOfSubject(Graph graph, Node subj) {
    return triples(graph, subj, Node.ANY, Node.ANY);
  }

  /** Get all the triples for the graph.find */
  public static List<Triple> triples(Graph graph, Node s, Node p, Node o) {
    List<Triple> acc = new ArrayList<>();
    accTriples(acc, graph, s, p, o);
    return acc;
  }

  /** Count the triples matching the pattern in the graph. */
  public static long countTriples(Graph graph, Node s, Node p, Node o) {
    ExtendedIterator<Triple> iter = graph.find(s, p, o);
    try {
      return Iter.count(iter);
    } finally {
      // ExtendedIterator must be closed explicitly.
      iter.close();
    }
  }

  /** Count the matches to a pattern across all graphs of the dataset. */
  public static long countTriples(DatasetGraph dsg, Node s, Node p, Node o) {
    // Graph slot is wildcarded: matches in any graph are counted.
    Iterator<Quad> iter = dsg.find(Node.ANY, s, p, o);
    return Iter.count(iter);
  }

  /** Collect all the matching triples */
  public static void accTriples(Collection<Triple> acc, Graph graph, Node s, Node p, Node o) {
    ExtendedIterator<Triple> iter = graph.find(s, p, o);
    for (; iter.hasNext(); ) acc.add(iter.next());
    iter.close();
  }

  /** Get exactly one triple or null for none or more than one. */
  public static Triple triple1(Graph graph, Node s, Node p, Node o) {
    ExtendedIterator<Triple> iter = graph.find(s, p, o);
    try {
      if (!iter.hasNext()) return null;
      Triple t = iter.next();
      // A second match means the answer is ambiguous - report "no unique triple".
      if (iter.hasNext()) return null;
      return t;
    } finally {
      iter.close();
    }
  }

  /** Get exactly one triple, or null for none or more than one (matched in any graph). */
  public static Triple triple1(DatasetGraph dsg, Node s, Node p, Node o) {
    Iterator<Quad> iter = dsg.find(Node.ANY, s, p, o);
    if (!iter.hasNext()) return null;
    Quad q = iter.next();
    if (iter.hasNext()) return null;
    return q.asTriple();
  }

  /** Test whether the string contains no occurrence of the character. */
  public static boolean strSafeFor(String str, char ch) {
    return str.indexOf(ch) == -1;
  }

  /** Write a Turtle-style {@code @base} directive; a null base writes nothing. */
  public static void writeBase(IndentedWriter out, String base) {
    if (base != null) {
      out.print("@base ");
      // Pad so the IRI column lines up with prefix declarations.
      out.pad(PREFIX_IRI);
      out.print("<");
      out.print(base);
      out.print(">");
      out.print(" .");
      out.println();
    }
  }

  /** Write Turtle-style {@code @prefix} directives, one per mapping; null/empty map writes nothing. */
  public static void writePrefixes(IndentedWriter out, PrefixMap prefixMap) {
    if (prefixMap != null && !prefixMap.isEmpty()) {
      for (Map.Entry<String, String> e : prefixMap.getMappingCopyStr().entrySet()) {
        out.print("@prefix ");
        out.print(e.getKey());
        out.print(": ");
        out.pad(PREFIX_IRI);
        out.print("<");
        out.print(e.getValue());
        out.print(">");
        out.print(" .");
        out.println();
      }
    }
  }

  /** Returns dataset that wraps a graph */
  public static DatasetGraph dataset(Graph graph) {
    return DatasetGraphFactory.createOneGraph(graph);
  }

  /** Prefix map of the dataset's default graph. */
  public static PrefixMap prefixMap(DatasetGraph dsg) {
    return PrefixMapFactory.create(dsg.getDefaultGraph().getPrefixMapping());
  }

  /** Width (in characters) of the predicate as it would be written with this prefix map. */
  public static int calcWidth(PrefixMap prefixMap, String baseURI, Node p) {
    // rdf:type with no rdf prefix is written as the single character "a".
    if (!prefixMap.contains(rdfNS) && RDF_type.equals(p)) return 1;

    String x = prefixMap.abbreviate(p.getURI());
    // No abbreviation: full IRI plus the surrounding angle brackets.
    if (x == null) return p.getURI().length() + 2;
    return x.length();
  }

  /**
   * Maximum node width over the collection, clamped below by minWidth; nodes wider than
   * maxWidth are ignored. Consecutive duplicate nodes are only measured once.
   */
  public static int calcWidth(
      PrefixMap prefixMap, String baseURI, Collection<Node> nodes, int minWidth, int maxWidth) {
    Node prev = null;
    int nodeMaxWidth = minWidth;

    for (Node n : nodes) {
      if (prev != null && prev.equals(n)) continue;
      int len = calcWidth(prefixMap, baseURI, n);
      if (len > maxWidth) continue;
      if (nodeMaxWidth < len) nodeMaxWidth = len;
      prev = n;
    }
    return nodeMaxWidth;
  }

  /**
   * Maximum predicate width over the triples, clamped below by minWidth; predicates wider
   * than maxWidth are ignored. Consecutive duplicate predicates are only measured once.
   */
  public static int calcWidthTriples(
      PrefixMap prefixMap, String baseURI, Collection<Triple> triples, int minWidth, int maxWidth) {
    Node prev = null;
    int nodeMaxWidth = minWidth;

    for (Triple triple : triples) {
      Node n = triple.getPredicate();
      if (prev != null && prev.equals(n)) continue;
      int len = calcWidth(prefixMap, baseURI, n);
      if (len > maxWidth) continue;
      if (nodeMaxWidth < len) nodeMaxWidth = len;
      prev = n;
    }
    return nodeMaxWidth;
  }

  /** IndentedWriter over a java.io.Writer (better to use an IndentedWriter over an OutputStream) */
  public static IndentedWriter create(Writer writer) {
    return new IndentedWriterWriter(writer);
  }

  /** Prefix map of the graph. */
  public static PrefixMap prefixMap(Graph graph) {
    return PrefixMapFactory.create(graph.getPrefixMapping());
  }

  /** Adapt a dataset writer so it can be used as a graph writer. */
  public static WriterGraphRIOTBase adapter(WriterDatasetRIOT writer) {
    return new WriterAdapter(writer);
  }

  /** Hidden to direct program to using OutputStreams (for RDF, that gets the charset right) */
  private static class IndentedWriterWriter extends IndentedWriter {
    IndentedWriterWriter(Writer w) {
      super(w);
    }
  }

  /** Presents a dataset writer as a graph writer by wrapping each graph in a one-graph dataset. */
  private static class WriterAdapter extends WriterGraphRIOTBase {
    private WriterDatasetRIOT writer;

    WriterAdapter(WriterDatasetRIOT writer) {
      this.writer = writer;
    }

    @Override
    public Lang getLang() {
      return writer.getLang();
    }

    @Override
    public void write(
        OutputStream out, Graph graph, PrefixMap prefixMap, String baseURI, Context context) {
      writer.write(out, RiotLib.dataset(graph), prefixMap, baseURI, context);
    }

    @Override
    public void write(
        Writer out, Graph graph, PrefixMap prefixMap, String baseURI, Context context) {
      writer.write(out, RiotLib.dataset(graph), prefixMap, baseURI, context);
    }
  }
}