  /**
   * A simple test with nothing bound and a single <em>empty</em> source solution. This can be
   * handled by any {@link IRemoteSparqlQueryBuilder}.
   */
  public void test_service_001b() throws Exception {

    // The remote service end point.
    final BigdataURI serviceURI = valueFactory.createURI("http://www.bigdata.com/myService");

    // Graph pattern: a single triple pattern { ?s ?p ?o }.
    final GraphPatternGroup<IGroupMemberNode> graphPattern = new JoinGroupNode();
    graphPattern.addChild(
        new StatementPatternNode(new VarNode("s"), new VarNode("p"), new VarNode("o")));

    // The original query image plus one (unused) prefix declaration.
    final String sparqlImage = "SERVICE <" + serviceURI + "> { ?s ?p ?o }";

    final Map<String, String> namespaces = new LinkedHashMap<String, String>();
    namespaces.put("foo", "http://www.bigdata.com/foo");

    // All three variables are projected into the SERVICE call.
    final Set<IVariable<?>> projected = new LinkedHashSet<IVariable<?>>();
    for (final String name : new String[] {"s", "p", "o"}) {
      projected.add(Var.var(name));
    }

    final ServiceNode serviceNode =
        new ServiceNode(new ConstantNode(makeIV(serviceURI)), graphPattern);
    serviceNode.setExprImage(sparqlImage);
    serviceNode.setPrefixDecls(namespaces);
    serviceNode.setProjectedVars(projected);

    // A single, empty, source solution.
    final BindingSet[] solutions = new BindingSet[] {new MapBindingSet()};

    final RemoteServiceOptions options = new RemoteServiceOptions();

    // Each SPARQL version selects its corresponding query builder.
    options.setSPARQLVersion(SPARQLVersion.SPARQL_10);
    assertEquals(
        RemoteSparql10QueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());

    options.setSPARQLVersion(SPARQLVersion.SPARQL_11);
    assertEquals(
        RemoteSparql11QueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());

    options.setSPARQLVersion(SPARQLVersion.SPARQL_11_DRAFT_BINDINGS);
    assertEquals(
        RemoteSparql11DraftQueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());
  }
  /**
   * A variant test in which the source solution binds variables (?p, ?o) which are used by the
   * SERVICE's graph pattern but are NOT among the projected variables (only ?s is projected).
   * Verifies the exact query text generated for each SPARQL version: SPARQL 1.0 encodes the
   * bindings as sameTerm() FILTERs, SPARQL 1.1 uses a VALUES clause, and the draft 1.1 dialect
   * uses a BINDINGS clause.
   */
  public void test_service_009() throws Exception {

    final BigdataURI serviceURI = valueFactory.createURI("http://www.bigdata.com/myService");

    final String exprImage = "SERVICE <" + serviceURI + "> { ?s ?p ?o }";

    // No prefix declarations for this test.
    final Map<String, String> prefixDecls = new LinkedHashMap<String, String>();

    // Graph pattern: a single triple pattern { ?s ?p ?o }.
    final GraphPatternGroup<IGroupMemberNode> groupNode = new JoinGroupNode();
    {
      groupNode.addChild(
          new StatementPatternNode(new VarNode("s"), new VarNode("p"), new VarNode("o")));
    }

    final ServiceNode serviceNode =
        new ServiceNode(new ConstantNode(makeIV(serviceURI)), groupNode);

    {
      // Only ?s is projected; ?p and ?o arrive through the source solution.
      final Set<IVariable<?>> projectedVars = new LinkedHashSet<IVariable<?>>();
      {
        projectedVars.add(Var.var("s"));
      }

      serviceNode.setExprImage(exprImage);
      serviceNode.setPrefixDecls(prefixDecls);
      serviceNode.setProjectedVars(projectedVars);
    }

    // A single source solution binding ?p and ?o.
    final List<BindingSet> bindingSets = new LinkedList<BindingSet>();

    {
      final MapBindingSet bset = new MapBindingSet();
      final BigdataURI tmp1 = valueFactory.createURI("p:p1");
      final BigdataLiteral tmp2 = valueFactory.createLiteral("lit1");
      bset.addBinding("p", tmp1);
      bset.addBinding("o", tmp2);
      bindingSets.add(bset);
    }

    final BindingSet[] a = bindingSets.toArray(new BindingSet[bindingSets.size()]);

    final RemoteServiceOptions options = new RemoteServiceOptions();

    // SPARQL 1.0: bindings are conveyed using sameTerm() FILTERs.
    options.setSPARQLVersion(SPARQLVersion.SPARQL_10);

    IRemoteSparqlQueryBuilder queryBuilder =
        RemoteSparqlBuilderFactory.get(options, serviceNode, a);

    final String actualQueryStrVersion_10 = queryBuilder.getSparqlQuery(a).replaceAll("\\s+", " ");

    final String expectedSparqlVersion_10 =
        ("SELECT  ?s "
                + "WHERE { "
                + "FILTER ( sameTerm( ?p, <p:p1>) ). "
                + "FILTER ( sameTerm( ?o, \"lit1\") ). "
                + " ?s ?p ?o "
                + "} ")
            .replaceAll("\\s+", " ");

    assertEquals(expectedSparqlVersion_10, actualQueryStrVersion_10);

    // SPARQL 1.1: bindings are conveyed using a VALUES clause.
    options.setSPARQLVersion(SPARQLVersion.SPARQL_11);

    queryBuilder = RemoteSparqlBuilderFactory.get(options, serviceNode, a);

    final String actualQueryStrVersion_11 = queryBuilder.getSparqlQuery(a).replaceAll("\\s+", " ");

    final String expectedSparqlVersion_11 =
        ("SELECT  ?s "
                + "WHERE {"
                + " ?s ?p ?o "
                + "} "
                + "VALUES ( ?p ?o) { "
                + "( <p:p1> \"lit1\" ) "
                + "} ")
            .replaceAll("\\s+", " ");

    assertEquals(expectedSparqlVersion_11, actualQueryStrVersion_11);

    // SPARQL 1.1 draft: bindings are conveyed using a BINDINGS clause.
    options.setSPARQLVersion(SPARQLVersion.SPARQL_11_DRAFT_BINDINGS);

    queryBuilder = RemoteSparqlBuilderFactory.get(options, serviceNode, a);

    final String actualQueryStrVersion_11_DRAFT_BINDINGS =
        queryBuilder.getSparqlQuery(a).replaceAll("\\s+", " ");

    final String expectedSparqlVersion_11_DRAFT_BINDINGS =
        ("SELECT  ?s "
                + "WHERE {"
                + " ?s ?p ?o "
                + "} "
                + "BINDINGS ?p ?o { "
                + "( <p:p1> \"lit1\" ) "
                + "} ")
            .replaceAll("\\s+", " ");

    // Note: expected value goes first (the arguments were swapped here before).
    assertEquals(expectedSparqlVersion_11_DRAFT_BINDINGS, actualQueryStrVersion_11_DRAFT_BINDINGS);
  }
  /**
   * A variant test in which there is a blank node in the BINDINGS to be flowed through to the
   * remote SERVICE. In this test the blank nodes are correlated so we MUST impose a constraint on
   * the remote service to enforce that correlation. However, there is another solution in which
   * the two variables are NOT correlated so that FILTER MUST NOT be imposed across all such
   * solutions. Therefore the SERVICE call will be vectored by rewriting it into a UNION with
   * different variable names in each variant of the UNION.
   *
   * <p>This case is only handled by the {@link RemoteSparql10QueryBuilder}.
   */
  public void test_service_008() throws Exception {

    final BigdataURI serviceURI = valueFactory.createURI("http://www.bigdata.com/myService");

    // Graph pattern: a single triple pattern { ?s ?p ?o }.
    final GraphPatternGroup<IGroupMemberNode> graphPattern = new JoinGroupNode();
    graphPattern.addChild(
        new StatementPatternNode(new VarNode("s"), new VarNode("p"), new VarNode("o")));

    final String sparqlImage = "SERVICE <" + serviceURI + "> { ?s ?p ?o }";

    final Map<String, String> namespaces = new LinkedHashMap<String, String>();

    // All three variables are projected into the SERVICE call.
    final Set<IVariable<?>> projected = new LinkedHashSet<IVariable<?>>();
    for (final String name : new String[] {"s", "p", "o"}) {
      projected.add(Var.var(name));
    }

    final ServiceNode serviceNode =
        new ServiceNode(new ConstantNode(makeIV(serviceURI)), graphPattern);
    serviceNode.setExprImage(sparqlImage);
    serviceNode.setPrefixDecls(namespaces);
    serviceNode.setProjectedVars(projected);

    /*
     * Note: Blank nodes are not permitted in the BINDINGS clause (per the
     * SPARQL 1.1 grammar). However, a blank node MAY be turned into an
     * unbound variable as long as we impose the constraint that all vars
     * having that blank node for a solution are EQ (same term).
     *
     * Note: In the first solution the *same* blank node is bound for both
     * ?s and ?o. That correlation must be preserved on the remote end via
     * a FILTER attached to the remote SPARQL query.
     */
    final List<BindingSet> bindingSets = new LinkedList<BindingSet>();

    // Solution 1: the blank nodes ARE correlated (?s and ?o share one bnode).
    final MapBindingSet correlated = new MapBindingSet();
    final BNode shared = new BNodeImpl("abc");
    correlated.addBinding("s", shared);
    correlated.addBinding("o", shared);
    bindingSets.add(correlated);

    // Solution 2: the blank nodes are NOT correlated.
    final MapBindingSet uncorrelated = new MapBindingSet();
    uncorrelated.addBinding("s", new BNodeImpl("foo"));
    uncorrelated.addBinding("o", new BNodeImpl("bar"));
    bindingSets.add(uncorrelated);

    final BindingSet[] solutions = bindingSets.toArray(new BindingSet[0]);

    final RemoteServiceOptions options = new RemoteServiceOptions();

    // Correlated blank nodes force the SPARQL 1.0 query builder for every
    // version, since that builder does not rely on the BINDINGS clause.
    for (final SPARQLVersion version :
        new SPARQLVersion[] {
          SPARQLVersion.SPARQL_10,
          SPARQLVersion.SPARQL_11,
          SPARQLVersion.SPARQL_11_DRAFT_BINDINGS
        }) {

      options.setSPARQLVersion(version);

      assertEquals(
          RemoteSparql10QueryBuilder.class,
          RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());
    }
  }
  /**
   * A variant test in which there is a blank node in the BINDINGS to be flowed through to the
   * remote SERVICE. In this test the blank nodes are not correlated so we do not need to impose a
   * FILTER on the remote service. This can be handled by any {@link IRemoteSparqlQueryBuilder}.
   */
  public void test_service_004() throws Exception {

    final BigdataURI serviceURI = valueFactory.createURI("http://www.bigdata.com/myService");

    // Graph pattern: a single triple pattern { ?s ?p ?o }.
    final GraphPatternGroup<IGroupMemberNode> graphPattern = new JoinGroupNode();
    graphPattern.addChild(
        new StatementPatternNode(new VarNode("s"), new VarNode("p"), new VarNode("o")));

    final String sparqlImage = "SERVICE <" + serviceURI + "> { ?s ?p ?o }";

    final Map<String, String> namespaces = new LinkedHashMap<String, String>();

    // All three variables are projected into the SERVICE call.
    final Set<IVariable<?>> projected = new LinkedHashSet<IVariable<?>>();
    for (final String name : new String[] {"s", "p", "o"}) {
      projected.add(Var.var(name));
    }

    final ServiceNode serviceNode =
        new ServiceNode(new ConstantNode(makeIV(serviceURI)), graphPattern);
    serviceNode.setExprImage(sparqlImage);
    serviceNode.setPrefixDecls(namespaces);
    serviceNode.setProjectedVars(projected);

    /*
     * Note: Blank nodes are not permitted in the BINDINGS clause (per the
     * SPARQL 1.1 grammar). However, a blank node MAY be turned into an
     * unbound variable as long as we impose the constraint that all vars
     * having that blank node for a solution are EQ (same term). Here the
     * blank node appears only once per solution, so no correlation
     * constraint is required.
     */
    final MapBindingSet bset = new MapBindingSet();
    bset.addBinding("s", new BNodeImpl("abc"));

    final BindingSet[] solutions = new BindingSet[] {bset};

    final RemoteServiceOptions options = new RemoteServiceOptions();

    // Each SPARQL version selects its corresponding query builder.
    options.setSPARQLVersion(SPARQLVersion.SPARQL_10);
    assertEquals(
        RemoteSparql10QueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());

    options.setSPARQLVersion(SPARQLVersion.SPARQL_11);
    assertEquals(
        RemoteSparql11QueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());

    options.setSPARQLVersion(SPARQLVersion.SPARQL_11_DRAFT_BINDINGS);
    assertEquals(
        RemoteSparql11DraftQueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());
  }
  /**
   * A variant test in which there are some BINDINGS to be passed through. The set of bindings
   * covers the different types of RDF {@link Value} and also exercises the prefix declarations.
   * This test does NOT use blank nodes in the BINDINGS. This can be handled by either {@link
   * IRemoteSparqlQueryBuilder}.
   */
  public void test_service_003() throws Exception {

    // Resolve the IVs that are used in the source solutions below.
    final BigdataURI dcCreator = valueFactory.asValue(DC.CREATOR);
    final BigdataURI book1 = valueFactory.createURI("http://example.org/book/book1");
    final BigdataURI book2 = valueFactory.createURI("http://example.org/book/book2");
    final BigdataLiteral book3 = valueFactory.createLiteral("Semantic Web Primer");
    final BigdataLiteral book4 = valueFactory.createLiteral("Semantic Web Primer", "DE");
    final BigdataLiteral book5 = valueFactory.createLiteral("12", XSD.INT);
    final BigdataLiteral book6 = valueFactory.createLiteral("true", XSD.BOOLEAN);

    addResolveIVs(dcCreator, book1, book2, book3, book4, book5, book6);

    final BigdataURI serviceURI = valueFactory.createURI("http://www.bigdata.com/myService");

    // Graph pattern: a single triple pattern { ?s ?p ?book }.
    final GraphPatternGroup<IGroupMemberNode> graphPattern = new JoinGroupNode();
    graphPattern.addChild(
        new StatementPatternNode(new VarNode("s"), new VarNode("p"), new VarNode("book")));

    final String sparqlImage = "SERVICE <" + serviceURI + "> { ?book ?p ?o}";

    // A default-namespace prefix declaration, exercised by the bindings.
    final Map<String, String> namespaces = new LinkedHashMap<String, String>();
    namespaces.put("", "http://example.org/book/");

    final Set<IVariable<?>> projected = new LinkedHashSet<IVariable<?>>();
    for (final String name : new String[] {"book", "p", "o"}) {
      projected.add(Var.var(name));
    }

    final ServiceNode serviceNode =
        new ServiceNode(new ConstantNode(makeIV(serviceURI)), graphPattern);
    serviceNode.setExprImage(sparqlImage);
    serviceNode.setPrefixDecls(namespaces);
    serviceNode.setProjectedVars(projected);

    // One source solution per RDF Value kind: first the URIs, then the
    // plain, language-tagged, and datatyped literals.
    final List<BindingSet> bindingSets = new LinkedList<BindingSet>();
    for (final BigdataURI uri : new BigdataURI[] {book1, book2}) {
      final MapBindingSet bset = new MapBindingSet();
      bset.addBinding("book", uri);
      bindingSets.add(bset);
    }
    for (final BigdataLiteral lit : new BigdataLiteral[] {book3, book4, book5, book6}) {
      final MapBindingSet bset = new MapBindingSet();
      bset.addBinding("book", lit);
      bindingSets.add(bset);
    }

    final BindingSet[] solutions = bindingSets.toArray(new BindingSet[0]);

    final RemoteServiceOptions options = new RemoteServiceOptions();

    // Each SPARQL version selects its corresponding query builder.
    options.setSPARQLVersion(SPARQLVersion.SPARQL_10);
    assertEquals(
        RemoteSparql10QueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());

    options.setSPARQLVersion(SPARQLVersion.SPARQL_11);
    assertEquals(
        RemoteSparql11QueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());

    options.setSPARQLVersion(SPARQLVersion.SPARQL_11_DRAFT_BINDINGS);
    assertEquals(
        RemoteSparql11DraftQueryBuilder.class,
        RemoteSparqlBuilderFactory.get(options, serviceNode, solutions).getClass());
  }
  /**
   * Recursively lift SPARQL 1.1 sub-SELECTs out of the given group to run as named subqueries when
   * they must be evaluated exactly once rather than "as-bound": subqueries having a LIMIT and/or
   * OFFSET (a SLICE), aggregation subqueries, and subqueries explicitly annotated with RUN_ONCE.
   * The method recurses into nested groups and into subquery WHERE clauses, but it never rewrites
   * anything inside of a SERVICE node, and ASK subqueries are left in place.
   *
   * @param context the evaluation context.
   * @param sa the static analysis object for the query.
   * @param group the group whose children are scanned (recursively).
   */
  private void liftSubqueries(
      final AST2BOpContext context,
      final StaticAnalysis sa,
      final GraphPatternGroup<IGroupMemberNode> group) {

    final int arity = group.arity();

    for (int i = 0; i < arity; i++) {

      final BOp child = (BOp) group.get(i);

      if (child instanceof GraphPatternGroup<?>) {

        /*
         * Note: Do recursion *before* we do the rewrite so we will
         * rewrite Sub-Sub-Selects.
         *
         * FIXME Unit test for sub-sub-select optimization.
         */
        liftSubqueries(context, sa, ((GraphPatternGroup<IGroupMemberNode>) child));

      } else if (child instanceof SubqueryRoot) {

        // Recursion into subqueries.

        final SubqueryRoot subqueryRoot = (SubqueryRoot) child;

        liftSubqueries(context, sa, subqueryRoot.getWhereClause());

      } else if (child instanceof ServiceNode) {

        // Do not rewrite things inside of a SERVICE node.
        continue;
      }

      if (!(child instanceof SubqueryRoot)) {
        // Only sub-SELECTs are candidates for lifting.
        continue;
      }

      final SubqueryRoot subqueryRoot = (SubqueryRoot) child;

      if (subqueryRoot.getQueryType() == QueryType.ASK) {

        /*
         * FIXME Look at what would be involved in lifting an ASK
         * sub-query. There are going to be at least two cases. If there
         * is no join variable, then we always want to lift the ASK
         * sub-query as it is completely independent of the parent
         * group. If there is a join variable, then we need to project
         * solutions which include the join variables from the subquery
         * and the "ASK". At that point we can hash join against the
         * projected solutions and the ASK succeeds if the hash join
         * succeeds. [Add unit tests for this too.]
         */

        continue;
      }

      if (subqueryRoot.hasSlice()) {

        /*
         * Lift out SPARQL 1.1 subqueries which use LIMIT and/or OFFSET.
         *
         * The SliceOp in the subquery will cause the IRunningQuery in
         * which it appears to be interrupted. Therefore, when a SLICE
         * is required for a subquery we need to lift it out to run it
         * as a named subquery.
         *
         * Note: This branch also covers subqueries which combine a
         * SLICE with an ORDER BY. Due to the interaction of the LIMIT
         * and ORDER BY clauses, such subqueries MUST be run first since
         * they can produce different results if they are run
         * "as-bound". (An earlier revision tested that combination in a
         * separate branch which was unreachable since this branch
         * already handles every subquery having a SLICE.)
         *
         * TODO There may well be other cases that we have to handle
         * with as-bound evaluation of a Subquery with a LIMIT/OFFSET.
         * If so, then the subquery will have to be run using the
         * SubqueryOp.
         */

        liftSparql11Subquery(context, sa, subqueryRoot);

        continue;
      }

      if (StaticAnalysis.isAggregate(subqueryRoot)) {

        /*
         * Lift out SPARQL 1.1 subqueries which use {@link IAggregate}s.
         * This typically provides more efficient evaluation than
         * repeated as-bound evaluation of the sub-select. It also
         * prevents inappropriate sharing of the internal state of the
         * {@link IAggregate} functions.
         */

        liftSparql11Subquery(context, sa, subqueryRoot);

        continue;
      }

      if (subqueryRoot.isRunOnce()) {

        /*
         * Lift out SPARQL 1.1 subqueries for which the RUN_ONCE
         * annotation was specified.
         */

        liftSparql11Subquery(context, sa, subqueryRoot);

        continue;
      }

      /*
       * FIXME We can not correctly predict the join variables at this
       * time because that depends on the actual evaluation order. This
       * has been disabled for now because it would otherwise cause all
       * sub-selects to be lifted out.
       */
      if (false) {
        final Set<IVariable<?>> joinVars =
            sa.getJoinVars(subqueryRoot, new LinkedHashSet<IVariable<?>>());

        if (joinVars.isEmpty()) {

          /*
           * Lift out SPARQL 1.1 subqueries which do not share any
           * join variables with the parent group.
           */

          liftSparql11Subquery(context, sa, subqueryRoot);

          continue;
        }
      }
    }
  }