private void addResult(RexNode exp) {
      // Cast of literal can't be reduced, so skip those (otherwise we'd
      // go into an infinite loop as we add them back).
      if (exp.getKind() == RexKind.Cast) {
        RexCall cast = (RexCall) exp;
        RexNode operand = cast.getOperands()[0];
        if (operand instanceof RexLiteral) {
          return;
        }
      }
      constExprs.add(exp);

      // In the case where the expression corresponds to a UDR argument,
      // we need to preserve casts.  Note that this only applies to
      // the topmost argument, not expressions nested within the UDR
      // call.
      //
      // REVIEW zfong 6/13/08 - Are there other expressions where we
      // also need to preserve casts?
      if (parentCallTypeStack.isEmpty()) {
        addCasts.add(false);
      } else {
        addCasts.add(
            parentCallTypeStack.get(parentCallTypeStack.size() - 1)
                instanceof FarragoUserDefinedRoutine);
      }
    }
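A minimal usage sketch, assumed to run inside the same visitor class (the RexBuilder and "cluster" variable are illustrative, not part of the original): a reducible constant subtree such as 1 + 2 is recorded, while a cast whose operand is already a literal is skipped by the check above.

    RexBuilder rexBuilder = cluster.getRexBuilder(); // "cluster" assumed to be in scope
    RexNode plus =
        rexBuilder.makeCall(
            SqlStdOperatorTable.plusOperator,
            rexBuilder.makeExactLiteral(BigDecimal.ONE),
            rexBuilder.makeExactLiteral(BigDecimal.valueOf(2)));
    addResult(plus); // lands in constExprs; addCasts records whether the parent call is a UDR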
  /**
   * Applies a mapping to a collation list.
   *
   * @param mapping Mapping
   * @param collationList Collation list
   * @return collation list with mapping applied to each field
   */
  public static List<RelCollation> apply(
      Mappings.TargetMapping mapping, List<RelCollation> collationList) {
    final List<RelCollation> newCollationList = new ArrayList<RelCollation>();
    for (RelCollation collation : collationList) {
      final List<RelFieldCollation> newFieldCollationList = new ArrayList<RelFieldCollation>();
      for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
        final RelFieldCollation newFieldCollation = apply(mapping, fieldCollation);
        if (newFieldCollation == null) {
          // This field is not mapped. Stop here. The leading edge
          // of the collation is still valid (although it's useless
          // if it's empty).
          break;
        }
        newFieldCollationList.add(newFieldCollation);
      }
      // Truncating collations to their leading edge creates empty
      // and duplicate collations. Ignore these.
      if (!newFieldCollationList.isEmpty()) {
        final RelCollationImpl newCollation = new RelCollationImpl(newFieldCollationList);
        if (!newCollationList.contains(newCollation)) {
          newCollationList.add(newCollation);
        }
      }
    }

    // REVIEW: There might be redundant collations in the list. For example,
    // in {(x), (x, y)}, (x) is redundant because it is a leading edge of
    // another collation in the list. Could remove redundant collations.

    return newCollationList;
  }
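A hedged usage sketch, assuming the Mappings and RelCollation APIs used elsewhere in these examples (the concrete field numbers are illustrative): a projection that keeps fields 1 and 2 of a three-field input remaps a collation on field 1 to a collation on field 0.

  Mapping mapping = Mappings.create(MappingType.InverseSurjection, 3, 2);
  mapping.set(1, 0); // source field 1 -> target field 0
  mapping.set(2, 1); // source field 2 -> target field 1
  List<RelCollation> collations =
      Collections.singletonList(
          RelCollationImpl.of(new RelFieldCollation(1)));
  List<RelCollation> mapped = apply(mapping, collations);
  // mapped contains a single collation on field 0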
  /**
   * Determines if a filter condition is a simple one and returns the parameters corresponding to
   * the simple filters.
   *
   * @param calcRel original CalcRel
   * @param filterExprs filter expression being analyzed
   * @param filterList returns the list of filter ordinals in the simple expression
   * @param literals returns the list of literals to be used in the simple comparisons
   * @param op returns the operator to be used in the simple comparison
   * @return true if the filter condition is simple
   */
  private boolean isConditionSimple(
      CalcRel calcRel,
      RexNode filterExprs,
      List<Integer> filterList,
      List<RexLiteral> literals,
      List<CompOperatorEnum> op) {
    SargFactory sargFactory = new SargFactory(calcRel.getCluster().getRexBuilder());
    SargRexAnalyzer rexAnalyzer = sargFactory.newRexAnalyzer(true);
    List<SargBinding> sargBindingList = rexAnalyzer.analyzeAll(filterExprs);

    // Currently, it's all or nothing.  So, if there are filters rejected
    // by the analyzer, we can't process a subset using the reshape
    // exec stream
    if (rexAnalyzer.getNonSargFilterRexNode() != null) {
      return false;
    }

    List<RexInputRef> filterCols = new ArrayList<RexInputRef>();
    List<RexNode> filterOperands = new ArrayList<RexNode>();
    if (FennelRelUtil.extractSimplePredicates(sargBindingList, filterCols, filterOperands, op)) {
      for (RexInputRef filterCol : filterCols) {
        filterList.add(filterCol.getIndex());
      }
      for (RexNode operand : filterOperands) {
        literals.add((RexLiteral) operand);
      }
      return true;
    } else {
      return false;
    }
  }
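A hypothetical caller (variable names are illustrative, and the CalcRel is assumed to have a filter condition): the three output lists are only meaningful when the method returns true.

  List<Integer> filterOrdinals = new ArrayList<Integer>();
  List<RexLiteral> filterLiterals = new ArrayList<RexLiteral>();
  List<CompOperatorEnum> compOp = new ArrayList<CompOperatorEnum>();
  RexProgram program = calcRel.getProgram();
  RexNode condition = program.expandLocalRef(program.getCondition());
  if (isConditionSimple(calcRel, condition, filterOrdinals, filterLiterals, compOp)) {
    // column filterOrdinals.get(i) is compared against filterLiterals.get(i),
    // all using the single comparison operator returned via compOp
  }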
 public List<SqlMoniker> getAllSchemaObjectNames(List<String> names) {
   List<SqlMoniker> result;
   switch (names.size()) {
     case 0:
       // looking for schema names
       result = new ArrayList<SqlMoniker>();
       for (MockSchema schema : schemas.values()) {
         result.add(new SqlMonikerImpl(schema.name, SqlMonikerType.Schema));
       }
       return result;
     case 1:
       // looking for table names in the given schema
       MockSchema schema = schemas.get(names.get(0));
       if (schema == null) {
         return Collections.emptyList();
       }
       result = new ArrayList<SqlMoniker>();
       for (String tableName : schema.tableNames) {
         result.add(new SqlMonikerImpl(tableName, SqlMonikerType.Table));
       }
       return result;
     default:
       return Collections.emptyList();
   }
 }
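A small usage sketch against this mock catalog, assuming the enclosing reader instance is available as catalogReader (the schema name is illustrative):

 List<SqlMoniker> schemaMonikers =
     catalogReader.getAllSchemaObjectNames(Collections.<String>emptyList());
 // one Schema moniker per registered MockSchema

 List<SqlMoniker> tableMonikers =
     catalogReader.getAllSchemaObjectNames(Collections.singletonList("SALES"));
 // one Table moniker per table in SALES, or an empty list if the schema is unknown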
        public void onMatch(RelOptRuleCall call) {
          CalcRel calc = (CalcRel) call.getRels()[0];
          RexProgram program = calc.getProgram();
          final List<RexNode> exprList = program.getExprList();

          // Form a list of expressions with sub-expressions fully
          // expanded.
          final List<RexNode> expandedExprList = new ArrayList<RexNode>(exprList.size());
          final RexShuttle shuttle =
              new RexShuttle() {
                public RexNode visitLocalRef(RexLocalRef localRef) {
                  return expandedExprList.get(localRef.getIndex());
                }
              };
          for (RexNode expr : exprList) {
            expandedExprList.add(expr.accept(shuttle));
          }
          if (reduceExpressions(calc, expandedExprList)) {
            final RexProgramBuilder builder =
                new RexProgramBuilder(
                    calc.getChild().getRowType(), calc.getCluster().getRexBuilder());
            List<RexLocalRef> list = new ArrayList<RexLocalRef>();
            for (RexNode expr : expandedExprList) {
              list.add(builder.registerInput(expr));
            }
            if (program.getCondition() != null) {
              final int conditionIndex = program.getCondition().getIndex();
              final RexNode newConditionExp = expandedExprList.get(conditionIndex);
              if (newConditionExp.isAlwaysTrue()) {
                // condition is always TRUE - drop it
              } else if ((newConditionExp instanceof RexLiteral)
                  || RexUtil.isNullLiteral(newConditionExp, true)) {
                // condition is always NULL or FALSE - replace calc
                // with empty
                call.transformTo(new EmptyRel(calc.getCluster(), calc.getRowType()));
                return;
              } else {
                builder.addCondition(list.get(conditionIndex));
              }
            }
            int k = 0;
            for (RexLocalRef projectExpr : program.getProjectList()) {
              final int index = projectExpr.getIndex();
              builder.addProject(
                  list.get(index).getIndex(),
                  program.getOutputRowType().getFieldList().get(k++).getName());
            }
            call.transformTo(
                new CalcRel(
                    calc.getCluster(),
                    calc.getTraits(),
                    calc.getChild(),
                    calc.getRowType(),
                    builder.getProgram(),
                    calc.getCollationList()));

            // New plan is absolutely better than old plan.
            call.getPlanner().setImportance(calc, 0.0);
          }
        }
    private void analyzeCall(RexCall call, Constancy callConstancy) {
      parentCallTypeStack.add(call.getOperator());

      // visit operands, pushing their states onto stack
      super.visitCall(call);

      // look for NON_CONSTANT operands
      int nOperands = call.getOperands().length;
      List<Constancy> operandStack = stack.subList(stack.size() - nOperands, stack.size());
      for (Constancy operandConstancy : operandStack) {
        if (operandConstancy == Constancy.NON_CONSTANT) {
          callConstancy = Constancy.NON_CONSTANT;
        }
      }

      // Even if all operands are constant, the call itself may
      // be non-deterministic.
      if (!call.getOperator().isDeterministic()) {
        callConstancy = Constancy.NON_CONSTANT;
      } else if (call.getOperator().isDynamicFunction()) {
        // We can reduce the call to a constant, but we can't
        // cache the plan if the function is dynamic
        preparingStmt.disableStatementCaching();
      }

      // Row operator itself can't be reduced to a literal, but if
      // the operands are constants, we still want to reduce those
      if ((callConstancy == Constancy.REDUCIBLE_CONSTANT)
          && (call.getOperator() instanceof SqlRowOperator)) {
        callConstancy = Constancy.NON_CONSTANT;
      }

      if (callConstancy == Constancy.NON_CONSTANT) {
        // any REDUCIBLE_CONSTANT children are now known to be maximal
        // reducible subtrees, so they can be added to the result
        // list
        for (int iOperand = 0; iOperand < nOperands; ++iOperand) {
          Constancy constancy = operandStack.get(iOperand);
          if (constancy == Constancy.REDUCIBLE_CONSTANT) {
            addResult(call.getOperands()[iOperand]);
          }
        }

        // if this cast expression can't be reduced to a literal,
        // then see if we can remove the cast
        if (call.getOperator() == SqlStdOperatorTable.castFunc) {
          reduceCasts(call);
        }
      }

      // pop operands off of the stack
      operandStack.clear();

      // pop this parent call operator off the stack
      parentCallTypeStack.remove(parentCallTypeStack.size() - 1);

      // push constancy result for this call onto stack
      stack.add(callConstancy);
    }
  /**
   * Creates projection list for scan. If the projection contains expressions, then the input
   * references from those expressions are extracted and that list of references becomes the
   * projection list.
   *
   * @param origScan row scan underneath the project
   * @param projRel ProjectRel that we will be creating the projection for
   * @param projectedColumns returns a list of the projected column ordinals, if it is possible to
   *     project
   * @param preserveExprCondition condition that identifies special expressions that should be
   *     preserved in the projection
   * @param defaultExpr expression to be used in the projection if no fields or special columns are
   *     selected
   * @param newProjList returns a new projection RelNode corresponding to a projection that now
   *     references a rowscan that is projecting the input references that were extracted from the
   *     original projection expressions; if the original projection didn't contain expressions,
   *     then this list is returned empty
   * @return true if columns in projection list from the scan need to be renamed
   */
  public boolean createProjectionList(
      FennelRel origScan,
      ProjectRel projRel,
      List<Integer> projectedColumns,
      PushProjector.ExprCondition preserveExprCondition,
      RexNode defaultExpr,
      List<ProjectRel> newProjList) {
    // REVIEW:  what about AnonFields?
    int n = projRel.getChildExps().length;
    RelDataType rowType = origScan.getRowType();
    RelDataType projType = projRel.getRowType();
    RelDataTypeField[] projFields = projType.getFields();
    List<Integer> tempProjList = new ArrayList<Integer>();
    boolean needRename = false;
    for (int i = 0; i < n; ++i) {
      RexNode exp = projRel.getChildExps()[i];
      List<String> origFieldName = new ArrayList<String>();
      Integer projIndex = mapProjCol(exp, origFieldName, rowType);
      if (projIndex == null) {
        // there are expressions in the projection; we need to extract
        // all input references and any special expressions from the
        // projection
        PushProjector pushProject =
            new PushProjector(projRel, null, origScan, preserveExprCondition);
        ProjectRel newProject = pushProject.convertProject(defaultExpr);
        if (newProject == null) {
          // can't do any further projection
          return false;
        }
        newProjList.add(newProject);

        // using the input references we just extracted, it should now
        // be possible to create a projection for the row scan
        needRename =
            createProjectionList(
                origScan,
                (ProjectRel) newProject.getChild(),
                projectedColumns,
                preserveExprCondition,
                defaultExpr,
                newProjList);
        assert (projectedColumns.size() > 0);
        return needRename;
      }
      String projFieldName = projFields[i].getName();
      if (!projFieldName.equals(origFieldName.get(0))) {
        needRename = true;
      }
      tempProjList.add(projIndex);
    }

    // now that we've determined it is possible to project, add the
    // ordinals to the return list
    projectedColumns.addAll(tempProjList);
    return needRename;
  }
 /** Splits a condition into conjunctions that do or do not intersect with a given bit set. */
 static void split(
     RexNode condition, BitSet bitSet, List<RexNode> intersecting, List<RexNode> nonIntersecting) {
   for (RexNode node : RelOptUtil.conjunctions(condition)) {
     BitSet inputBitSet = RelOptUtil.InputFinder.bits(node);
     if (bitSet.intersects(inputBitSet)) {
       intersecting.add(node);
     } else {
       nonIntersecting.add(node);
     }
   }
 }
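A hedged usage sketch (joinCondition and leftFieldCount are assumed to be in scope): pull out the conjuncts of a join condition that touch at least one field of the left input.

 BitSet leftFields = Util.bitSetBetween(0, leftFieldCount);
 List<RexNode> touchesLeft = new ArrayList<RexNode>();
 List<RexNode> rest = new ArrayList<RexNode>();
 split(joinCondition, leftFields, touchesLeft, rest);
 // touchesLeft: conjuncts referencing at least one left field
 // rest: conjuncts referencing no left fields at all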
  /**
   * Combines the inputs into a JoinRel into an array of inputs.
   *
   * @param join original join
   * @param left left input into join
   * @param right right input into join
   * @param projFieldsList returns a list of the new combined projection fields
   * @param joinFieldRefCountsList returns a list of the new combined join field reference counts
   * @return combined left and right inputs in an array
   */
  private RelNode[] combineInputs(
      JoinRel join,
      RelNode left,
      RelNode right,
      List<BitSet> projFieldsList,
      List<int[]> joinFieldRefCountsList) {
    // leave the null generating sides of an outer join intact; don't
    // pull up those children inputs into the array we're constructing
    int nInputs;
    int nInputsOnLeft;
    MultiJoinRel leftMultiJoin = null;
    JoinRelType joinType = join.getJoinType();
    boolean combineLeft = canCombine(left, joinType.generatesNullsOnLeft());
    if (combineLeft) {
      leftMultiJoin = (MultiJoinRel) left;
      nInputs = left.getInputs().length;
      nInputsOnLeft = nInputs;
    } else {
      nInputs = 1;
      nInputsOnLeft = 1;
    }
    MultiJoinRel rightMultiJoin = null;
    boolean combineRight = canCombine(right, joinType.generatesNullsOnRight());
    if (combineRight) {
      rightMultiJoin = (MultiJoinRel) right;
      nInputs += right.getInputs().length;
    } else {
      nInputs += 1;
    }

    RelNode[] newInputs = new RelNode[nInputs];
    int i = 0;
    if (combineLeft) {
      for (; i < left.getInputs().length; i++) {
        newInputs[i] = leftMultiJoin.getInput(i);
        projFieldsList.add(((MultiJoinRel) left).getProjFields()[i]);
        joinFieldRefCountsList.add(((MultiJoinRel) left).getJoinFieldRefCountsMap().get(i));
      }
    } else {
      newInputs[0] = left;
      i = 1;
      projFieldsList.add(null);
      joinFieldRefCountsList.add(new int[left.getRowType().getFieldCount()]);
    }
    if (combineRight) {
      for (; i < nInputs; i++) {
        newInputs[i] = rightMultiJoin.getInput(i - nInputsOnLeft);
        projFieldsList.add(((MultiJoinRel) right).getProjFields()[i - nInputsOnLeft]);
        joinFieldRefCountsList.add(
            ((MultiJoinRel) right).getJoinFieldRefCountsMap().get(i - nInputsOnLeft));
      }
    } else {
      newInputs[i] = right;
      projFieldsList.add(null);
      joinFieldRefCountsList.add(new int[right.getRowType().getFieldCount()]);
    }

    return newInputs;
  }
 private List<SqlNode> toSql(RexProgram program, List<RexNode> operandList) {
   final List<SqlNode> list = new ArrayList<SqlNode>();
   for (RexNode rex : operandList) {
     list.add(toSql(program, rex));
   }
   return list;
 }
  /** Variant of {@link #trimFields(RelNode, BitSet, Set)} for {@link TableFunctionRel}. */
  public TrimResult trimFields(
      TableFunctionRel tabFun, BitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
    final RelDataType rowType = tabFun.getRowType();
    final int fieldCount = rowType.getFieldCount();
    List<RelNode> newInputs = new ArrayList<RelNode>();

    for (RelNode input : tabFun.getInputs()) {
      final int inputFieldCount = input.getRowType().getFieldCount();
      BitSet inputFieldsUsed = Util.bitSetBetween(0, inputFieldCount);

      // Create input with trimmed columns.
      final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
      TrimResult trimResult = trimChildRestore(tabFun, input, inputFieldsUsed, inputExtraFields);
      assert trimResult.right.isIdentity();
      newInputs.add(trimResult.left);
    }

    TableFunctionRel newTabFun = tabFun;
    if (!tabFun.getInputs().equals(newInputs)) {
      newTabFun = tabFun.copy(tabFun.getTraitSet(), newInputs);
    }
    assert newTabFun.getClass() == tabFun.getClass();

    // Always project all fields.
    Mapping mapping = Mappings.createIdentity(fieldCount);
    return new TrimResult(newTabFun, mapping);
  }
 /**
  * Applies a mapping to a list of field collations.
  *
  * @param mapping Mapping
  * @param fieldCollations Field collations
  * @return collations with mapping applied
  */
 public static List<RelFieldCollation> apply(
     Mapping mapping, List<RelFieldCollation> fieldCollations) {
   final List<RelFieldCollation> newFieldCollations =
       new ArrayList<RelFieldCollation>(fieldCollations.size());
   for (RelFieldCollation fieldCollation : fieldCollations) {
     newFieldCollations.add(apply(mapping, fieldCollation));
   }
   return newFieldCollations;
 }
  /**
   * Determines if a projection is simple.
   *
   * @param calcRel CalcRel containing the projection
   * @param projOrdinals if the projection is simple, returns the ordinals of the projection inputs
   * @return rowtype corresponding to the projection, provided it is simple; otherwise null is
   *     returned
   */
  private RelDataType isProjectSimple(CalcRel calcRel, List<Integer> projOrdinals) {
    // Loop through projection expressions.  If we find a non-simple
    // projection expression, simply return.
    RexProgram program = calcRel.getProgram();
    List<RexLocalRef> projList = program.getProjectList();
    int nProjExprs = projList.size();
    RelDataType[] types = new RelDataType[nProjExprs];
    String[] fieldNames = new String[nProjExprs];
    RelDataTypeField[] projFields = calcRel.getRowType().getFields();

    for (int i = 0; i < nProjExprs; i++) {
      RexNode projExpr = program.expandLocalRef(projList.get(i));
      if (projExpr instanceof RexInputRef) {
        projOrdinals.add(((RexInputRef) projExpr).getIndex());
        types[i] = projExpr.getType();
        fieldNames[i] = projFields[i].getName();
        continue;
      } else if (!(projExpr instanceof RexCall)) {
        return null;
      }

      RexCall rexCall = (RexCall) projExpr;
      if (rexCall.getOperator() != SqlStdOperatorTable.castFunc) {
        return null;
      }
      RexNode castOperand = rexCall.getOperands()[0];
      if (!(castOperand instanceof RexInputRef)) {
        return null;
      }
      RelDataType castType = projExpr.getType();
      RelDataType origType = castOperand.getType();
      if (isCastSimple(origType, castType)) {
        projOrdinals.add(((RexInputRef) castOperand).getIndex());
        types[i] = castType;
        fieldNames[i] = projFields[i].getName();
      } else {
        return null;
      }
    }

    // return the rowtype corresponding to the output of the projection
    return calcRel.getCluster().getTypeFactory().createStructType(types, fieldNames);
  }
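A hypothetical caller: when the projection consists only of input references and simple casts of input references, the ordinals and reshaped row type come back; otherwise null signals that this path can't be used.

    List<Integer> projOrdinals = new ArrayList<Integer>();
    RelDataType reshapeRowType = isProjectSimple(calcRel, projOrdinals);
    if (reshapeRowType != null) {
      // projOrdinals.get(i) is the input column feeding output field i of reshapeRowType
    }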
  /**
   * Variant of {@link #trimFields(RelNode, BitSet, Set)} for {@link SetOpRel} (including UNION and
   * UNION ALL).
   */
  public TrimResult trimFields(
      SetOpRel setOp, BitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
    final RelDataType rowType = setOp.getRowType();
    final int fieldCount = rowType.getFieldCount();
    int changeCount = 0;

    // Fennel abhors an empty row type, so pretend that the parent rel
    // wants the last field. (The last field is the least likely to be a
    // system field.)
    if (fieldsUsed.isEmpty()) {
      fieldsUsed.set(rowType.getFieldCount() - 1);
    }

    // Compute the desired field mapping. Give the consumer the fields they
    // want, in the order that they appear in the bitset.
    final Mapping mapping = createMapping(fieldsUsed, fieldCount);

    // Create input with trimmed columns.
    final List<RelNode> newInputs = new ArrayList<RelNode>();
    for (RelNode input : setOp.getInputs()) {
      TrimResult trimResult = trimChild(setOp, input, fieldsUsed, extraFields);
      RelNode newInput = trimResult.left;
      final Mapping inputMapping = trimResult.right;

      // We want "mapping", the input gave us "inputMapping", compute
      // "remaining" mapping.
      //    |                   |                |
      //    |---------------- mapping ---------->|
      //    |-- inputMapping -->|                |
      //    |                   |-- remaining -->|
      //
      // For instance, suppose we have columns [a, b, c, d],
      // the consumer asked for mapping = [b, d],
      // and the transformed input has columns inputMapping = [d, a, b].
      // remaining will permute [b, d] to [d, a, b].
      Mapping remaining = Mappings.divide(mapping, inputMapping);

      // Create a projection; does nothing if remaining is identity.
      newInput = CalcRel.projectMapping(newInput, remaining, null);

      if (input != newInput) {
        ++changeCount;
      }
      newInputs.add(newInput);
    }

    // If the input is unchanged, and we need to project all columns,
    // there's nothing to do.
    if (changeCount == 0 && mapping.isIdentity()) {
      return new TrimResult(setOp, mapping);
    }

    RelNode newSetOp = setOp.copy(setOp.getTraitSet(), newInputs);
    return new TrimResult(newSetOp, mapping);
  }
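A sketch of the mapping arithmetic described in the comment above, assuming partially populated InverseSurjection mappings (built the same way as elsewhere in these examples) are acceptable to Mappings.divide; it reuses the same [a, b, c, d] example.

    Mapping wanted = Mappings.create(MappingType.InverseSurjection, 4, 2);
    wanted.set(1, 0); // b -> desired output position 0
    wanted.set(3, 1); // d -> desired output position 1

    Mapping produced = Mappings.create(MappingType.InverseSurjection, 4, 3);
    produced.set(3, 0); // the trimmed child emits d first,
    produced.set(0, 1); // then a,
    produced.set(1, 2); // then b

    Mapping remaining = Mappings.divide(wanted, produced);
    // remaining maps the child's output [d, a, b] onto the desired [b, d]:
    // child position 2 (b) -> 0, child position 0 (d) -> 1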
 /** Converts a call to an aggregate function to an expression. */
 public SqlNode toSql(AggregateCall aggCall) {
   SqlOperator op = (SqlAggFunction) aggCall.getAggregation();
   final List<SqlNode> operands = Expressions.list();
   for (int arg : aggCall.getArgList()) {
     operands.add(field(arg));
   }
   return op.createCall(
       aggCall.isDistinct() ? SqlSelectKeyword.DISTINCT.symbol(POS) : null,
       POS,
       operands.toArray(new SqlNode[operands.size()]));
 }
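A hedged usage sketch (bigintType is an assumed RelDataType, and the lowercase operator-field naming follows the convention seen elsewhere in these examples): rendering SUM over input ordinal 2.

  AggregateCall sumCall =
      new AggregateCall(
          SqlStdOperatorTable.sumOperator,
          false,
          Collections.singletonList(2),
          bigintType,
          "TOTAL");
  SqlNode sqlNode = toSql(sumCall);
  // unparses roughly to SUM(<field 2>); the exact identifier depends on field(arg)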
    private void reduceCasts(RexCall outerCast) {
      RexNode[] operands = outerCast.getOperands();
      if (operands.length != 1) {
        return;
      }
      RelDataType outerCastType = outerCast.getType();
      RelDataType operandType = operands[0].getType();
      if (operandType.equals(outerCastType)) {
        removableCasts.add(outerCast);
        return;
      }

      // See if the reduction
      // CAST(CAST(x AS type) AS type NOT NULL)
      // -> CAST(x AS type NOT NULL)
      // applies.  TODO jvs 15-Dec-2008:  consider
      // similar cases for precision changes.
      if (!(operands[0] instanceof RexCall)) {
        return;
      }
      RexCall innerCast = (RexCall) operands[0];
      if (innerCast.getOperator() != SqlStdOperatorTable.castFunc) {
        return;
      }
      if (innerCast.getOperands().length != 1) {
        return;
      }
      RelDataTypeFactory typeFactory = preparingStmt.getFarragoTypeFactory();
      RelDataType outerTypeNullable = typeFactory.createTypeWithNullability(outerCastType, true);
      RelDataType innerTypeNullable = typeFactory.createTypeWithNullability(operandType, true);
      if (outerTypeNullable != innerTypeNullable) {
        return;
      }
      if (operandType.isNullable()) {
        removableCasts.add(innerCast);
      }
    }
  public void onMatch(RelOptRuleCall call) {
    assert matches(call);
    final JoinRel join = (JoinRel) call.rels[0];
    final List<Integer> leftKeys = new ArrayList<Integer>();
    final List<Integer> rightKeys = new ArrayList<Integer>();
    RelNode right = join.getRight();
    final RelNode left = join.getLeft();
    RexNode remainingCondition =
        RelOptUtil.splitJoinCondition(left, right, join.getCondition(), leftKeys, rightKeys);
    assert leftKeys.size() == rightKeys.size();
    final List<CorrelatorRel.Correlation> correlationList =
        new ArrayList<CorrelatorRel.Correlation>();
    if (leftKeys.size() > 0) {
      final RelOptCluster cluster = join.getCluster();
      final RexBuilder rexBuilder = cluster.getRexBuilder();
      int k = 0;
      RexNode condition = null;
      for (Integer leftKey : leftKeys) {
        Integer rightKey = rightKeys.get(k++);
        final String dyn_inIdStr = cluster.getQuery().createCorrel();
        final int dyn_inId = RelOptQuery.getCorrelOrdinal(dyn_inIdStr);

        // Create correlation to say 'each row, set variable #id
        // to the value of column #leftKey'.
        correlationList.add(new CorrelatorRel.Correlation(dyn_inId, leftKey));
        condition =
            RelOptUtil.andJoinFilters(
                rexBuilder,
                condition,
                rexBuilder.makeCall(
                    SqlStdOperatorTable.equalsOperator,
                    rexBuilder.makeInputRef(
                        right.getRowType().getFieldList().get(rightKey).getType(), rightKey),
                    rexBuilder.makeCorrel(
                        left.getRowType().getFieldList().get(leftKey).getType(), dyn_inIdStr)));
      }
      right = CalcRel.createFilter(right, condition);
    }
    RelNode newRel =
        new CorrelatorRel(
            join.getCluster(),
            left,
            right,
            remainingCondition,
            correlationList,
            join.getJoinType());
    call.transformTo(newRel);
  }
 public RelOptTable getTableForMember(List<String> names) {
   final SqlValidatorTable table = catalogReader.getTable(names);
   final RelDataType rowType = table.getRowType();
   final List<RelCollation> collationList = deduceMonotonicity(table);
   if (names.size() < 3) {
     String[] newNames2 = {"CATALOG", "SALES", ""};
     List<String> newNames = new ArrayList<String>();
     int i = 0;
     while (newNames.size() < newNames2.length) {
       newNames.add(i, newNames2[i]);
       ++i;
     }
     names = newNames;
   }
   return createColumnSet(table, names, rowType, collationList);
 }
 public RexNode visitCall(RexCall call) {
   List<RexNode> normalizedOperands = new ArrayList<RexNode>();
   int diffCount = 0;
   for (RexNode operand : call.getOperands()) {
     operand.accept(this);
     final RexNode normalizedOperand = lookup(operand);
     normalizedOperands.add(normalizedOperand);
     if (normalizedOperand != operand) {
       ++diffCount;
     }
   }
   if (diffCount > 0) {
     call = call.clone(call.getType(), normalizedOperands);
   }
   return register(call);
 }
  private static List<RelCollation> deduceMonotonicity(Prepare.PreparingTable table) {
    final List<RelCollation> collationList = new ArrayList<RelCollation>();

    // Deduce which fields the table is sorted on.
    int i = -1;
    for (RelDataTypeField field : table.getRowType().getFieldList()) {
      ++i;
      final SqlMonotonicity monotonicity = table.getMonotonicity(field.getName());
      if (monotonicity != SqlMonotonicity.NotMonotonic) {
        final RelFieldCollation.Direction direction =
            monotonicity.isDecreasing()
                ? RelFieldCollation.Direction.Descending
                : RelFieldCollation.Direction.Ascending;
        collationList.add(
            RelCollationImpl.of(
                new RelFieldCollation(i, direction, RelFieldCollation.NullDirection.UNSPECIFIED)));
      }
    }
    return collationList;
  }
    private List<RelCollation> deduceMonotonicity(SqlValidatorTable table) {
      final RelDataType rowType = table.getRowType();
      final List<RelCollation> collationList = new ArrayList<RelCollation>();

      // Deduce which fields the table is sorted on.
      int i = -1;
      for (RelDataTypeField field : rowType.getFieldList()) {
        ++i;
        final SqlMonotonicity monotonicity = table.getMonotonicity(field.getName());
        if (monotonicity != SqlMonotonicity.NOT_MONOTONIC) {
          final RelFieldCollation.Direction direction =
              monotonicity.isDecreasing()
                  ? RelFieldCollation.Direction.DESCENDING
                  : RelFieldCollation.Direction.ASCENDING;
          collationList.add(
              RelCollationImpl.of(
                  new RelFieldCollation(
                      i, direction, RelFieldCollation.NullDirection.UNSPECIFIED)));
        }
      }
      return collationList;
    }
 private List<AggregateCall> transformAggCalls(
     RelDataTypeFactory typeFactory, int nGroupCols, List<AggregateCall> origCalls) {
   List<AggregateCall> newCalls = new ArrayList<AggregateCall>();
   int iInput = nGroupCols;
   for (AggregateCall origCall : origCalls) {
     if (origCall.isDistinct()
         || !SUPPORTED_AGGREGATES.containsKey(origCall.getAggregation().getClass())) {
       return null;
     }
     Aggregation aggFun;
     RelDataType aggType;
     if (origCall.getAggregation().getName().equals("COUNT")) {
       aggFun = new SqlSumEmptyIsZeroAggFunction(origCall.getType());
       SqlAggFunction af = (SqlAggFunction) aggFun;
       final AggregateRelBase.AggCallBinding binding =
           new AggregateRelBase.AggCallBinding(
               typeFactory, af, Collections.singletonList(origCall.getType()), nGroupCols);
        // COUNT(any) is always NOT NULL, but the nullability of SUM may
        // depend on the number of columns in GROUP BY.
        // We use SUM0 here because we know we will face neither nullable
        // inputs nor an empty input set.
       aggType = af.inferReturnType(binding);
     } else {
       aggFun = origCall.getAggregation();
       aggType = origCall.getType();
     }
     AggregateCall newCall =
         new AggregateCall(
             aggFun,
             origCall.isDistinct(),
             Collections.singletonList(iInput),
             aggType,
             origCall.getName());
     newCalls.add(newCall);
     ++iInput;
   }
   return newCalls;
 }
 /**
  * Trims a child relational expression, then adds back a dummy project to restore the fields that
  * were removed.
  *
  * <p>Sounds pointless? It causes unused fields to be removed further down the tree (towards the
  * leaves), but it ensures that the consuming relational expression continues to see the same
  * fields.
  *
  * @param rel Relational expression
  * @param input Input relational expression, whose fields to trim
  * @param fieldsUsed Bitmap of fields needed by the consumer
  * @return New relational expression and its field mapping
  */
 protected TrimResult trimChildRestore(
     RelNode rel, RelNode input, BitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
   TrimResult trimResult = trimChild(rel, input, fieldsUsed, extraFields);
   if (trimResult.right.isIdentity()) {
     return trimResult;
   }
   final RelDataType rowType = input.getRowType();
   List<RelDataTypeField> fieldList = rowType.getFieldList();
   final List<RexNode> exprList = new ArrayList<RexNode>();
   final List<String> nameList = rowType.getFieldNames();
   RexBuilder rexBuilder = rel.getCluster().getRexBuilder();
   assert trimResult.right.getSourceCount() == fieldList.size();
   for (int i = 0; i < fieldList.size(); i++) {
     int source = trimResult.right.getTargetOpt(i);
     RelDataTypeField field = fieldList.get(i);
     exprList.add(
         source < 0
             ? rexBuilder.makeZeroLiteral(field.getType())
             : rexBuilder.makeInputRef(field.getType(), source));
   }
   RelNode project = CalcRel.createProject(trimResult.left, exprList, nameList);
   return new TrimResult(project, Mappings.createIdentity(fieldList.size()));
 }
  /**
   * Variant of {@link #trimFields(RelNode, BitSet, Set)} for {@link org.eigenbase.rel.ValuesRel}.
   */
  public TrimResult trimFields(
      ValuesRel values, BitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
    final RelDataType rowType = values.getRowType();
    final int fieldCount = rowType.getFieldCount();

    // If they are asking for no fields, we can't give them what they want,
    // because zero-column records are illegal. Give them the last field,
    // which is unlikely to be a system field.
    if (fieldsUsed.isEmpty()) {
      fieldsUsed = Util.bitSetBetween(fieldCount - 1, fieldCount);
    }

    // If all fields are used, return unchanged.
    if (fieldsUsed.equals(Util.bitSetBetween(0, fieldCount))) {
      Mapping mapping = Mappings.createIdentity(fieldCount);
      return new TrimResult(values, mapping);
    }

    List<List<RexLiteral>> newTuples = new ArrayList<List<RexLiteral>>();
    for (List<RexLiteral> tuple : values.getTuples()) {
      List<RexLiteral> newTuple = new ArrayList<RexLiteral>();
      for (int field : Util.toIter(fieldsUsed)) {
        newTuple.add(tuple.get(field));
      }
      newTuples.add(newTuple);
    }

    final Mapping mapping = createMapping(fieldsUsed, fieldCount);
    RelDataType newRowType =
        values
            .getCluster()
            .getTypeFactory()
            .createStructType(Mappings.apply3(mapping, rowType.getFieldList()));
    final ValuesRel newValues = new ValuesRel(values.getCluster(), newRowType, newTuples);
    return new TrimResult(newValues, mapping);
  }
  /** Variant of {@link #trimFields(RelNode, BitSet, Set)} for {@link AggregateRel}. */
  public TrimResult trimFields(
      AggregateRel aggregate, BitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
    // Fields:
    //
    // | sys fields | group fields | agg functions |
    //
    // Two kinds of trimming:
    //
    // 1. If agg rel has system fields but none of these are used, create an
    // agg rel with no system fields.
    //
    // 2. If aggregate functions are not used, remove them.
    //
    // But grouping fields stay, even if they are not used.

    final RelDataType rowType = aggregate.getRowType();

    // Compute which input fields are used.
    BitSet inputFieldsUsed = new BitSet();
    // 1. group fields are always used
    for (int i : Util.toIter(aggregate.getGroupSet())) {
      inputFieldsUsed.set(i);
    }
    // 2. agg functions
    for (AggregateCall aggCall : aggregate.getAggCallList()) {
      for (int i : aggCall.getArgList()) {
        inputFieldsUsed.set(i);
      }
    }

    // Create input with trimmed columns.
    final RelNode input = aggregate.getInput(0);
    final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
    final TrimResult trimResult = trimChild(aggregate, input, inputFieldsUsed, inputExtraFields);
    final RelNode newInput = trimResult.left;
    final Mapping inputMapping = trimResult.right;

    // If the input is unchanged, and we need to project all columns,
    // there's nothing to do.
    if (input == newInput && fieldsUsed.equals(Util.bitSetBetween(0, rowType.getFieldCount()))) {
      return new TrimResult(aggregate, Mappings.createIdentity(rowType.getFieldCount()));
    }

    // Which agg calls are used by our consumer?
    final int groupCount = aggregate.getGroupSet().cardinality();
    int j = groupCount;
    int usedAggCallCount = 0;
    for (int i = 0; i < aggregate.getAggCallList().size(); i++) {
      if (fieldsUsed.get(j++)) {
        ++usedAggCallCount;
      }
    }

    // Offset due to the number of system fields having changed.
    Mapping mapping =
        Mappings.create(
            MappingType.InverseSurjection, rowType.getFieldCount(), groupCount + usedAggCallCount);

    final BitSet newGroupSet = Mappings.apply(inputMapping, aggregate.getGroupSet());

    // Populate mapping of where to find the fields. System and grouping
    // fields first.
    for (IntPair pair : inputMapping) {
      if (pair.source < groupCount) {
        mapping.set(pair.source, pair.target);
      }
    }

    // Now create new agg calls, and populate mapping for them.
    final List<AggregateCall> newAggCallList = new ArrayList<AggregateCall>();
    j = groupCount;
    for (AggregateCall aggCall : aggregate.getAggCallList()) {
      if (fieldsUsed.get(j)) {
        AggregateCall newAggCall =
            new AggregateCall(
                aggCall.getAggregation(),
                aggCall.isDistinct(),
                Mappings.apply2(inputMapping, aggCall.getArgList()),
                aggCall.getType(),
                aggCall.getName());
        if (newAggCall.equals(aggCall)) {
          newAggCall = aggCall; // immutable -> canonize to save space
        }
        mapping.set(j, groupCount + newAggCallList.size());
        newAggCallList.add(newAggCall);
      }
      ++j;
    }

    RelNode newAggregate =
        new AggregateRel(aggregate.getCluster(), newInput, newGroupSet, newAggCallList);

    assert newAggregate.getClass() == aggregate.getClass();

    return new TrimResult(newAggregate, mapping);
  }
  public void onMatch(RelOptRuleCall call) {
    AggregateRel aggRel = call.rel(0);
    UnionRel unionRel = call.rel(1);

    if (!unionRel.all) {
      // This transformation is only valid for UNION ALL.
      // Consider t1(i) with rows (5), (5) and t2(i) with
      // rows (5), (10), and the query
      // select sum(i) from (select i from t1) union (select i from t2).
      // The correct answer is 15.  If we apply the transformation,
      // we get
      // select sum(i) from
      // (select sum(i) as i from t1) union (select sum(i) as i from t2)
      // which yields 25 (incorrect).
      return;
    }

    RelOptCluster cluster = unionRel.getCluster();

    List<AggregateCall> transformedAggCalls =
        transformAggCalls(
            aggRel.getCluster().getTypeFactory(),
            aggRel.getGroupSet().cardinality(),
            aggRel.getAggCallList());
    if (transformedAggCalls == null) {
      // we've detected the presence of something like AVG,
      // which we can't handle
      return;
    }

    boolean anyTransformed = false;

    // create corresponding aggs on top of each union child
    List<RelNode> newUnionInputs = new ArrayList<RelNode>();
    for (RelNode input : unionRel.getInputs()) {
      boolean alreadyUnique = RelMdUtil.areColumnsDefinitelyUnique(input, aggRel.getGroupSet());

      if (alreadyUnique) {
        newUnionInputs.add(input);
      } else {
        anyTransformed = true;
        newUnionInputs.add(
            new AggregateRel(cluster, input, aggRel.getGroupSet(), aggRel.getAggCallList()));
      }
    }

    if (!anyTransformed) {
      // none of the children could benefit from the pushdown,
      // so bail out (preventing the infinite loop to which most
      // planners would succumb)
      return;
    }

    // create a new union whose children are the aggs created above
    UnionRel newUnionRel = new UnionRel(cluster, newUnionInputs, true);

    AggregateRel newTopAggRel =
        new AggregateRel(cluster, newUnionRel, aggRel.getGroupSet(), transformedAggCalls);

    // In case we transformed any COUNT (which is always NOT NULL)
    // to SUM (which is always NULLABLE), cast back to keep the
    // planner happy.
    RelNode castRel = RelOptUtil.createCastRel(newTopAggRel, aggRel.getRowType(), false);

    call.transformTo(castRel);
  }
 protected Integer mapFieldRef(RexNode exp, List<String> origFieldName, RelDataType rowType) {
   RexInputRef fieldAccess = (RexInputRef) exp;
   origFieldName.add(rowType.getFields()[fieldAccess.getIndex()].getName());
   return fieldAccess.getIndex();
 }
 public Void visitFieldAccess(RexFieldAccess fieldAccess) {
   fieldAccessList.add(fieldAccess);
   return null;
 }
 public void addTable(String name) {
   tableNames.add(name);
 }
 /**
  * Adds a provider, giving it higher priority than all those already in chain. Chain order
  * matters, since the first provider which answers a query is used.
  *
  * @param provider provider to add
  */
 public void addProvider(RelMetadataProvider provider) {
   providers.add(0, provider);
 }
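A usage sketch of the chain ordering this describes (the chained-provider and default-provider constructors are assumptions, and MyCostModelProvider is a hypothetical custom provider): the provider added last is consulted first.

  ChainedRelMetadataProvider chain = new ChainedRelMetadataProvider();
  chain.addProvider(new DefaultRelMetadataProvider()); // consulted last
  chain.addProvider(new MyCostModelProvider());        // consulted first; wins on any query it answers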