Code Example #1
File: RexNodeConverter.java    Project: Leolh/hive
 protected RexNode convert(ExprNodeColumnDesc col) throws SemanticException {
   InputCtx ic = getInputCtx(col);
   int pos = ic.hiveNameToPosMap.get(col.getColumn());
   return cluster
       .getRexBuilder()
       .makeInputRef(
           ic.calciteInpDataType.getFieldList().get(pos).getType(),
           pos + ic.offsetInCalciteSchema);
 }
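To make the conversion concrete, here is a minimal, self-contained usage sketch (not taken from the Hive source; the class name, row type, and planner setup are assumed for illustration, and the later sketches in this section reuse the same typeFactory / rexBuilder / cluster):

import java.util.Arrays;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.volcano.VolcanoPlanner;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeName;

public class RexConversionSketch {
  public static void main(String[] args) {
    // Shared setup (assumed for illustration; Hive builds these inside its planner context).
    RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl();
    RexBuilder rexBuilder = new RexBuilder(typeFactory);
    RelOptCluster cluster = RelOptCluster.create(new VolcanoPlanner(), rexBuilder);

    // A row type standing in for ic.calciteInpDataType: (key INTEGER, value VARCHAR(20)).
    RelDataType rowType = typeFactory.builder()
        .add("key", SqlTypeName.INTEGER)
        .add("value", SqlTypeName.VARCHAR, 20)
        .build();

    // With pos = 1 and offsetInCalciteSchema = 0, the method above returns the input
    // reference $1, typed from field 1 of the row type.
    int pos = 1;
    RexNode inputRef = cluster.getRexBuilder()
        .makeInputRef(rowType.getFieldList().get(pos).getType(), pos /* + offsetInCalciteSchema */);
    System.out.println(inputRef); // $1
  }
}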
Code Example #2
File: RexNodeConverter.java    Project: Leolh/hive
 private RexNode convert(final ExprNodeFieldDesc fieldDesc) throws SemanticException {
   RexNode rexNode = convert(fieldDesc.getDesc());
   if (rexNode instanceof RexCall) {
     // regular case of accessing nested field in a column
     return cluster.getRexBuilder().makeFieldAccess(rexNode, fieldDesc.getFieldName(), true);
   } else {
     // This may happen for schema-less tables, where columns are dynamically
     // supplied by serdes.
     throw new CalciteSemanticException(
         "Unexpected rexnode : " + rexNode.getClass().getCanonicalName(),
         UnsupportedFeature.Schema_less_table);
   }
 }
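What makeFieldAccess yields can be sketched in isolation (reusing the setup from the sketch after Code Example #1; the struct type is illustrative, and the instanceof RexCall guard above is skipped here):

// A struct-typed expression such as a column s STRUCT<a:INT, b:STRING>.
RelDataType structType = typeFactory.builder()
    .add("a", SqlTypeName.INTEGER)
    .add("b", SqlTypeName.VARCHAR, 20)
    .build();
RexNode structExpr = rexBuilder.makeInputRef(structType, 0);

// Accessing field "b" by name (case-sensitive) produces the field-access expression $0.b.
RexNode fieldAccess = rexBuilder.makeFieldAccess(structExpr, "b", true);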
Code Example #3
File: HiveCalciteUtil.java    Project: sandeepks/hive
  public static RexNode getTypeSafePred(RelOptCluster cluster, RexNode rex, RelDataType rType) {
    RexNode typeSafeRex = rex;
    if ((typeSafeRex instanceof RexCall) && HiveCalciteUtil.isComparisonOp((RexCall) typeSafeRex)) {
      RexBuilder rb = cluster.getRexBuilder();
      List<RexNode> fixedPredElems = new ArrayList<RexNode>();
      RelDataType commonType =
          cluster.getTypeFactory().leastRestrictive(RexUtil.types(((RexCall) rex).getOperands()));
      for (RexNode rn : ((RexCall) rex).getOperands()) {
        fixedPredElems.add(rb.ensureType(commonType, rn, true));
      }

      typeSafeRex = rb.makeCall(((RexCall) typeSafeRex).getOperator(), fixedPredElems);
    }

    return typeSafeRex;
  }
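A hedged sketch of the normalization this performs (reusing the earlier setup; additional imports would be org.apache.calcite.rex.RexCall, org.apache.calcite.rex.RexUtil, and org.apache.calcite.sql.fun.SqlStdOperatorTable): a comparison over INTEGER and BIGINT operands is rewritten so both sides carry their least restrictive common type.

RelDataType intType = typeFactory.createSqlType(SqlTypeName.INTEGER);
RelDataType bigintType = typeFactory.createSqlType(SqlTypeName.BIGINT);
RexNode left = rexBuilder.makeInputRef(intType, 0);
RexNode right = rexBuilder.makeInputRef(bigintType, 1);
RexNode rawPred = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, left, right);

// The same steps getTypeSafePred applies to a comparison RexCall:
RelDataType commonType =
    typeFactory.leastRestrictive(RexUtil.types(((RexCall) rawPred).getOperands()));
RexNode safeLeft = rexBuilder.ensureType(commonType, left, true);   // roughly CAST($0):BIGINT
RexNode safeRight = rexBuilder.ensureType(commonType, right, true); // $1, already BIGINT
RexNode typeSafePred = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, safeLeft, safeRight);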
Code Example #4
File: RexNodeConverter.java    Project: Leolh/hive
  private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, List<RexNode> childRexNodeLst)
      throws CalciteSemanticException {
    RexNode castExpr = null;

    if (childRexNodeLst != null && childRexNodeLst.size() == 1) {
      GenericUDF udf = func.getGenericUDF();
      if ((udf instanceof GenericUDFToChar)
          || (udf instanceof GenericUDFToVarchar)
          || (udf instanceof GenericUDFToDecimal)
          || (udf instanceof GenericUDFToDate)
          || (udf instanceof GenericUDFToBinary)
          || castExprUsingUDFBridge(udf)) {
        castExpr =
            cluster
                .getRexBuilder()
                .makeAbstractCast(
                    TypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory()),
                    childRexNodeLst.get(0));
      }
    }

    return castExpr;
  }
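The cast node itself is plain Calcite; a small sketch (reusing the earlier setup; the DECIMAL target stands in for whatever TypeConverter.convert derives from func.getTypeInfo()):

RexNode operand = rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0);
RelDataType targetType = typeFactory.createSqlType(SqlTypeName.DECIMAL, 10, 2);

// makeAbstractCast always wraps the operand in a CAST call rather than simplifying it;
// the result prints roughly as CAST($0):DECIMAL(10, 2).
RexNode castExpr = rexBuilder.makeAbstractCast(targetType, operand);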
Code Example #5
File: HiveRelOptUtil.java    Project: lirui-intel/hive
  private static void splitJoinCondition(
      List<RelDataTypeField> sysFieldList,
      List<RelNode> inputs,
      RexNode condition,
      List<List<RexNode>> joinKeys,
      List<Integer> filterNulls,
      List<SqlOperator> rangeOp,
      List<RexNode> nonEquiList)
      throws CalciteSemanticException {
    final int sysFieldCount = sysFieldList.size();
    final RelOptCluster cluster = inputs.get(0).getCluster();
    final RexBuilder rexBuilder = cluster.getRexBuilder();

    if (condition instanceof RexCall) {
      RexCall call = (RexCall) condition;
      if (call.getOperator() == SqlStdOperatorTable.AND) {
        for (RexNode operand : call.getOperands()) {
          splitJoinCondition(
              sysFieldList, inputs, operand, joinKeys, filterNulls, rangeOp, nonEquiList);
        }
        return;
      }

      RexNode leftKey = null;
      RexNode rightKey = null;
      int leftInput = 0;
      int rightInput = 0;
      List<RelDataTypeField> leftFields = null;
      List<RelDataTypeField> rightFields = null;
      boolean reverse = false;

      SqlKind kind = call.getKind();

      // Only consider range operators if we haven't already seen one
      if ((kind == SqlKind.EQUALS)
          || (filterNulls != null && kind == SqlKind.IS_NOT_DISTINCT_FROM)
          || (rangeOp != null
              && rangeOp.isEmpty()
              && (kind == SqlKind.GREATER_THAN
                  || kind == SqlKind.GREATER_THAN_OR_EQUAL
                  || kind == SqlKind.LESS_THAN
                  || kind == SqlKind.LESS_THAN_OR_EQUAL))) {
        final List<RexNode> operands = call.getOperands();
        RexNode op0 = operands.get(0);
        RexNode op1 = operands.get(1);

        final ImmutableBitSet projRefs0 = InputFinder.bits(op0);
        final ImmutableBitSet projRefs1 = InputFinder.bits(op1);

        final ImmutableBitSet[] inputsRange = new ImmutableBitSet[inputs.size()];
        int totalFieldCount = 0;
        for (int i = 0; i < inputs.size(); i++) {
          final int firstField = totalFieldCount + sysFieldCount;
          totalFieldCount = firstField + inputs.get(i).getRowType().getFieldCount();
          inputsRange[i] = ImmutableBitSet.range(firstField, totalFieldCount);
        }

        boolean foundBothInputs = false;
        for (int i = 0; i < inputs.size() && !foundBothInputs; i++) {
          if (projRefs0.intersects(inputsRange[i])
              && projRefs0.union(inputsRange[i]).equals(inputsRange[i])) {
            if (leftKey == null) {
              leftKey = op0;
              leftInput = i;
              leftFields = inputs.get(leftInput).getRowType().getFieldList();
            } else {
              rightKey = op0;
              rightInput = i;
              rightFields = inputs.get(rightInput).getRowType().getFieldList();
              reverse = true;
              foundBothInputs = true;
            }
          } else if (projRefs1.intersects(inputsRange[i])
              && projRefs1.union(inputsRange[i]).equals(inputsRange[i])) {
            if (leftKey == null) {
              leftKey = op1;
              leftInput = i;
              leftFields = inputs.get(leftInput).getRowType().getFieldList();
            } else {
              rightKey = op1;
              rightInput = i;
              rightFields = inputs.get(rightInput).getRowType().getFieldList();
              foundBothInputs = true;
            }
          }
        }

        if ((leftKey != null) && (rightKey != null)) {
          // adjustment array
          int[] adjustments = new int[totalFieldCount];
          for (int i = 0; i < inputs.size(); i++) {
            final int adjustment = inputsRange[i].nextSetBit(0);
            for (int j = adjustment; j < inputsRange[i].length(); j++) {
              adjustments[j] = -adjustment;
            }
          }

          // replace right Key input ref
          rightKey =
              rightKey.accept(
                  new RelOptUtil.RexInputConverter(
                      rexBuilder, rightFields, rightFields, adjustments));

          // left key only needs to be adjusted if there are system
          // fields, but do it for uniformity
          leftKey =
              leftKey.accept(
                  new RelOptUtil.RexInputConverter(
                      rexBuilder, leftFields, leftFields, adjustments));

          RelDataType leftKeyType = leftKey.getType();
          RelDataType rightKeyType = rightKey.getType();

          if (leftKeyType != rightKeyType) {
            // perform casting using Hive rules
            TypeInfo rType = TypeConverter.convert(rightKeyType);
            TypeInfo lType = TypeConverter.convert(leftKeyType);
            TypeInfo tgtType = FunctionRegistry.getCommonClassForComparison(lType, rType);

            if (tgtType == null) {
              throw new CalciteSemanticException(
                  "Cannot find common type for join keys "
                      + leftKey
                      + " (type "
                      + leftKeyType
                      + ") and "
                      + rightKey
                      + " (type "
                      + rightKeyType
                      + ")");
            }
            RelDataType targetKeyType = TypeConverter.convert(tgtType, rexBuilder.getTypeFactory());

            if (leftKeyType != targetKeyType
                && TypeInfoUtils.isConversionRequiredForComparison(tgtType, lType)) {
              leftKey = rexBuilder.makeCast(targetKeyType, leftKey);
            }

            if (rightKeyType != targetKeyType
                && TypeInfoUtils.isConversionRequiredForComparison(tgtType, rType)) {
              rightKey = rexBuilder.makeCast(targetKeyType, rightKey);
            }
          }
        }
      }

      if ((leftKey != null) && (rightKey != null)) {
        // found suitable join keys
        // add them to key list, ensuring that if there is a
        // non-equi join predicate, it appears at the end of the
        // key list; also mark the null filtering property
        addJoinKey(joinKeys.get(leftInput), leftKey, (rangeOp != null) && !rangeOp.isEmpty());
        addJoinKey(joinKeys.get(rightInput), rightKey, (rangeOp != null) && !rangeOp.isEmpty());
        if (filterNulls != null && kind == SqlKind.EQUALS) {
          // nulls are considered not matching for equality comparison
          // add the position of the most recently inserted key
          filterNulls.add(joinKeys.get(leftInput).size() - 1);
        }
        if (rangeOp != null && kind != SqlKind.EQUALS && kind != SqlKind.IS_DISTINCT_FROM) {
          if (reverse) {
            kind = reverse(kind);
          }
          rangeOp.add(op(kind, call.getOperator()));
        }
        return;
      } // else fall through and add this condition as nonEqui condition
    }

    // Either the condition is not a RexCall, or no suitable equi-join keys were found above.
    // Either way, add this condition to the list of non-equi-join conditions.
    nonEquiList.add(condition);
  }
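The input-attribution test used above (projRefs.intersects(range) combined with the union-equality check) can be sketched on its own (reusing the earlier setup; additional imports would be org.apache.calcite.plan.RelOptUtil and org.apache.calcite.util.ImmutableBitSet, and the two-field inputs are assumed):

// Two join inputs with 2 fields each and no system fields occupy positions [0, 2) and [2, 4)
// of the combined row.
ImmutableBitSet leftRange = ImmutableBitSet.range(0, 2);
ImmutableBitSet rightRange = ImmutableBitSet.range(2, 4);

// An operand referencing only field 3 is contained entirely in the right input's range.
RexNode operand = rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.BIGINT), 3);
ImmutableBitSet refs = RelOptUtil.InputFinder.bits(operand);
boolean onRight = refs.intersects(rightRange) && refs.union(rightRange).equals(rightRange); // true
boolean onLeft = refs.intersects(leftRange) && refs.union(leftRange).equals(leftRange);     // false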
Code Example #6
File: RexNodeConverter.java    Project: Leolh/hive
 private RexNode createNullLiteral(ExprNodeDesc expr) throws CalciteSemanticException {
   return cluster
       .getRexBuilder()
       .makeNullLiteral(
           TypeConverter.convert(expr.getTypeInfo(), cluster.getTypeFactory()).getSqlTypeName());
 }
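For reference, the underlying RexBuilder call in isolation (reusing the earlier setup; note that newer Calcite releases favor the RelDataType overload of makeNullLiteral over the SqlTypeName one used here):

// A typed NULL literal, as created for a NULL-valued Hive expression of INT type.
RexNode nullLiteral = rexBuilder.makeNullLiteral(SqlTypeName.INTEGER);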
Code Example #7
File: RexNodeConverter.java    Project: Leolh/hive
  protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);

    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();

    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value =
        ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);

    RexNode calciteLiteral = null;
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch (hiveTypeCategory) {
      case BOOLEAN:
        calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
        break;
      case BYTE:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
        break;
      case SHORT:
        calciteLiteral =
            rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
        break;
      case INT:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
        break;
      case LONG:
        calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
        break;
        // TODO: is Decimal an exact numeric or approximate numeric?
      case DECIMAL:
        if (value instanceof HiveDecimal) {
          value = ((HiveDecimal) value).bigDecimalValue();
        } else if (value instanceof Decimal128) {
          value = ((Decimal128) value).toBigDecimal();
        }
        if (value == null) {
          // We have found an invalid decimal value while enforcing precision and scale. Ideally,
          // we would replace it with null here, which is what Hive does. However, we need to plumb
          // this through somehow, because otherwise having a different expression type in the AST
          // causes plan generation to fail after CBO, probably due to some residual state in
          // SA/QB. For now, we will not run CBO in the presence of invalid decimal literals.
          throw new CalciteSemanticException(
              "Expression " + literal.getExprString() + " is not a valid decimal",
              UnsupportedFeature.Invalid_decimal);
          // TODO: return createNullLiteral(literal);
        }
        BigDecimal bd = (BigDecimal) value;
        BigInteger unscaled = bd.unscaledValue();
        if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
          calciteLiteral = rexBuilder.makeExactLiteral(bd);
        } else {
          // CBO doesn't support unlimited-precision decimals. In practice, this will work...
          // An alternative would be to throw CboSemanticException and fall back to no CBO.
          // Note: createSqlType takes (typeName, precision, scale); precision is the number of
          // digits in the unscaled value.
          RelDataType relType =
              cluster
                  .getTypeFactory()
                  .createSqlType(SqlTypeName.DECIMAL, unscaled.toString().length(), bd.scale());
          calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
        }
        break;
      case FLOAT:
        calciteLiteral =
            rexBuilder.makeApproxLiteral(new BigDecimal((Float) value), calciteDataType);
        break;
      case DOUBLE:
        calciteLiteral =
            rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), calciteDataType);
        break;
      case CHAR:
        if (value instanceof HiveChar) {
          value = ((HiveChar) value).getValue();
        }
        calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
        break;
      case VARCHAR:
        if (value instanceof HiveVarchar) {
          value = ((HiveVarchar) value).getValue();
        }
        calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
        break;
      case STRING:
        calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
        break;
      case DATE:
        Calendar cal = new GregorianCalendar();
        cal.setTime((Date) value);
        calciteLiteral = rexBuilder.makeDateLiteral(cal);
        break;
      case TIMESTAMP:
        Calendar c = null;
        if (value instanceof Calendar) {
          c = (Calendar) value;
        } else {
          c = Calendar.getInstance();
          c.setTimeInMillis(((Timestamp) value).getTime());
        }
        calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
        break;
      case INTERVAL_YEAR_MONTH:
        // Calcite year-month literal value is months as BigDecimal
        BigDecimal totalMonths =
            BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
        calciteLiteral =
            rexBuilder.makeIntervalLiteral(
                totalMonths,
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
        break;
      case INTERVAL_DAY_TIME:
        // Calcite day-time interval is millis value as BigDecimal
        // Seconds converted to millis
        BigDecimal secsValueBd =
            BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
        // Nanos converted to millis
        BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
        calciteLiteral =
            rexBuilder.makeIntervalLiteral(
                secsValueBd.add(nanosValueBd),
                new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
        break;
      case VOID:
        calciteLiteral =
            cluster
                .getRexBuilder()
                .makeLiteral(null, cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
        break;
      case BINARY:
      case UNKNOWN:
      default:
        throw new RuntimeException("Unsupported literal type " + hiveTypeCategory);
    }

    return calciteLiteral;
  }
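A few of these branches in isolation, as a hedged sketch (reusing the earlier setup; an additional import would be java.math.BigDecimal, and the values are made up):

// BOOLEAN: rexBuilder.makeLiteral(boolean) yields TRUE / FALSE.
RexNode boolLit = rexBuilder.makeLiteral(true);
// INT: an exact numeric literal; without an explicit type, Calcite derives one from the value.
RexNode intLit = rexBuilder.makeExactLiteral(new BigDecimal(42));
// DOUBLE: an approximate numeric literal with an explicit DOUBLE type, as in the DOUBLE branch.
RexNode doubleLit = rexBuilder.makeApproxLiteral(
    new BigDecimal(2.5d), typeFactory.createSqlType(SqlTypeName.DOUBLE));
// DECIMAL: an exact literal carrying an explicit DECIMAL(precision, scale) type, mirroring the
// out-of-long-range branch above.
BigDecimal bd = new BigDecimal("12345.67");
RexNode decLit = rexBuilder.makeExactLiteral(
    bd,
    typeFactory.createSqlType(
        SqlTypeName.DECIMAL, bd.unscaledValue().toString().length(), bd.scale()));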
Code Example #8
File: RexNodeConverter.java    Project: Leolh/hive
  private RexNode convert(final ExprNodeGenericFuncDesc func) throws SemanticException {
    ExprNodeDesc tmpExprNode;
    RexNode tmpRN;

    List<RexNode> childRexNodeLst = new LinkedList<RexNode>();
    Builder<RelDataType> argTypeBldr = ImmutableList.<RelDataType>builder();

    // TODO: 1) Expand to other functions as needed 2) What about types other than primitive.
    TypeInfo tgtDT = null;
    GenericUDF tgtUdf = func.getGenericUDF();

    boolean isNumeric =
        (tgtUdf instanceof GenericUDFBaseBinary
            && func.getTypeInfo().getCategory() == Category.PRIMITIVE
            && (PrimitiveGrouping.NUMERIC_GROUP
                == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                    ((PrimitiveTypeInfo) func.getTypeInfo()).getPrimitiveCategory())));
    boolean isCompare = !isNumeric && tgtUdf instanceof GenericUDFBaseCompare;

    if (isNumeric) {
      tgtDT = func.getTypeInfo();

      assert func.getChildren().size() == 2;
      // TODO: checking 2 children is useless, compare already does that.
    } else if (isCompare && (func.getChildren().size() == 2)) {
      tgtDT =
          FunctionRegistry.getCommonClassForComparison(
              func.getChildren().get(0).getTypeInfo(), func.getChildren().get(1).getTypeInfo());
    }

    for (ExprNodeDesc childExpr : func.getChildren()) {
      tmpExprNode = childExpr;
      if (tgtDT != null
          && TypeInfoUtils.isConversionRequiredForComparison(tgtDT, childExpr.getTypeInfo())) {
        if (isCompare) {
          // For compare, we will convert requisite children
          tmpExprNode = ParseUtils.createConversionCast(childExpr, (PrimitiveTypeInfo) tgtDT);
        } else if (isNumeric) {
          // For numeric, we'll do minimum necessary cast - if we cast to the type
          // of expression, bad things will happen.
          PrimitiveTypeInfo minArgType = ExprNodeDescUtils.deriveMinArgumentCast(childExpr, tgtDT);
          tmpExprNode = ParseUtils.createConversionCast(childExpr, minArgType);
        } else {
          throw new AssertionError("Unexpected " + tgtDT + " - not a numeric op or compare");
        }
      }
      argTypeBldr.add(TypeConverter.convert(tmpExprNode.getTypeInfo(), cluster.getTypeFactory()));
      tmpRN = convert(tmpExprNode);
      childRexNodeLst.add(tmpRN);
    }

    // See if this is an explicit cast.
    RexNode expr = null;
    RelDataType retType = null;
    expr = handleExplicitCast(func, childRexNodeLst);

    if (expr == null) {
      // This is not a cast; process the function.
      retType = TypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory());
      SqlOperator calciteOp =
          SqlFunctionConverter.getCalciteOperator(
              func.getFuncText(), func.getGenericUDF(), argTypeBldr.build(), retType);
      expr = cluster.getRexBuilder().makeCall(calciteOp, childRexNodeLst);
    } else {
      retType = expr.getType();
    }

    // TODO: The Calcite cast function has a bug where inferring the type of a cast can throw
    // an exception, so cast calls are excluded from the flattening below.
    if (flattenExpr
        && (expr instanceof RexCall)
        && !(((RexCall) expr).getOperator() instanceof SqlCastFunction)) {
      RexCall call = (RexCall) expr;
      expr =
          cluster
              .getRexBuilder()
              .makeCall(
                  retType,
                  call.getOperator(),
                  RexUtil.flatten(call.getOperands(), call.getOperator()));
    }

    return expr;
  }
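The final flattening step can be sketched in isolation (reusing the earlier setup; additional imports would be java.util.List, org.apache.calcite.rex.RexCall, org.apache.calcite.rex.RexUtil, and org.apache.calcite.sql.fun.SqlStdOperatorTable):

RelDataType boolType = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
RexNode a = rexBuilder.makeInputRef(boolType, 0);
RexNode b = rexBuilder.makeInputRef(boolType, 1);
RexNode c = rexBuilder.makeInputRef(boolType, 2);

// A nested call AND(AND($0, $1), $2), as convert() might produce before flattening...
RexNode nested = rexBuilder.makeCall(SqlStdOperatorTable.AND,
    rexBuilder.makeCall(SqlStdOperatorTable.AND, a, b), c);

// ...is re-issued as a single n-ary call AND($0, $1, $2) with the same return type.
List<RexNode> flatOperands =
    RexUtil.flatten(((RexCall) nested).getOperands(), SqlStdOperatorTable.AND);
RexNode flattened = rexBuilder.makeCall(nested.getType(), SqlStdOperatorTable.AND, flatOperands);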