Code Example #1
File: JoinDesc.java  Project: vthacker/impala-port
  @Explain(displayName = "condition expressions")
  public Map<Byte, String> getExprsStringMap() {
    if (getExprs() == null) {
      return null;
    }

    LinkedHashMap<Byte, String> ret = new LinkedHashMap<Byte, String>();

    for (Map.Entry<Byte, List<ExprNodeDesc>> ent : getExprs().entrySet()) {
      StringBuilder sb = new StringBuilder();
      boolean first = true;
      if (ent.getValue() != null) {
        for (ExprNodeDesc expr : ent.getValue()) {
          if (!first) {
            sb.append(" ");
          }

          first = false;
          sb.append("{");
          sb.append(expr.getExprString());
          sb.append("}");
        }
      }
      ret.put(ent.getKey(), sb.toString());
    }

    return ret;
  }
Code Example #2
 // Traverses origin, finds each ExprNodeDesc that appears in sources, and
 // replaces it with the ExprNodeDesc from targets at the same index.
 // Returns null if the replacement cannot be completed.
 public static ExprNodeDesc replace(
     ExprNodeDesc origin, List<ExprNodeDesc> sources, List<ExprNodeDesc> targets) {
   int index = indexOf(origin, sources);
   if (index >= 0) {
     return targets.get(index);
   }
   // Encountered a column or field that cannot be found in sources.
   if (origin instanceof ExprNodeColumnDesc || origin instanceof ExprNodeFieldDesc) {
     return null;
   }
   // for ExprNodeGenericFuncDesc, it should be deterministic and stateless
   if (origin instanceof ExprNodeGenericFuncDesc) {
     ExprNodeGenericFuncDesc func = (ExprNodeGenericFuncDesc) origin;
     if (!FunctionRegistry.isDeterministic(func.getGenericUDF())
         || FunctionRegistry.isStateful(func.getGenericUDF())) {
       return null;
     }
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
     for (int i = 0; i < origin.getChildren().size(); i++) {
       ExprNodeDesc child = replace(origin.getChildren().get(i), sources, targets);
       if (child == null) {
         return null;
       }
       children.add(child);
     }
     // duplicate function with possibly replaced children
     ExprNodeGenericFuncDesc clone = (ExprNodeGenericFuncDesc) func.clone();
     clone.setChildren(children);
     return clone;
   }
   // constant or null, just return it
   return origin;
 }
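
A minimal usage sketch, not from the original project: it assumes replace lives in ExprNodeDescUtils (as calls like ExprNodeDescUtils.indexOf in later examples suggest), uses hypothetical column names, and relies on the usual java.util.* and org.apache.hadoop.hive.ql.plan.* / udf.generic.* imports.

 // Rewrite "(a = 1)" so the reference to column "a" becomes the output
 // column "_col0"; both columns are hypothetical.
 ExprNodeDesc colA = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "a", "t", false);
 ExprNodeDesc col0 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "_col0", "", false);
 ExprNodeDesc origin = new ExprNodeGenericFuncDesc(
     TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),
     Arrays.asList(colA, new ExprNodeConstantDesc(1)));

 ExprNodeDesc rewritten =
     ExprNodeDescUtils.replace(origin, Arrays.asList(colA), Arrays.asList(col0));
 // rewritten renders roughly as "(_col0 = 1)"; null would mean some column
 // reference could not be mapped to a target.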
Code Example #3
 public static ArrayList<ExprNodeDesc> clone(List<ExprNodeDesc> sources) {
   ArrayList<ExprNodeDesc> result = new ArrayList<ExprNodeDesc>();
   for (ExprNodeDesc expr : sources) {
     result.add(expr.clone());
   }
   return result;
 }
Code Example #4
 /**
  * A normal reduce operator's rowObjectInspector looks like a struct containing nested key/value
  * structs that contain the column values: { key: { reducesinkkey0:int }, value: { _col0:int,
  * _col1:int, .. } }
  *
  * <p>While the rowObjectInspector looks the same for vectorized queries during compilation time,
  * within the tasks at query execution the rowObjectInspector has changed to a flatter structure
  * without nested key/value structs: { 'key.reducesinkkey0':int, 'value._col0':int,
  * 'value._col1':int, .. }
  *
  * <p>Trying to fetch 'key.reducesinkkey0' by name from the list of flattened ObjectInspectors
  * does not work because the '.' gets interpreted as a field member, even though it is a flattened
  * list of column values. This workaround converts the column name referenced in the ExprNodeDesc
  * from a nested field name (key.reducesinkkey0) to key_reducesinkkey0, simply by replacing '.'
  * with '_'.
  *
  * @param source
  * @return
  */
 public static ExprNodeDesc flattenExpr(ExprNodeDesc source) {
   if (source instanceof ExprNodeGenericFuncDesc) {
      // all child expressions should be resolved
     ExprNodeGenericFuncDesc function = (ExprNodeGenericFuncDesc) source.clone();
     List<ExprNodeDesc> newChildren = flattenExprList(function.getChildren());
     for (ExprNodeDesc newChild : newChildren) {
       if (newChild == null) {
         // Could not resolve all of the function children, fail
         return null;
       }
     }
     function.setChildren(newChildren);
     return function;
   }
   if (source instanceof ExprNodeColumnDesc) {
     ExprNodeColumnDesc column = (ExprNodeColumnDesc) source;
     // Create a new ColumnInfo, replacing STRUCT.COLUMN with STRUCT_COLUMN
     String newColumn = column.getColumn().replace('.', '_');
     return new ExprNodeColumnDesc(source.getTypeInfo(), newColumn, column.getTabAlias(), false);
   }
   if (source instanceof ExprNodeFieldDesc) {
     // field expression should be resolved
     ExprNodeFieldDesc field = (ExprNodeFieldDesc) source.clone();
     ExprNodeDesc fieldDesc = flattenExpr(field.getDesc());
     if (fieldDesc == null) {
       return null;
     }
     field.setDesc(fieldDesc);
     return field;
   }
   // constant or null expr, just return
   return source;
 }
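
A short sketch of the renaming described above, with a hypothetical column and the same assumptions as the earlier sketch:

 // A reference to the nested name "key.reducesinkkey0" comes back as a flat
 // column named "key_reducesinkkey0" with the same type.
 ExprNodeDesc nested = new ExprNodeColumnDesc(
     TypeInfoFactory.intTypeInfo, "key.reducesinkkey0", "t", false);
 ExprNodeColumnDesc flat = (ExprNodeColumnDesc) ExprNodeDescUtils.flattenExpr(nested);
 // flat.getColumn() is now "key_reducesinkkey0"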
Code Example #5
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
        throws SemanticException {
      GroupByOperator op = (GroupByOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      List<String> colLists = new ArrayList<String>();
      GroupByDesc conf = op.getConf();
      ArrayList<ExprNodeDesc> keys = conf.getKeys();
      for (ExprNodeDesc key : keys) {
        colLists = Utilities.mergeUniqElems(colLists, key.getCols());
      }

      ArrayList<AggregationDesc> aggrs = conf.getAggregators();
      for (AggregationDesc aggr : aggrs) {
        ArrayList<ExprNodeDesc> params = aggr.getParameters();
        for (ExprNodeDesc param : params) {
          colLists = Utilities.mergeUniqElems(colLists, param.getCols());
        }
      }
      int groupingSetPosition = conf.getGroupingSetPosition();
      if (groupingSetPosition >= 0) {
        List<String> cols = cppCtx.genColLists(op);
        String groupingColumn = conf.getOutputColumnNames().get(groupingSetPosition);
        if (!cols.contains(groupingColumn)) {
          conf.getOutputColumnNames().remove(groupingSetPosition);
          if (op.getSchema() != null) {
            op.getSchema().getSignature().remove(groupingSetPosition);
          }
        }
      }

      cppCtx.getPrunedColLists().put(op, colLists);
      return null;
    }
Code Example #6
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
        throws SemanticException {
      ReduceSinkOperator op = (ReduceSinkOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      RowResolver resolver = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      ReduceSinkDesc conf = op.getConf();

      List<String> colLists = new ArrayList<String>();
      ArrayList<ExprNodeDesc> keys = conf.getKeyCols();
      LOG.debug("Reduce Sink Operator " + op.getIdentifier() + " key:" + keys);
      for (ExprNodeDesc key : keys) {
        colLists = Utilities.mergeUniqElems(colLists, key.getCols());
      }

      assert op.getNumChild() == 1;

      Operator<? extends OperatorDesc> child = op.getChildOperators().get(0);

      List<String> childCols;
      if (child instanceof CommonJoinOperator) {
        childCols = cppCtx.getJoinPrunedColLists().get(child).get((byte) conf.getTag());
      } else {
        childCols = cppCtx.getPrunedColList(child);
      }
      List<ExprNodeDesc> valCols = conf.getValueCols();
      List<String> valColNames = conf.getOutputValueColumnNames();

      if (childCols != null) {
        boolean[] flags = new boolean[valCols.size()];

        for (String childCol : childCols) {
          int index = valColNames.indexOf(Utilities.removeValueTag(childCol));
          if (index < 0) {
            continue;
          }
          flags[index] = true;
          colLists = Utilities.mergeUniqElems(colLists, valCols.get(index).getCols());
        }

        Collections.sort(colLists);
        pruneReduceSinkOperator(flags, op, cppCtx);
        cppCtx.getPrunedColLists().put(op, colLists);
        return null;
      }

      // Reduce Sink contains the columns needed - no need to aggregate from
      // children
      for (ExprNodeDesc val : valCols) {
        colLists = Utilities.mergeUniqElems(colLists, val.getCols());
      }

      cppCtx.getPrunedColLists().put(op, colLists);
      return null;
    }
Code Example #7
File: MatchPath.java  Project: joellove/hive-udf
 public List<String> getReferencedColumns() throws SemanticException {
   MatchPath matchPath = (MatchPath) evaluator;
   List<String> columns = new ArrayList<String>();
   for (ExprNodeDesc exprNode : matchPath.resultExprInfo.resultExprNodes) {
     Utilities.mergeUniqElems(columns, exprNode.getCols());
   }
   for (ExprNodeDesc exprNode : matchPath.symInfo.symbolExprsDecs) {
     Utilities.mergeUniqElems(columns, exprNode.getCols());
   }
   return columns;
 }
Code Example #8
 public static PrimitiveTypeInfo deriveMinArgumentCast(
     ExprNodeDesc childExpr, TypeInfo targetType) {
   assert targetType instanceof PrimitiveTypeInfo : "Not a primitive type: " + targetType;
   PrimitiveTypeInfo pti = (PrimitiveTypeInfo) targetType;
   // We only do the minimum cast for decimals. Other types are assumed safe; fix if needed.
   // We also don't do anything for non-primitive children (maybe we should assert).
   if ((pti.getPrimitiveCategory() != PrimitiveCategory.DECIMAL)
       || (!(childExpr.getTypeInfo() instanceof PrimitiveTypeInfo))) return pti;
   PrimitiveTypeInfo childTi = (PrimitiveTypeInfo) childExpr.getTypeInfo();
   // If the child is also decimal, no cast is needed (we hope - can target type be narrower?).
   return HiveDecimalUtils.getDecimalTypeForPrimitiveCategory(childTi);
 }
Code Example #9
 /** Returns true if the predicate is already included in source. */
 public static boolean containsPredicate(ExprNodeDesc source, ExprNodeDesc predicate) {
   if (source.isSame(predicate)) {
     return true;
   }
   if (FunctionRegistry.isOpAnd(source)) {
     if (containsPredicate(source.getChildren().get(0), predicate)
         || containsPredicate(source.getChildren().get(1), predicate)) {
       return true;
     }
   }
   return false;
 }
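
A usage sketch with hypothetical predicates (mergePredicates is Code Example #11 below; same assumptions as the earlier sketches):

 // Build "(a = 1) and (b = 2)" and check that the left conjunct is detected.
 ExprNodeDesc a = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "a", "t", false);
 ExprNodeDesc b = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "b", "t", false);
 ExprNodeDesc aEq1 = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
     new GenericUDFOPEqual(), Arrays.asList(a, new ExprNodeConstantDesc(1)));
 ExprNodeDesc bEq2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
     new GenericUDFOPEqual(), Arrays.asList(b, new ExprNodeConstantDesc(2)));
 ExprNodeDesc source = ExprNodeDescUtils.mergePredicates(aEq1, bEq2);
 boolean found = ExprNodeDescUtils.containsPredicate(source, aEq1); // true
 // Note: only children 0 and 1 of an AND are inspected (recursively), so a
 // conjunct sitting beyond the second child of a flattened AND would be missed.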
Code Example #10
 /** Returns false if the expression contains any non-deterministic function. */
 public static boolean isDeterministic(ExprNodeDesc desc) {
   if (desc instanceof ExprNodeGenericFuncDesc) {
     if (!FunctionRegistry.isDeterministic(((ExprNodeGenericFuncDesc) desc).getGenericUDF())) {
       return false;
     }
   }
   if (desc.getChildren() != null) {
     for (ExprNodeDesc child : desc.getChildren()) {
       if (!isDeterministic(child)) {
         return false;
       }
     }
   }
   return true;
 }
Code Example #11
 /** Binds two predicates together with the AND operator. */
 public static ExprNodeGenericFuncDesc mergePredicates(ExprNodeDesc prev, ExprNodeDesc next) {
   final List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
   if (FunctionRegistry.isOpAnd(prev)) {
     children.addAll(prev.getChildren());
   } else {
     children.add(prev);
   }
   if (FunctionRegistry.isOpAnd(next)) {
     children.addAll(next.getChildren());
   } else {
     children.add(next);
   }
   return new ExprNodeGenericFuncDesc(
       TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getGenericUDFForAnd(), children);
 }
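
A brief sketch reusing aEq1 and bEq2 from the sketch under Code Example #9, plus a third hypothetical predicate cEq3 built the same way:

 // Because existing AND children are inlined, merging "(aEq1 and bEq2)" with
 // cEq3 yields one flat three-child AND rather than a nested tree.
 ExprNodeDesc and2 = ExprNodeDescUtils.mergePredicates(aEq1, bEq2);
 ExprNodeGenericFuncDesc and3 = ExprNodeDescUtils.mergePredicates(and2, cEq3);
 // and3.getChildren().size() == 3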
Code Example #12
 /**
  * Get Map of ExprNodeColumnDesc HashCode to ExprNodeColumnDesc.
  *
  * @param exprDesc
  * @param hashCodeToColumnDescMap Assumption: If two ExprNodeColumnDesc have same hash code then
  *     they are logically referring to same projection
  */
 public static void getExprNodeColumnDesc(
     ExprNodeDesc exprDesc, Map<Integer, ExprNodeDesc> hashCodeToColumnDescMap) {
   if (exprDesc instanceof ExprNodeColumnDesc) {
     hashCodeToColumnDescMap.put(exprDesc.hashCode(), exprDesc);
   } else if (exprDesc instanceof ExprNodeColumnListDesc) {
     for (ExprNodeDesc child : exprDesc.getChildren()) {
       getExprNodeColumnDesc(child, hashCodeToColumnDescMap);
     }
   } else if (exprDesc instanceof ExprNodeGenericFuncDesc) {
     for (ExprNodeDesc child : exprDesc.getChildren()) {
       getExprNodeColumnDesc(child, hashCodeToColumnDescMap);
     }
   } else if (exprDesc instanceof ExprNodeFieldDesc) {
     getExprNodeColumnDesc(((ExprNodeFieldDesc) exprDesc).getDesc(), hashCodeToColumnDescMap);
   }
 }
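
A small sketch, reusing the "(a = 1) and (b = 2)" predicate named source from the sketch under Code Example #9 and assuming this method also lives in ExprNodeDescUtils:

 // Collect the distinct column references of the predicate into a map keyed
 // by hash code, per the assumption documented above.
 Map<Integer, ExprNodeDesc> columnMap = new HashMap<Integer, ExprNodeDesc>();
 ExprNodeDescUtils.getExprNodeColumnDesc(source, columnMap);
 // columnMap now holds the ExprNodeColumnDesc instances for "a" and "b"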
Code Example #13
  /**
    * Join keys are expressions based on the Select operator. Resolve the expressions so that they
    * are based on the ReduceSink operator instead (SEL -> RS -> JOIN).
   *
   * @param source
   * @param reduceSinkOp
   * @return
   */
  public static ExprNodeDesc resolveJoinKeysAsRSColumns(
      ExprNodeDesc source, Operator<?> reduceSinkOp) {
    // Assuming this is only being done for join keys. As a result we shouldn't have to recursively
    // check any nested child expressions, because the result of the expression should exist as an
    // output column of the ReduceSink operator
    if (source == null) {
      return null;
    }

    // columnExprMap has the reverse of what we need - a mapping of the internal column names
    // to the ExprNodeDesc from the previous operation.
    // Find the key/value where the ExprNodeDesc value matches the column we are searching for.
    // The key portion of the entry will be the internal column name for the join key expression.
    for (Map.Entry<String, ExprNodeDesc> mapEntry : reduceSinkOp.getColumnExprMap().entrySet()) {
      if (mapEntry.getValue().isSame(source)) {
        String columnInternalName = mapEntry.getKey();
        if (source instanceof ExprNodeColumnDesc) {
          // The join key is a table column. Create the ExprNodeDesc based on this column.
          ColumnInfo columnInfo = reduceSinkOp.getSchema().getColumnInfo(columnInternalName);
          return new ExprNodeColumnDesc(columnInfo);
        } else {
          // Join key expression is likely some expression involving functions/operators, so there
          // is no actual table column for this. But the ReduceSink operator should still have an
          // output column corresponding to this expression, using the columnInternalName.
          // TODO: does tableAlias matter for this kind of expression?
          return new ExprNodeColumnDesc(source.getTypeInfo(), columnInternalName, "", false);
        }
      }
    }

    return null; // Couldn't find reference to expression
  }
Code Example #14
 // Check whether expr is an ExprNodeColumnDesc wrapped in a simple cast.
 // If so, peel the cast off; otherwise return expr itself.
 private ExprNodeDesc getColumnExpr(ExprNodeDesc expr) {
   if (expr instanceof ExprNodeColumnDesc) {
     return expr;
   }
   ExprNodeGenericFuncDesc funcDesc = null;
   if (expr instanceof ExprNodeGenericFuncDesc) {
     funcDesc = (ExprNodeGenericFuncDesc) expr;
   }
   if (null == funcDesc) {
     return expr;
   }
   GenericUDF udf = funcDesc.getGenericUDF();
   // check if its a simple cast expression.
   if ((udf instanceof GenericUDFBridge
           || udf instanceof GenericUDFToBinary
           || udf instanceof GenericUDFToChar
           || udf instanceof GenericUDFToVarchar
           || udf instanceof GenericUDFToDecimal
           || udf instanceof GenericUDFToDate
           || udf instanceof GenericUDFToUnixTimeStamp
           || udf instanceof GenericUDFToUtcTimestamp)
       && funcDesc.getChildren().size() == 1
       && funcDesc.getChildren().get(0) instanceof ExprNodeColumnDesc) {
     return expr.getChildren().get(0);
   }
   return expr;
 }
Code Example #15
 public static int indexOf(ExprNodeDesc origin, List<ExprNodeDesc> sources) {
   for (int i = 0; i < sources.size(); i++) {
     if (origin.isSame(sources.get(i))) {
       return i;
     }
   }
   return -1;
 }
Code Example #16
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
        throws SemanticException {
      LateralViewJoinOperator op = (LateralViewJoinOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      List<String> cols = cppCtx.genColLists(op);
      if (cols == null) {
        return null;
      }

      Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
      // As columns go down the DAG, the LVJ will transform internal column
      // names from something like 'key' to '_col0'. Because of this, we need
      // to undo this transformation using the column expression map as the
      // column names propagate up the DAG.

      // this is SEL(*) cols + UDTF cols
      List<String> outputCols = op.getConf().getOutputInternalColNames();

      // Because we currently cannot prune columns from the UDTF branch, extract
      // columns from the SEL(*) branch only and append all UDTF-branch columns to it.
      int numSelColumns = op.getConf().getNumSelColumns();

      List<String> colsAfterReplacement = new ArrayList<String>();
      ArrayList<String> newColNames = new ArrayList<String>();
      for (String col : cols) {
        int index = outputCols.indexOf(col);
        // colExprMap.size() == size of cols from SEL(*) branch
        if (index >= 0 && index < numSelColumns) {
          ExprNodeDesc transformed = colExprMap.get(col);
          Utilities.mergeUniqElems(colsAfterReplacement, transformed.getCols());
          newColNames.add(col);
        }
      }
      // update number of columns from sel(*)
      op.getConf().setNumSelColumns(newColNames.size());

      // add all UDTF columns
      // the following SEL will do column pruning for the UDTF columns, so no SEL is added here
      newColNames.addAll(outputCols.subList(numSelColumns, outputCols.size()));
      op.getConf().setOutputInternalColNames(newColNames);
      pruneOperator(ctx, op, newColNames);
      cppCtx.getPrunedColLists().put(op, colsAfterReplacement);
      return null;
    }
Code Example #17
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
        throws SemanticException {
      FilterOperator op = (FilterOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      ExprNodeDesc condn = op.getConf().getPredicate();
      // get list of columns used in the filter
      List<String> cl = condn.getCols();
      // merge it with the downstream col list
      List<String> filterOpPrunedColLists = Utilities.mergeUniqElems(cppCtx.genColLists(op), cl);
      List<String> filterOpPrunedColListsOrderPreserved =
          preserveColumnOrder(op, filterOpPrunedColLists);
      cppCtx.getPrunedColLists().put(op, filterOpPrunedColListsOrderPreserved);

      pruneOperator(cppCtx, op, cppCtx.getPrunedColLists().get(op));

      return null;
    }
Code Example #18
 void dumpFilterExpr(ExprNodeDesc expr) {
   if (expr == null) return;
   List<ExprNodeDesc> children = expr.getChildren();
   if (children != null && children.size() > 0) {
     for (ExprNodeDesc e : children) {
       dumpFilterExpr(e);
     }
   }
 }
Code Example #19
 public static ExprNodeDesc backtrack(
     ExprNodeDesc source, Operator<?> current, Operator<?> terminal, boolean foldExpr)
     throws SemanticException {
   Operator<?> parent = getSingleParent(current, terminal);
   if (parent == null) {
     return source;
   }
   if (source instanceof ExprNodeGenericFuncDesc) {
      // all child expressions should be resolved
     ExprNodeGenericFuncDesc function = (ExprNodeGenericFuncDesc) source.clone();
     List<ExprNodeDesc> children = backtrack(function.getChildren(), current, terminal, foldExpr);
     for (ExprNodeDesc child : children) {
       if (child == null) {
         // Could not resolve all of the function children, fail
         return null;
       }
     }
     function.setChildren(children);
     if (foldExpr) {
       // fold after replacing, if possible
       ExprNodeDesc foldedFunction = ConstantPropagateProcFactory.foldExpr(function);
       if (foldedFunction != null) {
         return foldedFunction;
       }
     }
     return function;
   }
   if (source instanceof ExprNodeColumnDesc) {
     ExprNodeColumnDesc column = (ExprNodeColumnDesc) source;
     return backtrack(column, parent, terminal);
   }
   if (source instanceof ExprNodeFieldDesc) {
     // field expression should be resolved
     ExprNodeFieldDesc field = (ExprNodeFieldDesc) source.clone();
     ExprNodeDesc fieldDesc = backtrack(field.getDesc(), current, terminal, foldExpr);
     if (fieldDesc == null) {
       return null;
     }
     field.setDesc(fieldDesc);
     return field;
   }
   // constant or null expr, just return
   return source;
 }
Code Example #20
File: AggregationDesc.java  Project: ZHIQUANLIU/hive
 public String getExprString() {
   StringBuilder sb = new StringBuilder();
   sb.append(genericUDAFName);
   sb.append("(");
   if (distinct) {
     sb.append("DISTINCT ");
   }
   boolean first = true;
   for (ExprNodeDesc exp : parameters) {
     if (first) {
       first = false;
     } else {
       sb.append(", ");
     }
     sb.append(exp.getExprString());
   }
   sb.append(")");
   return sb.toString();
 }
Code Example #21
  public static String extractColName(ExprNodeDesc root) {
    if (root instanceof ExprNodeColumnDesc) {
      return ((ExprNodeColumnDesc) root).getColumn();
    } else {
      if (root.getChildren() == null) {
        return null;
      }

      String column = null;
      for (ExprNodeDesc d : root.getChildren()) {
        String candidate = extractColName(d);
        if (column != null && candidate != null) {
          return null;
        } else if (candidate != null) {
          column = candidate;
        }
      }
      return column;
    }
  }
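
A usage sketch with the hypothetical predicates from the earlier sketches:

 // A column name is returned only when exactly one column is referenced
 // anywhere in the tree; otherwise the result is null.
 String one = ExprNodeDescUtils.extractColName(aEq1);    // "a"
 String none = ExprNodeDescUtils.extractColName(source); // null ("a" and "b")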
Code Example #22
 /** Splits a predicate into its AND conjuncts. */
 public static List<ExprNodeDesc> split(ExprNodeDesc current, List<ExprNodeDesc> splitted) {
   if (FunctionRegistry.isOpAnd(current)) {
     for (ExprNodeDesc child : current.getChildren()) {
       split(child, splitted);
     }
     return splitted;
   }
   if (indexOf(current, splitted) < 0) {
     splitted.add(current);
   }
   return splitted;
 }
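
A round-trip sketch using the flat three-child AND (and3) from the sketch under Code Example #11:

 // Splitting recovers the individual conjuncts; indexOf keeps duplicates out.
 List<ExprNodeDesc> conjuncts =
     ExprNodeDescUtils.split(and3, new ArrayList<ExprNodeDesc>());
 // conjuncts contains aEq1, bEq2 and cEq3, each exactly once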
Code Example #23
 /** Recommends a name for the expression. */
 public static String recommendInputName(ExprNodeDesc desc) {
   if (desc instanceof ExprNodeColumnDesc) {
     return ((ExprNodeColumnDesc) desc).getColumn();
   }
   List<ExprNodeDesc> children = desc.getChildren();
   if (FunctionRegistry.isOpPreserveInputName(desc)
       && !children.isEmpty()
       && children.get(0) instanceof ExprNodeColumnDesc) {
     return ((ExprNodeColumnDesc) children.get(0)).getColumn();
   }
   return null;
 }
Code Example #24
 /*
  * add any input columns referenced in WindowFn args or expressions.
  */
 private ArrayList<String> prunedColumnsList(
     List<String> prunedCols, WindowTableFunctionDef tDef) {
    // create a copy of prunedCols to build the pruned-column list for the PTFOperator
   ArrayList<String> mergedColList = new ArrayList<String>(prunedCols);
   if (tDef.getWindowFunctions() != null) {
     for (WindowFunctionDef wDef : tDef.getWindowFunctions()) {
       if (wDef.getArgs() == null) {
         continue;
       }
       for (PTFExpressionDef arg : wDef.getArgs()) {
         ExprNodeDesc exprNode = arg.getExprNode();
         Utilities.mergeUniqElems(mergedColList, exprNode.getCols());
       }
     }
   }
   if (tDef.getPartition() != null) {
     for (PTFExpressionDef col : tDef.getPartition().getExpressions()) {
       ExprNodeDesc exprNode = col.getExprNode();
       Utilities.mergeUniqElems(mergedColList, exprNode.getCols());
     }
   }
   if (tDef.getOrder() != null) {
     for (PTFExpressionDef col : tDef.getOrder().getExpressions()) {
       ExprNodeDesc exprNode = col.getExprNode();
       Utilities.mergeUniqElems(mergedColList, exprNode.getCols());
     }
   }
   return mergedColList;
 }
Code Example #25
 // Backtracks the child's key expressions to the parent and compares them with the parent's.
 protected Integer sameKeys(
     List<ExprNodeDesc> cexprs, List<ExprNodeDesc> pexprs, Operator<?> child, Operator<?> parent)
     throws SemanticException {
   int common = Math.min(cexprs.size(), pexprs.size());
   int limit = Math.max(cexprs.size(), pexprs.size());
   int i = 0;
   for (; i < common; i++) {
     ExprNodeDesc pexpr = pexprs.get(i);
     ExprNodeDesc cexpr = ExprNodeDescUtils.backtrack(cexprs.get(i), child, parent);
     if (cexpr == null || !pexpr.isSame(cexpr)) {
       return null;
     }
   }
   for (; i < limit; i++) {
     if (cexprs.size() > pexprs.size()) {
       if (ExprNodeDescUtils.backtrack(cexprs.get(i), child, parent) == null) {
         // cKey is not present in parent
         return null;
       }
     }
   }
   return Integer.valueOf(cexprs.size()).compareTo(pexprs.size());
 }
Code Example #26
File: JoinDesc.java  Project: vthacker/impala-port
  /**
   * Get the string representation of filters.
   *
   * <p>Returns null if there are no filters.
   *
   * @return Map from alias to filters on the alias.
   */
  @Explain(displayName = "filter predicates")
  public Map<Byte, String> getFiltersStringMap() {
    if (getFilters() == null || getFilters().size() == 0) {
      return null;
    }

    LinkedHashMap<Byte, String> ret = new LinkedHashMap<Byte, String>();
    boolean filtersPresent = false;

    for (Map.Entry<Byte, List<ExprNodeDesc>> ent : getFilters().entrySet()) {
      StringBuilder sb = new StringBuilder();
      boolean first = true;
      if (ent.getValue() != null) {
        if (ent.getValue().size() != 0) {
          filtersPresent = true;
        }
        for (ExprNodeDesc expr : ent.getValue()) {
          if (!first) {
            sb.append(" ");
          }

          first = false;
          sb.append("{");
          sb.append(expr.getExprString());
          sb.append("}");
        }
      }
      ret.put(ent.getKey(), sb.toString());
    }

    if (filtersPresent) {
      return ret;
    } else {
      return null;
    }
  }
Code Example #27
  private ExprNodeDesc analyzeExpr(
      ExprNodeGenericFuncDesc expr,
      List<IndexSearchCondition> searchConditions,
      Object... nodeOutputs) {

    if (FunctionRegistry.isOpAnd(expr)) {
      assert (nodeOutputs.length == 2);
      ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
      ExprNodeDesc residual2 = (ExprNodeDesc) nodeOutputs[1];
      if (residual1 == null) {
        return residual2;
      }
      if (residual2 == null) {
        return residual1;
      }
      List<ExprNodeDesc> residuals = new ArrayList<ExprNodeDesc>();
      residuals.add(residual1);
      residuals.add(residual2);
      return new ExprNodeGenericFuncDesc(
          TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getGenericUDFForAnd(), residuals);
    }

    GenericUDF genericUDF = expr.getGenericUDF();
    if (!(genericUDF instanceof GenericUDFBaseCompare)) {
      return expr;
    }
    ExprNodeDesc expr1 = (ExprNodeDesc) nodeOutputs[0];
    ExprNodeDesc expr2 = (ExprNodeDesc) nodeOutputs[1];
    // We may need to peel off the GenericUDFBridge that is added by CBO or user
    if (expr1.getTypeInfo().equals(expr2.getTypeInfo())) {
      expr1 = getColumnExpr(expr1);
      expr2 = getColumnExpr(expr2);
    }

    ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair(expr1, expr2);
    if (extracted == null || (extracted.length > 2 && !acceptsFields)) {
      return expr;
    }

    ExprNodeColumnDesc columnDesc;
    ExprNodeConstantDesc constantDesc;
    if (extracted[0] instanceof ExprNodeConstantDesc) {
      genericUDF = genericUDF.flip();
      columnDesc = (ExprNodeColumnDesc) extracted[1];
      constantDesc = (ExprNodeConstantDesc) extracted[0];
    } else {
      columnDesc = (ExprNodeColumnDesc) extracted[0];
      constantDesc = (ExprNodeConstantDesc) extracted[1];
    }

    String udfName = genericUDF.getUdfName();
    if (!udfNames.contains(udfName)) {
      return expr;
    }

    if (!allowedColumnNames.contains(columnDesc.getColumn())) {
      return expr;
    }

    String[] fields = null;
    if (extracted.length > 2) {
      ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) extracted[2];
      if (!isValidField(fieldDesc)) {
        return expr;
      }
      fields = ExprNodeDescUtils.extractFields(fieldDesc);
    }

    // We also need to update the expr so that the index query can be generated.
    // Note that Hive does not support UDFToDouble etc. in the query text.
    List<ExprNodeDesc> list = new ArrayList<ExprNodeDesc>();
    list.add(expr1);
    list.add(expr2);
    expr = new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);

    searchConditions.add(new IndexSearchCondition(columnDesc, udfName, constantDesc, expr, fields));

    // we converted the expression to a search condition, so
    // remove it from the residual predicate
    return fields == null ? null : expr;
  }
Code Example #28
 public static ExprNodeColumnDesc getColumnExpr(ExprNodeDesc expr) {
   while (FunctionRegistry.isOpCast(expr)) {
     expr = expr.getChildren().get(0);
   }
   return (expr instanceof ExprNodeColumnDesc) ? (ExprNodeColumnDesc) expr : null;
 }
Code Example #29
 @Override
 public int hashCode() {
   return exprNodeDesc == null ? 0 : exprNodeDesc.hashCode();
 }
Code Example #30
File: ReduceSinkOperator.java  Project: hugh-han/hive
  @Override
  protected void initializeOp(Configuration hconf) throws HiveException {
    super.initializeOp(hconf);
    try {

      numRows = 0;
      cntr = 1;
      logEveryNRows = HiveConf.getLongVar(hconf, HiveConf.ConfVars.HIVE_LOG_N_RECORDS);

      statsMap.put(getCounterName(Counter.RECORDS_OUT_INTERMEDIATE, hconf), recordCounter);

      List<ExprNodeDesc> keys = conf.getKeyCols();

      if (isLogDebugEnabled) {
        LOG.debug("keys size is " + keys.size());
        for (ExprNodeDesc k : keys) {
          LOG.debug("Key exprNodeDesc " + k.getExprString());
        }
      }

      keyEval = new ExprNodeEvaluator[keys.size()];
      int i = 0;
      for (ExprNodeDesc e : keys) {
        keyEval[i++] = ExprNodeEvaluatorFactory.get(e);
      }

      numDistributionKeys = conf.getNumDistributionKeys();
      distinctColIndices = conf.getDistinctColumnIndices();
      numDistinctExprs = distinctColIndices.size();

      valueEval = new ExprNodeEvaluator[conf.getValueCols().size()];
      i = 0;
      for (ExprNodeDesc e : conf.getValueCols()) {
        valueEval[i++] = ExprNodeEvaluatorFactory.get(e);
      }

      partitionEval = new ExprNodeEvaluator[conf.getPartitionCols().size()];
      i = 0;
      for (ExprNodeDesc e : conf.getPartitionCols()) {
        int index = ExprNodeDescUtils.indexOf(e, keys);
        partitionEval[i++] = index < 0 ? ExprNodeEvaluatorFactory.get(e) : keyEval[index];
      }

      if (conf.getBucketCols() != null && !conf.getBucketCols().isEmpty()) {
        bucketEval = new ExprNodeEvaluator[conf.getBucketCols().size()];

        i = 0;
        for (ExprNodeDesc e : conf.getBucketCols()) {
          int index = ExprNodeDescUtils.indexOf(e, keys);
          bucketEval[i++] = index < 0 ? ExprNodeEvaluatorFactory.get(e) : keyEval[index];
        }

        buckColIdxInKey = conf.getPartitionCols().size();
      }

      tag = conf.getTag();
      tagByte[0] = (byte) tag;
      skipTag = conf.getSkipTag();
      if (isLogInfoEnabled) {
        LOG.info("Using tag = " + tag);
      }

      TableDesc keyTableDesc = conf.getKeySerializeInfo();
      keySerializer = (Serializer) keyTableDesc.getDeserializerClass().newInstance();
      keySerializer.initialize(null, keyTableDesc.getProperties());
      keyIsText = keySerializer.getSerializedClass().equals(Text.class);

      TableDesc valueTableDesc = conf.getValueSerializeInfo();
      valueSerializer = (Serializer) valueTableDesc.getDeserializerClass().newInstance();
      valueSerializer.initialize(null, valueTableDesc.getProperties());

      int limit = conf.getTopN();
      float memUsage = conf.getTopNMemoryUsage();

      if (limit >= 0 && memUsage > 0) {
        reducerHash = conf.isPTFReduceSink() ? new PTFTopNHash() : reducerHash;
        reducerHash.initialize(limit, memUsage, conf.isMapGroupBy(), this);
      }

      useUniformHash = conf.getReducerTraits().contains(UNIFORM);

      firstRow = true;
    } catch (Exception e) {
      String msg = "Error initializing ReduceSinkOperator: " + e.getMessage();
      LOG.error(msg, e);
      throw new RuntimeException(e);
    }
  }