Example #1
 public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) {
   this.expr = expr;
   children = new ExprNodeEvaluator[expr.getChildExprs().size()];
   isEager = false;
   for (int i = 0; i < children.length; i++) {
     ExprNodeDesc child = expr.getChildExprs().get(i);
     ExprNodeEvaluator nodeEvaluator = ExprNodeEvaluatorFactory.get(child);
     children[i] = nodeEvaluator;
     // If we have eager evaluators anywhere below us, then we are eager too.
     if (nodeEvaluator instanceof ExprNodeGenericFuncEvaluator) {
       if (((ExprNodeGenericFuncEvaluator) nodeEvaluator).isEager) {
         isEager = true;
       }
      // Base case: we are eager if a child is stateful
       GenericUDF childUDF = ((ExprNodeGenericFuncDesc) child).getGenericUDF();
       if (FunctionRegistry.isStateful(childUDF)) {
         isEager = true;
       }
     }
   }
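   // Stateful subtrees must be evaluated exactly once per row, so eager
   // wrappers compute each child value up front; deferred wrappers wait
   // until the UDF actually asks for the argument.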
   deferredChildren = new GenericUDF.DeferredObject[expr.getChildExprs().size()];
   for (int i = 0; i < deferredChildren.length; i++) {
     if (isEager) {
       deferredChildren[i] = new EagerExprObject(children[i]);
     } else {
       deferredChildren[i] = new DeferredExprObject(children[i]);
     }
   }
 }
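A minimal usage sketch (not part of the original source): ExprNodeEvaluatorFactory.get builds the evaluator tree, reaching the constructor above for generic-function nodes; initialize binds it to a row ObjectInspector, and evaluate produces the value. GenericUDFOPPlus and the empty-struct row inspector are illustrative assumptions; exact classes and signatures vary across Hive versions.

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

public class EvaluatorSketch {
  public static void main(String[] args) throws Exception {
    // Descriptor tree for the constant expression 1 + 2.
    ExprNodeDesc plus = ExprNodeGenericFuncDesc.newInstance(
        new GenericUDFOPPlus(),
        Arrays.<ExprNodeDesc>asList(
            new ExprNodeConstantDesc(1), new ExprNodeConstantDesc(2)));

    // Dispatches to the constructor shown above, which recursively wraps
    // the children and decides eager vs. deferred evaluation.
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(plus);

    // Constants ignore the row, so an empty struct inspector suffices.
    ObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.<String>asList(), Arrays.<ObjectInspector>asList());
    eval.initialize(rowOI);
    System.out.println(eval.evaluate(null)); // prints 3 (as a writable)
  }
}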
Example #2
     public Handler(
        ObjectInspector inputObjInspector,
        List<ExprNodeDesc> keyCols,
        List<ExprNodeDesc> valueCols,
        List<String> outputKeyColumnNames,
        List<String> outputValueColumnNames,
        Integer tag)
        throws HiveException {

      keyEval = new ExprNodeEvaluator[keyCols.size()];
      int i = 0;
      for (ExprNodeDesc e : keyCols) {
        keyEval[i++] = ExprNodeEvaluatorFactory.get(e);
      }
      outputKey = new Object[keyEval.length];

      valueEval = new ExprNodeEvaluator[valueCols.size()];
      i = 0;
      for (ExprNodeDesc e : valueCols) {
        valueEval[i++] = ExprNodeEvaluatorFactory.get(e);
      }
      outputValue = new Object[valueEval.length];

      this.tag = tag;

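       // Bind each evaluator to the input row layout and describe the
       // forwarded (key, value) struct that downstream operators will see.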
      ObjectInspector keyObjectInspector =
          initEvaluatorsAndReturnStruct(keyEval, outputKeyColumnNames, inputObjInspector);
      ObjectInspector valueObjectInspector =
          initEvaluatorsAndReturnStruct(valueEval, outputValueColumnNames, inputObjInspector);
      List<ObjectInspector> ois = new ArrayList<ObjectInspector>();
      ois.add(keyObjectInspector);
      ois.add(valueObjectInspector);
      this.outputObjInspector =
          ObjectInspectorFactory.getStandardStructObjectInspector(
              Utilities.reduceFieldNameList, ois);
      this.forwardedRow = new ArrayList<Object>(Utilities.reduceFieldNameList.size());
    }
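The initEvaluatorsAndReturnStruct helper used twice above comes from Hive's Operator base class. The following is a simplified sketch of what it does, an assumption that mirrors the stock helper and omits the distinct-column overload used for reduce keys:

  // Sketch only: initializing an evaluator binds it to the row layout and
  // yields the inspector for its result; the results are then exposed as
  // one standard struct keyed by the output column names.
  static StructObjectInspector initEvaluatorsAndReturnStruct(
      ExprNodeEvaluator[] evals, List<String> outputColNames,
      ObjectInspector rowInspector) throws HiveException {
    List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(evals.length);
    for (ExprNodeEvaluator eval : evals) {
      fieldOIs.add(eval.initialize(rowInspector));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(outputColNames, fieldOIs);
  }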
Example #3
  @Override
  protected void initializeOp(Configuration hconf) throws HiveException {
    super.initializeOp(hconf);
    try {

      numRows = 0;
      cntr = 1;
      logEveryNRows = HiveConf.getLongVar(hconf, HiveConf.ConfVars.HIVE_LOG_N_RECORDS);

      statsMap.put(getCounterName(Counter.RECORDS_OUT_INTERMEDIATE, hconf), recordCounter);

      List<ExprNodeDesc> keys = conf.getKeyCols();

      if (isLogDebugEnabled) {
        LOG.debug("keys size is " + keys.size());
        for (ExprNodeDesc k : keys) {
          LOG.debug("Key exprNodeDesc " + k.getExprString());
        }
      }

      keyEval = new ExprNodeEvaluator[keys.size()];
      int i = 0;
      for (ExprNodeDesc e : keys) {
        keyEval[i++] = ExprNodeEvaluatorFactory.get(e);
      }

      numDistributionKeys = conf.getNumDistributionKeys();
      distinctColIndices = conf.getDistinctColumnIndices();
      numDistinctExprs = distinctColIndices.size();

      valueEval = new ExprNodeEvaluator[conf.getValueCols().size()];
      i = 0;
      for (ExprNodeDesc e : conf.getValueCols()) {
        valueEval[i++] = ExprNodeEvaluatorFactory.get(e);
      }

      partitionEval = new ExprNodeEvaluator[conf.getPartitionCols().size()];
      i = 0;
      for (ExprNodeDesc e : conf.getPartitionCols()) {
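        // If a partition column is also a key column, reuse the key
        // evaluator so the shared expression is computed only once per row.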
        int index = ExprNodeDescUtils.indexOf(e, keys);
        partitionEval[i++] = index < 0 ? ExprNodeEvaluatorFactory.get(e) : keyEval[index];
      }

      if (conf.getBucketCols() != null && !conf.getBucketCols().isEmpty()) {
        bucketEval = new ExprNodeEvaluator[conf.getBucketCols().size()];

        i = 0;
        for (ExprNodeDesc e : conf.getBucketCols()) {
          int index = ExprNodeDescUtils.indexOf(e, keys);
          bucketEval[i++] = index < 0 ? ExprNodeEvaluatorFactory.get(e) : keyEval[index];
        }

        buckColIdxInKey = conf.getPartitionCols().size();
      }

      tag = conf.getTag();
      tagByte[0] = (byte) tag;
      skipTag = conf.getSkipTag();
      if (isLogInfoEnabled) {
        LOG.info("Using tag = " + tag);
      }

      TableDesc keyTableDesc = conf.getKeySerializeInfo();
      keySerializer = (Serializer) keyTableDesc.getDeserializerClass().newInstance();
      keySerializer.initialize(null, keyTableDesc.getProperties());
      keyIsText = keySerializer.getSerializedClass().equals(Text.class);

      TableDesc valueTableDesc = conf.getValueSerializeInfo();
      valueSerializer = (Serializer) valueTableDesc.getDeserializerClass().newInstance();
      valueSerializer.initialize(null, valueTableDesc.getProperties());

      int limit = conf.getTopN();
      float memUsage = conf.getTopNMemoryUsage();

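      // Top-N pushdown: when this sink feeds a LIMIT, buffer keys in a
      // memory-bounded hash and forward only the qualifying rows; PTF
      // sinks swap in a specialized variant.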
      if (limit >= 0 && memUsage > 0) {
        if (conf.isPTFReduceSink()) {
          reducerHash = new PTFTopNHash();
        }
        reducerHash.initialize(limit, memUsage, conf.isMapGroupBy(), this);
      }

      useUniformHash = conf.getReducerTraits().contains(UNIFORM);

      firstRow = true;
    } catch (Exception e) {
      String msg = "Error initializing ReduceSinkOperator: " + e.getMessage();
      LOG.error(msg, e);
      throw new RuntimeException(msg, e);
    }
  }
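For orientation, a short illustrative sketch of the per-row work these fields set up; names follow the examples above, but this is not the operator's actual process() body, which additionally handles tags, distinct columns, and the top-N hash:

  // Illustrative only: project one input row through the compiled evaluators.
  Object[] projectKeys(Object row) throws HiveException {
    Object[] keys = new Object[keyEval.length];
    for (int i = 0; i < keyEval.length; i++) {
      keys[i] = keyEval[i].evaluate(row);
    }
    return keys;
  }

The common thread across all three examples: ExprNodeDesc trees are compiled into evaluator arrays once at initialization, so per-row work reduces to evaluate() calls followed by serialization.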