/**
 * Reads only the first and the last column of each row in an RCFile and,
 * when checkCorrect is set, verifies them against regenerated expected
 * values. Returns the number of rows read.
 */
public int performRCFileReadFirstAndLastColumnTest(
      FileSystem fs, Path file, int allColumnsNumber, boolean checkCorrect) throws IOException {

    byte[][] checkBytes = null;
    BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
    if (checkCorrect) {
      resetRandomGenerators();
      checkBytes = new byte[allColumnsNumber][];
    }

    int actualReadCount = 0;

    // Project only the first and the last column before opening the reader.
    java.util.List<Integer> readCols = new java.util.ArrayList<Integer>();
    readCols.add(0);
    readCols.add(allColumnsNumber - 1);
    ColumnProjectionUtils.appendReadColumns(conf, readCols);
    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);

    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable cols = new BytesRefArrayWritable();
    while (reader.next(rowID)) {
      reader.getCurrentRow(cols);
      boolean ok = true;
      if (checkCorrect) {
        nextRandomRow(checkBytes, checkRow);
        ok = ok && (checkRow.get(0).equals(cols.get(0)));
        ok = ok && checkRow.get(allColumnsNumber - 1).equals(cols.get(allColumnsNumber - 1));
      }
      if (!ok) {
        throw new IllegalStateException("Read row does not match the written row.");
      }
      actualReadCount++;
    }
    reader.close();
    return actualReadCount;
  }
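
A note on what this example exercises: appendReadColumns records the requested column IDs in the configuration, and column-oriented readers such as RCFile.Reader consult that list so they can skip deserializing every other column. Below is a minimal, hypothetical sketch of that setup in isolation; the class and method names are illustrative, and only the appendReadColumns(Configuration, List<Integer>) call is taken from the snippet above.

// Hypothetical helper: narrow a read to the first and the last column.
// Only appendReadColumns(Configuration, List<Integer>) is assumed, with
// the same calling convention as the example above.
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;

public class FirstAndLastProjection {
  public static Configuration project(Configuration base, int columnCount) {
    Configuration conf = new Configuration(base);
    List<Integer> ids = new ArrayList<Integer>();
    ids.add(0);
    ids.add(columnCount - 1);
    // Readers that honor the projection property will materialize only
    // these two columns and skip the rest at the storage layer.
    ColumnProjectionUtils.appendReadColumns(conf, ids);
    return conf;
  }
}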
Example 2
  @Override
  public void initialize(
      HiveConf conf, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) {
    super.initialize(conf, queryPlan, ctx, opContext);
    work.initializeForFetch(opContext);

    try {
      // Clone the configuration into a JobConf for this task
      JobConf job = new JobConf(conf);

      Operator<?> source = work.getSource();
      if (source instanceof TableScanOperator) {
        TableScanOperator ts = (TableScanOperator) source;
        // push down projections
        ColumnProjectionUtils.appendReadColumns(
            job, ts.getNeededColumnIDs(), ts.getNeededColumns());
        // push down filters
        HiveInputFormat.pushFilters(job, ts);
      }
      sink = work.getSink();
      fetch = new FetchOperator(work, job, source, getVirtualColumns(source));
      source.initialize(conf, new ObjectInspector[] {fetch.getOutputObjectInspector()});
      totalRows = 0;
      ExecMapper.setDone(false);

    } catch (Exception e) {
      // Bail out ungracefully; we should never hit this here,
      // since any such failure would have surfaced in SemanticAnalyzer
      LOG.error(StringUtils.stringifyException(e));
      throw new RuntimeException(e);
    }
  }
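
The projection/filter pushdown in the try block is a recurring pattern: check that the plan's source is a table scan, narrow the job conf to the columns the scan actually needs, then attach the scan's filter expression before any reader is built. A condensed sketch of just that step follows; the wrapper class is hypothetical, while the two pushed-down calls mirror the snippet exactly.

// Sketch of the pushdown step from initialize() above. The wrapper is
// illustrative; appendReadColumns and pushFilters are used as in the snippet.
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.mapred.JobConf;

final class PushdownSketch {
  static void pushDown(JobConf job, Operator<?> source) {
    if (source instanceof TableScanOperator) {
      TableScanOperator ts = (TableScanOperator) source;
      // push down projections: read only the needed column IDs/names
      ColumnProjectionUtils.appendReadColumns(
          job, ts.getNeededColumnIDs(), ts.getNeededColumns());
      // push down filters: serialize the scan's filter into the job conf
      HiveInputFormat.pushFilters(job, ts);
    }
  }
}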
Example 3
  private void initializeOperators(Map<FetchOperator, JobConf> fetchOpJobConfMap)
      throws HiveException {
    for (Map.Entry<String, Operator<? extends OperatorDesc>> entry :
        work.getAliasToWork().entrySet()) {
      LOG.debug(
          "initializeOperators: "
              + entry.getKey()
              + ", children = "
              + entry.getValue().getChildOperators());
    }
    // create one fetch operator per alias, each with its own cloned JobConf
    for (Map.Entry<String, FetchWork> entry : work.getAliasToFetchWork().entrySet()) {
      if (entry.getValue() == null) {
        continue;
      }
      JobConf jobClone = new JobConf(job);

      TableScanOperator ts = (TableScanOperator) work.getAliasToWork().get(entry.getKey());
      // push down projections
      ColumnProjectionUtils.appendReadColumns(
          jobClone, ts.getNeededColumnIDs(), ts.getNeededColumns());
      // push down filters
      HiveInputFormat.pushFilters(jobClone, ts);

      // create a fetch operator
      FetchOperator fetchOp = new FetchOperator(entry.getValue(), jobClone);
      fetchOpJobConfMap.put(fetchOp, jobClone);
      fetchOperators.put(entry.getKey(), fetchOp);
      l4j.info("fetchoperator for " + entry.getKey() + " created");
    }
    // initialize all forward operators
    for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
      // get the forward op
      String alias = entry.getKey();
      Operator<? extends OperatorDesc> forwardOp = work.getAliasToWork().get(alias);

      // pass the execution context into all the operators
      forwardOp.passExecContext(execContext);
      // all the operators need to be initialized before processing starts
      FetchOperator fetchOp = entry.getValue();
      JobConf jobConf = fetchOpJobConfMap.get(fetchOp);

      if (jobConf == null) {
        jobConf = job;
      }
      // initialize the forward operator
      ObjectInspector objectInspector = fetchOp.getOutputObjectInspector();
      forwardOp.initialize(jobConf, new ObjectInspector[] {objectInspector});
      l4j.info("fetchoperator for " + entry.getKey() + " initialized");
    }
  }
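
Worth noting why the loop above clones the JobConf per alias: appendReadColumns accumulates column IDs into a single configuration property, so reusing one conf across aliases would merge their projections. The small demonstration below is illustrative only; READ_COLUMN_IDS_CONF_STR is assumed to be the public constant on ColumnProjectionUtils that names this property.

// Illustrative only: shows why each alias above gets its own JobConf clone.
// appendReadColumns appends IDs into one shared conf property, so
// projections from different aliases would otherwise merge.
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.mapred.JobConf;

public class CloneDemo {
  public static void main(String[] args) {
    JobConf shared = new JobConf();
    ColumnProjectionUtils.appendReadColumns(shared, new ArrayList<Integer>(Arrays.asList(0, 1)));
    ColumnProjectionUtils.appendReadColumns(shared, new ArrayList<Integer>(Arrays.asList(5)));
    // Both calls accumulate into the same property, e.g. "0,1,5".
    System.out.println(shared.get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));

    // A clone isolates further appends from the shared conf.
    JobConf clone = new JobConf(shared);
    ColumnProjectionUtils.appendReadColumns(clone, new ArrayList<Integer>(Arrays.asList(7)));
    System.out.println(shared.get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR)); // unchanged
  }
}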