Example #1
  /**
   * Uses the ColumnarSerDe to deserialize the buff (a BytesRefArrayWritable) into a
   * ColumnarStruct instance.
   *
   * @param buff the serialized row data as a BytesRefArrayWritable
   * @return the deserialized row as a ColumnarStruct
   */
  private ColumnarStruct readColumnarStruct(BytesRefArrayWritable buff) {
    // use ColumnarSerDe to deserialize row
    ColumnarStruct struct = null;
    try {
      struct = (ColumnarStruct) serde.deserialize(buff);
    } catch (SerDeException e) {
      LOG.error(e.toString(), e);
      throw new RuntimeException(e.toString(), e);
    }

    return struct;
  }
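
Example #1 only covers the deserialization step: it assumes the serde field was already initialized with the table's column names and types (Example #2 shows where that happens in the loader). For context, a minimal self-contained sketch of the same call sequence might look like the following; the class name ColumnarStructSketch, the readRow() method, and the "id"/"name" columns with their types are illustrative placeholders rather than part of the original loader, and the field values are read back through the SerDe's ObjectInspector instead of the loader's own tuple-building code.

import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.columnar.ColumnarStruct;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class ColumnarStructSketch {

  /**
   * Deserializes one RCFile row and returns its field values, mirroring
   * readColumnarStruct() from Example #1. Column names and types here are
   * made-up placeholders.
   */
  public static List<Object> readRow(BytesRefArrayWritable buff) throws SerDeException {
    // Hypothetical table layout: two columns, "id" (int) and "name" (string).
    Properties props = new Properties();
    props.setProperty("columns", "id,name");
    props.setProperty("columns.types", "int,string");

    // Initialize the SerDe the same way Example #2 does (Configuration + table properties).
    ColumnarSerDe serde = new ColumnarSerDe();
    serde.initialize(new Configuration(), props);

    // Same cast-and-deserialize step as readColumnarStruct(buff) in Example #1.
    ColumnarStruct struct = (ColumnarStruct) serde.deserialize(buff);

    // The StructObjectInspector describes the row layout and exposes the field values.
    StructObjectInspector rowOI = (StructObjectInspector) serde.getObjectInspector();
    return rowOI.getStructFieldsDataAsList(struct);
  }
}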
Example #2
  @Override
  public void prepareToRead(@SuppressWarnings("rawtypes") RecordReader reader, PigSplit split)
      throws IOException {

    this.reader = (HiveRCRecordReader) reader;

    // Check that the required indexes (i.e. the columns that should be read)
    // actually exist. Assuming this is always defined simplifies the
    // readColumnarTuple logic.

    int[] requiredIndexes = getRequiredColumns();
    if (requiredIndexes == null) {

      int fieldLen = pigSchema.getFields().length;

      // the partition keys, if any, should already exist at this point
      String[] partitionKeys = getPartitionKeys(null, null);
      if (partitionKeys != null) {
        fieldLen = partitionKeys.length;
      }

      requiredIndexes = new int[fieldLen];

      for (int i = 0; i < fieldLen; i++) {
        requiredIndexes[i] = i;
      }

      this.requiredColumns = requiredIndexes;
    }

    try {
      serde = new ColumnarSerDe();
      serde.initialize(hiveConf, props);
    } catch (SerDeException e) {
      LOG.error(e.toString(), e);
      throw new IOException(e);
    }
  }
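
prepareToRead() only wires up the HiveRCRecordReader and the ColumnarSerDe; the rows are actually consumed in the loader's getNext(), which Pig calls once per output tuple. A rough sketch of such a getNext() follows, meant to sit in the same loader class as the two snippets above, so it reuses the reader, serde and requiredColumns fields and the readColumnarStruct() method, and would additionally need org.apache.pig.data.Tuple, org.apache.pig.data.TupleFactory and Hive's StructObjectInspector imports. It assumes the record reader's values are BytesRefArrayWritable rows (as Example #1 implies), and it substitutes a plain ObjectInspector field copy for the loader's real readColumnarTuple conversion.

  @Override
  public Tuple getNext() throws IOException {
    try {
      // Pig calls getNext() once per tuple after prepareToRead(); returning
      // null signals that this input split is exhausted.
      if (!reader.nextKeyValue()) {
        return null;
      }

      // One RCFile value holds one row, stored column by column.
      BytesRefArrayWritable buff = reader.getCurrentValue();
      ColumnarStruct struct = readColumnarStruct(buff);

      // Copy the required columns into a Pig tuple via the SerDe's
      // ObjectInspector. The real loader performs a fuller Hive-to-Pig type
      // conversion (the readColumnarTuple logic mentioned above) and also
      // handles partition-key columns, both of which this sketch skips.
      StructObjectInspector rowOI = (StructObjectInspector) serde.getObjectInspector();
      List<Object> fields = rowOI.getStructFieldsDataAsList(struct);

      Tuple tuple = TupleFactory.getInstance().newTuple(requiredColumns.length);
      for (int i = 0; i < requiredColumns.length; i++) {
        tuple.set(i, fields.get(requiredColumns[i]));
      }
      return tuple;

    } catch (InterruptedException e) {
      throw new IOException(e);
    } catch (SerDeException e) {
      throw new IOException(e);
    }
  }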