@Override
  public void configure(JobConf job) {
    super.configure(job);
    try {
      _partFileWithHeader = TfUtils.isPartFileWithHeader(job);
      tfmapper = new TfUtils(job);
      tfmapper.loadTfMetadata(job, true);

      // Load relevant information for CSV Reblock
      ByteWritable key = new ByteWritable();
      OffsetCount value = new OffsetCount();
      Path p = new Path(job.get(CSVReblockMR.ROWID_FILE_NAME));

      FileSystem fs = FileSystem.get(job);
      Path thisPath = new Path(job.get("map.input.file")).makeQualified(fs);
      String thisfile = thisPath.toString();

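      // The offset file maps each split's starting byte offset in this input file to the
      // number of rows preceding it; map() uses this as the split's global starting row offset.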
      SequenceFile.Reader reader = new SequenceFile.Reader(fs, p, job);
      try {
        while (reader.next(key, value)) {
          // "key" need not be checked since the offset file only describes a single CSV input
          // (the raw data file)
          if (thisfile.equals(value.filename)) offsetMap.put(value.fileOffset, value.count);
        }
      } finally {
        reader.close();
      }

      idxRow = new CSVReblockMapper.IndexedBlockRow();
      int maxBclen = 0;

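      // Determine the largest column block size across all reblock instructions so the
      // reusable row buffer can hold a block row for any of the outputs.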
      for (ArrayList<CSVReblockInstruction> insv : csv_reblock_instructions)
        for (CSVReblockInstruction in : insv) {
          if (maxBclen < in.bclen) maxBclen = in.bclen;
        }

      // always dense, since dense rows are the common CSV use case
      idxRow.getRow().data.reset(1, maxBclen, false);

    } catch (IOException | JSONException e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  public void map(
      LongWritable rawKey,
      Text rawValue,
      OutputCollector<TaggedFirstSecondIndexes, CSVReblockMR.BlockRow> out,
      Reporter reporter)
      throws IOException {

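    // On the first record of this split, look up the split's global row offset
    // from the offset map loaded in configure(), keyed by the record's byte offset.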
    if (_first) {
      rowOffset = offsetMap.get(rawKey.get());
      _reporter = reporter;
      _first = false;
    }

    // output the header line; if the input has a header row, skip it here
    if (rawKey.get() == 0 && _partFileWithHeader) {
      tfmapper.processHeaderLine();
      if (tfmapper.hasHeader()) return;
    }

    // parse the input line and apply transformation
    String[] words = tfmapper.getWords(rawValue);

    if (!tfmapper.omit(words)) {
      words = tfmapper.apply(words);
      try {
        tfmapper.check(words);

        // Perform CSV Reblock
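        // The first instruction supplies the output index, block sizes, and fill behavior
        // used to slice the transformed row into binary blocks.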
        CSVReblockInstruction ins = csv_reblock_instructions.get(0).get(0);
        idxRow =
            CSVReblockMapper.processRow(
                idxRow,
                words,
                rowOffset,
                num,
                ins.output,
                ins.brlen,
                ins.bclen,
                ins.fill,
                ins.fillValue,
                out);
      } catch (DMLRuntimeException e) {
        throw new RuntimeException(e.getMessage() + ": " + rawValue.toString(), e);
      }
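      // Count only rows that are actually emitted; omitted rows do not advance the row index.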
      num++;
    }
  }