public RecordReaderWrapper(InputFormat<K, V> newInputFormat, InputSplit oldSplit, JobConf oldJobConf, Reporter reporter, DeprecatedInputFormatValueCopier<V> valueCopier)

in core/src/main/java/com/twitter/elephantbird/mapred/input/DeprecatedInputFormatWrapper.java [237:273]


    public RecordReaderWrapper(InputFormat<K, V> newInputFormat, InputSplit oldSplit, JobConf oldJobConf,
                               Reporter reporter, DeprecatedInputFormatValueCopier<V> valueCopier) throws IOException {

      this.valueCopier = valueCopier;
      splitLen = oldSplit.getLength();

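      // convert the old-API (mapred) split into a new-API (mapreduce) split:
      // a plain FileSplit is translated field by field; anything else is an
      // InputSplitWrapper that already carries the real new-API split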
      org.apache.hadoop.mapreduce.InputSplit split;
      if (oldSplit.getClass() == FileSplit.class) {
        split = new org.apache.hadoop.mapreduce.lib.input.FileSplit(
            ((FileSplit)oldSplit).getPath(),
            ((FileSplit)oldSplit).getStart(),
            ((FileSplit)oldSplit).getLength(),
            oldSplit.getLocations());
      } else {
        split = ((InputSplitWrapper)oldSplit).realSplit;
      }

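      // recover the task attempt id from the job conf; fall back to a fresh
      // (dummy) id when none is set, e.g. when running outside a real task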
      TaskAttemptID taskAttemptID = TaskAttemptID.forName(oldJobConf.get("mapred.task.id"));
      if (taskAttemptID == null) {
        taskAttemptID = new TaskAttemptID();
      }

      // create a MapContext to pass reporter to record reader (for counters)
      TaskAttemptContext taskContext = HadoopCompat
          .newMapContext(oldJobConf, taskAttemptID, null, null, null,
              new ReporterWrapper(reporter), null);
      try {
        realReader = newInputFormat.createRecordReader(split, taskContext);
        realReader.initialize(split, taskContext);

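        // readers that implement MapredInputFormatCompatible let the wrapper
        // hand them the reusable key/value objects expected by the old mapred API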
        if (realReader instanceof MapredInputFormatCompatible) {
          mifcReader = ((MapredInputFormatCompatible) realReader);
        }
      } catch (InterruptedException e) {
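        // the old mapred API does not declare InterruptedException, so rethrow it as IOException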
        throw new IOException(e);
      }
    }