Example 1
  @Override
  @SuppressWarnings("unchecked")
  public String[] getLocations() throws IOException, InterruptedException {
    if (locations == null) {
      // Sum the lengths of all wrapped splits per host, so that hosts
      // holding more of the input data can be ranked first.
      HashMap<String, Long> locMap = new HashMap<String, Long>();
      Long lenInMap;
      for (InputSplit split : wrappedSplits) {
        String[] locs = split.getLocations();
        for (String loc : locs) {
          if ((lenInMap = locMap.get(loc)) == null) locMap.put(loc, split.getLength());
          else locMap.put(loc, lenInMap + split.getLength());
        }
      }
      Set<Map.Entry<String, Long>> entrySet = locMap.entrySet();
      Map.Entry<String, Long>[] hostSize = entrySet.toArray(new Map.Entry[entrySet.size()]);
      Arrays.sort(
          hostSize,
          new Comparator<Map.Entry<String, Long>>() {

            @Override
            public int compare(Entry<String, Long> o1, Entry<String, Long> o2) {
              // Descending order by accumulated length; lengths are
              // non-negative, so this subtraction cannot overflow.
              long diff = o1.getValue() - o2.getValue();
              if (diff < 0) return 1;
              if (diff > 0) return -1;
              return 0;
            }
          });
      // keep at most 5 locations in the list; see PIG-1648 for details
      int nHost = Math.min(hostSize.length, 5);
      locations = new String[nHost];
      for (int i = 0; i < nHost; ++i) {
        locations[i] = hostSize[i].getKey();
      }
    }
    return locations;
  }
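
The per-host aggregation in Example 1 can be tried outside Hadoop. Below is a minimal, self-contained sketch of the same idea, using an invented Split record in place of the real InputSplit (all names and data are illustrative): sum split lengths per host, sort descending, and keep at most 5 hosts as in PIG-1648.

import java.util.*;

public class TopHosts {
  // Illustrative stand-in for an input split: its length plus replica hosts.
  record Split(long length, List<String> hosts) {}

  static String[] topHosts(List<Split> splits) {
    // Sum split lengths per host, mirroring the locMap loop above.
    Map<String, Long> bytesPerHost = new HashMap<>();
    for (Split s : splits)
      for (String h : s.hosts())
        bytesPerHost.merge(h, s.length(), Long::sum);
    // Sort hosts by total bytes, largest first, and keep at most 5.
    return bytesPerHost.entrySet().stream()
        .sorted(Map.Entry.<String, Long>comparingByValue(Comparator.reverseOrder()))
        .limit(5)
        .map(Map.Entry::getKey)
        .toArray(String[]::new);
  }

  public static void main(String[] args) {
    List<Split> splits = List.of(
        new Split(100, List.of("hostA", "hostB")),
        new Split(300, List.of("hostB", "hostC")));
    // hostB holds 400 bytes, hostC 300, hostA 100:
    System.out.println(Arrays.toString(topHosts(splits))); // [hostB, hostC, hostA]
  }
}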
Example 2

    @Override
    public void initialize(InputSplit split, final TaskAttemptContext context)
        throws IOException, InterruptedException {

      org.apache.hadoop.mapred.InputSplit oldSplit;

      // Convert the new-API (mapreduce) split into its old-API (mapred)
      // equivalent: copy a FileSplit field by field, or unwrap the real
      // split carried inside an InputSplitWrapper.
      if (split.getClass() == FileSplit.class) {
        oldSplit =
            new org.apache.hadoop.mapred.FileSplit(
                ((FileSplit) split).getPath(),
                ((FileSplit) split).getStart(),
                ((FileSplit) split).getLength(),
                split.getLocations());
      } else {
        oldSplit = ((InputSplitWrapper) split).realSplit;
      }

      @SuppressWarnings("unchecked")
      Reporter reporter = new Reporter() { // adapts the new-API context to the old-API Reporter

            final TaskInputOutputContext ioCtx =
                context instanceof TaskInputOutputContext ? (TaskInputOutputContext) context : null;

            public void progress() {
              HadoopCompat.progress(context);
            }

            // Not annotated @Override: getProgress() is absent from the
            // Reporter interface in older Hadoop versions.
            public float getProgress() {
              return (ioCtx != null) ? ioCtx.getProgress() : 0;
            }

            public void setStatus(String status) {
              if (ioCtx != null) HadoopCompat.setStatus(ioCtx, status);
            }

            public void incrCounter(String group, String counter, long amount) {
              if (ioCtx != null)
                HadoopCompat.incrementCounter(ioCtx.getCounter(group, counter), amount);
            }

            @SuppressWarnings("unchecked")
            public void incrCounter(Enum<?> key, long amount) {
              if (ioCtx != null) HadoopCompat.incrementCounter(ioCtx.getCounter(key), amount);
            }

            public org.apache.hadoop.mapred.InputSplit getInputSplit()
                throws UnsupportedOperationException {
              throw new UnsupportedOperationException();
            }

            public Counter getCounter(String group, String name) {
              return ioCtx != null ? (Counter) HadoopCompat.getCounter(ioCtx, group, name) : null;
            }

            @SuppressWarnings("unchecked")
            public Counter getCounter(Enum<?> name) {
              return ioCtx != null ? (Counter) ioCtx.getCounter(name) : null;
            }
          };

      realReader =
          realInputFormat.getRecordReader(
              oldSplit, (JobConf) HadoopCompat.getConfiguration(context), reporter);

      // The old API reuses caller-allocated key/value objects: create them
      // once here, and next() fills them in on every call.
      keyObj = realReader.createKey();
      valueObj = realReader.createValue();
    }
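
The anonymous Reporter in Example 2 is an adapter: it exposes the legacy org.apache.hadoop.mapred callback interface on top of the new-API TaskAttemptContext. The reader itself follows the same pattern. Here is a hedged sketch, with stand-in classes rather than the real Hadoop types, of how an old-style next(key, value) reader is typically surfaced behind the new nextKeyValue()/getCurrentKey() contract; it matches the createKey()/createValue() calls at the end of initialize().

import java.io.IOException;

class OldStyleReader<K, V> {
  // Stand-ins for the org.apache.hadoop.mapred.RecordReader methods.
  boolean next(K key, V value) throws IOException { return false; }
  K createKey() { return null; }
  V createValue() { return null; }
}

class NewStyleReader<K, V> {
  private final OldStyleReader<K, V> real;
  private final K key;
  private final V value;

  NewStyleReader(OldStyleReader<K, V> real) {
    this.real = real;
    this.key = real.createKey();     // mirrors keyObj = realReader.createKey()
    this.value = real.createValue(); // mirrors valueObj = realReader.createValue()
  }

  boolean nextKeyValue() throws IOException {
    // The old API fills the reusable key/value objects in place.
    return real.next(key, value);
  }

  K getCurrentKey() { return key; }
  V getCurrentValue() { return value; }
}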
Example 3
 @Override
 public String[] getLocations() throws IOException, InterruptedException {
   // Pure delegation: report the wrapped split's locations unchanged.
   return delegate.getLocations();
 }
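
Example 3 is pure delegation. For completeness, a minimal sketch of such a wrapper; the class name and the delegate field are illustrative, not a real Hadoop class:

import java.io.IOException;
import org.apache.hadoop.mapreduce.InputSplit;

class DelegatingSplit extends InputSplit {
  private final InputSplit delegate;

  DelegatingSplit(InputSplit delegate) { this.delegate = delegate; }

  @Override
  public long getLength() throws IOException, InterruptedException {
    return delegate.getLength();
  }

  @Override
  public String[] getLocations() throws IOException, InterruptedException {
    return delegate.getLocations(); // forward unchanged, as in Example 3
  }
}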