Example #1
  /**
   * After a map op chain is executed, moveOutput will be called on the root map op. By default,
   * the map op's output is stored in a location other than its final resting place, and this
   * method is responsible for moving the content to that final location (i.e. toName).
   *
   * @param toName the final location for the map op's output
   * @throws IOException if the existing output cannot be deleted or the move fails
   */
  @Override
  public void moveOutput(final String toName) throws IOException {
    // Make sure toName doesn't already exist; otherwise move() would place the output in a
    // directory under toName.
    DataProviderFactory.delete(toName, getProviderProperties());

    MrsImageDataProvider dp =
        DataProviderFactory.getMrsImageDataProvider(
            getOutputName(), AccessMode.READ, getProviderProperties());
    if (dp != null) {
      dp.move(toName);
    }
    _outputName = toName;
  }
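
A minimal usage sketch of moveOutput, assuming only what the snippet above shows. MapOpOutput, MoveOutputSketch, and finishOutput are hypothetical names standing in for whatever map op class provides the override; they are not part of the MrGeo API.

import java.io.IOException;

// Hypothetical stand-in for the map op class that implements the moveOutput override above.
interface MapOpOutput {
  void moveOutput(String toName) throws IOException;
}

class MoveOutputSketch {
  // After the map op chain has finished executing against its temporary output location,
  // relocate the finished output to the name the caller requested. Per the snippet above,
  // moveOutput first deletes any existing content at toName so move() does not nest the
  // output inside an existing directory.
  static void finishOutput(MapOpOutput rootOp, String finalName) throws IOException {
    rootOp.moveOutput(finalName);
  }
}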
Example #2
  public String resolveOutputName() throws IOException {
    if (_outputName == null) {
      // Lazily create a temporary image data provider and use its resource name as this map
      // op's output, registering the name as a temp resource so it can be cleaned up later.
      MrsImageDataProvider dp =
          DataProviderFactory.createTempMrsImageDataProvider(getProviderProperties());
      _outputName = dp.getResourceName();
      addTempResource(_outputName);
    }
    return _outputName;
  }
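
A sketch of how resolveOutputName and moveOutput typically work together, under the assumption (not stated in the snippets) that the map op chain writes to the lazily created temporary name and the result is only moved once the job succeeds. MapOpLike, OutputLifecycleSketch, runChain, and the flow itself are illustrative, not MrGeo API.

import java.io.IOException;

// Illustrative interface combining the two methods shown in Examples #1 and #2.
interface MapOpLike {
  String resolveOutputName() throws IOException;
  void moveOutput(String toName) throws IOException;
}

class OutputLifecycleSketch {
  static void execute(MapOpLike op, String finalName) throws IOException {
    String tempName = op.resolveOutputName(); // created on first call, cached afterwards
    runChain(tempName);                       // hypothetical: runs the chain, writing to tempName
    op.moveOutput(finalName);                 // relocate the finished output (Example #1)
  }

  // Placeholder for whatever executes the map op chain against the temporary output name.
  private static void runChain(String outputName) {
  }
}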
Example #3
  private void setupConfig(final Job job, final MrsImageDataProvider provider)
      throws DataProviderException {
    try {
      Configuration conf = job.getConfiguration();
      DataProviderFactory.saveProviderPropertiesToConfig(provider.getProviderProperties(), conf);
      context.save(conf);
      // Add the input pyramid metadata to the job configuration
      for (final String input : context.getInputs()) {
        MrsImagePyramid pyramid;
        try {
          pyramid = MrsImagePyramid.open(input, context.getProviderProperties());
        } catch (IOException e) {
          throw new DataProviderException("Failure opening input image pyramid: " + input, e);
        }
        final MrsImagePyramidMetadata metadata = pyramid.getMetadata();
        log.debug(
            "In setupConfig, loading pyramid for "
                + input
                + " pyramid instance is "
                + pyramid
                + " metadata instance is "
                + metadata);

        String image = metadata.getName(context.getZoomLevel());
        // If this zoom level doesn't exist, fall back to the max zoom level; that level will be
        // decimated/subsampled to the requested zoom.
        if (image == null) {
          log.error(
              "Could not get image in setupConfig at zoom level "
                  + context.getZoomLevel()
                  + " for "
                  + pyramid);
          image = metadata.getName(metadata.getMaxZoomLevel());
        }

        HadoopUtils.setMetadata(conf, metadata);
      }
    } catch (IOException e) {
      throw new DataProviderException(
          "Failure configuring map/reduce job " + context.toString(), e);
    }
  }
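
A hedged sketch of how setupConfig above might be invoked when building a job. Only Job.getInstance and waitForCompletion are standard Hadoop calls; the method name buildAndRunJob, the job name string, and the assumption that the caller already holds a MrsImageDataProvider are illustrative. It is written as a method fragment in the same style as the snippets above, with imports and the enclosing class elided.

  private boolean buildAndRunJob(final Configuration baseConf, final MrsImageDataProvider provider)
      throws Exception {
    // Standard Hadoop job creation; the job name is illustrative.
    Job job = Job.getInstance(baseConf, "mrsimage map op");

    // Copy provider properties, the saved context, and per-input pyramid metadata into the
    // job configuration, as shown in Example #3.
    setupConfig(job, provider);

    return job.waitForCompletion(true);
  }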
Example #4
  private void setup(final Configuration conf, final ProviderProperties providerProperties) {
    // Persist the provider properties and the current context into the job configuration.
    DataProviderFactory.saveProviderPropertiesToConfig(providerProperties, conf);
    context.save(conf);
  }
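
For completeness, a sketch of a caller of the lighter-weight setup above, which only persists the provider properties and context (the first two steps of Example #3) without loading any pyramid metadata. The method name buildBaseConf and the use of a fresh Configuration are assumptions, not part of the snippet.

  private Configuration buildBaseConf(final ProviderProperties providerProperties) {
    // Start from a fresh Hadoop Configuration and stamp the provider properties and the
    // current context into it via the setup() helper above.
    Configuration conf = new Configuration();
    setup(conf, providerProperties);
    return conf;
  }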