Example No. 1
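A postBuild hook: when pyramid building is requested, it reopens the freshly written output image and launches BuildPyramidSpark with an aggregator chosen from the image classification, using a mean for continuous data and the most frequent value otherwise.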
  @Override
  public void postBuild(Progress p, boolean buildPyramid)
      throws IOException, JobFailedException, JobCancelledException {
    if (buildPyramid) {
      MrsImagePyramid pyramid = MrsImagePyramid.open(_outputName, getProviderProperties());
      MrsImagePyramidMetadata metadata = pyramid.getMetadata();

      Aggregator aggregator;
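      // continuous imagery can be averaged; categorical imagery must keep the most frequent value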
      if (metadata.getClassification() == Classification.Continuous) {
        aggregator = new MeanAggregator();
      } else {
        aggregator = new ModeAggregator();
      }
      BuildPyramidSpark.build(
          _outputName, aggregator, createConfiguration(), getProviderProperties());
    }
    p.complete();
  }
Example No. 2
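Configures a map/reduce job for a set of input image pyramids: the provider properties and the operation context are written to the Hadoop Configuration, each input pyramid is opened, and its metadata is stored in the configuration; inputs missing the requested zoom level fall back to their maximum level.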
  private void setupConfig(final Job job, final MrsImageDataProvider provider)
      throws DataProviderException {
    try {
      Configuration conf = job.getConfiguration();
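      // persist the data provider properties and the operation context so the map/reduce tasks can read them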
      DataProviderFactory.saveProviderPropertiesToConfig(provider.getProviderProperties(), conf);
      context.save(conf);
      // Add the input pyramid metadata to the job configuration
      for (final String input : context.getInputs()) {
        MrsImagePyramid pyramid;
        try {
          pyramid = MrsImagePyramid.open(input, context.getProviderProperties());
        } catch (IOException e) {
          throw new DataProviderException("Failure opening input image pyramid: " + input, e);
        }
        final MrsImagePyramidMetadata metadata = pyramid.getMetadata();
        log.debug(
            "In HadoopUtils.setupMrsPyramidInputFormat, loading pyramid for "
                + input
                + " pyramid instance is "
                + pyramid
                + " metadata instance is "
                + metadata);

        String image = metadata.getName(context.getZoomLevel());
        // if we don't have this zoom level, use the max, then we'll decimate/subsample that one
        if (image == null) {
          log.error(
              "Could not get image in setupMrsPyramidInputFormat at zoom level "
                  + context.getZoomLevel()
                  + " for "
                  + pyramid);
          image = metadata.getName(metadata.getMaxZoomLevel());
        }

        HadoopUtils.setMetadata(conf, metadata);
      }
    } catch (IOException e) {
      throw new DataProviderException(
          "Failure configuring map/reduce job " + context.toString(), e);
    }
  }
Example No. 3
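Checks that a MapOp is a RasterMapOp with a named output resource and opens that resource as a MrsImagePyramid.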
  public static MrsImagePyramid flushRasterMapOpOutput(MapOp op, int argumentNumber)
      throws IOException, JobFailedException, JobCancelledException {
    if (!(op instanceof RasterMapOp)) {
      throw new IllegalArgumentException(
          "Expected raster input data for argument " + argumentNumber);
    }

    RasterMapOp rasterOp = (RasterMapOp) op;
    // if the op has already flushed its output to a named resource, we can open it directly
    if (rasterOp.getOutputName() == null) {
      throw new IllegalArgumentException(
          "Invalid raster input data - no resource name for argument " + argumentNumber);
    }
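    // open the op's named output resource as an image pyramid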
    return MrsImagePyramid.open(rasterOp.getOutputName(), op.getProviderProperties());
  }