  @Test
  @Category(IntegrationTest.class)
  public void testGetOneWorker() throws Exception {
    // use a tile bounds small enough to cover only 4 tiles
    TileBounds tb = new TileBounds(2764, 1365, 2765, 1366);
    Bounds bounds = TMSUtils.tileToBounds(tb, zoomLevel, tileSize);
    TiledInputFormatContext ifContext =
        new TiledInputFormatContext(
            zoomLevel,
            tileSize,
            new HashSet<String>(),
            bounds.convertNewToOldBounds(),
            new Properties());
    ifContext.save(conf);

    int numWorkers = CostDistanceWorkersConfiguration.getNumWorkers(metadata, conf);
    Assert.assertEquals(1, numWorkers);
  }

  @Test(expected = IllegalArgumentException.class)
  @Category(IntegrationTest.class)
  public void testMoreWorkersThanMapSlots() throws Exception {
    HadoopUtils.setupLocalRunner(conf);

    // use a tile bounds large enough that the computed worker count exceeds the available map slots
    TileBounds tb = new TileBounds(2764, 1365, 2878, 1479);
    Bounds bounds = TMSUtils.tileToBounds(tb, zoomLevel, tileSize);
    TiledInputFormatContext ifContext =
        new TiledInputFormatContext(
            zoomLevel,
            tileSize,
            new HashSet<String>(),
            bounds.convertNewToOldBounds(),
            new Properties());
    ifContext.save(conf);

    CostDistanceWorkersConfiguration.getNumWorkers(metadata, conf);
  }
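
Taken together, the two tests above pin down the contract of CostDistanceWorkersConfiguration.getNumWorkers: a small bounds maps to a single worker, while a bounds implying more workers than there are map slots is rejected with IllegalArgumentException. A minimal sketch of that contract, where tilesPerWorker and maxMapSlots are illustrative parameters rather than the actual MrGeo implementation:

  // Sketch only: tilesPerWorker and maxMapSlots are assumptions for
  // illustration; the real getNumWorkers derives them from the image
  // metadata and cluster configuration.
  static int numWorkersSketch(long numTiles, long tilesPerWorker, int maxMapSlots) {
    // at least one worker; round up so every tile is covered
    int workers = (int) Math.max(1, (numTiles + tilesPerWorker - 1) / tilesPerWorker);
    if (workers > maxMapSlots) {
      throw new IllegalArgumentException(
          "Need " + workers + " workers but only " + maxMapSlots + " map slots are available");
    }
    return workers;
  }
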
Example #3
  /** Sub-classes that override this method must call super.setupSparkJob(conf, provider). */
  @Override
  public Configuration setupSparkJob(Configuration conf, MrsImageDataProvider provider)
      throws DataProviderException {
    try {
      // let the data provider augment the configuration first, then layer
      // on the pyramid metadata via setupConfig
      Configuration providerConf = provider.setupSparkJob(conf);
      Job job = new Job(providerConf);
      setupConfig(job, provider);
      return job.getConfiguration();
    } catch (IOException e) {
      throw new DataProviderException(
          "Failure configuring map/reduce job " + context.toString(), e);
    }
  }
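
Per the Javadoc contract above, an override must delegate to super.setupSparkJob so the provider and metadata wiring still runs. A minimal sketch of a conforming subclass; BaseJobSetup stands in for the class defining the method above, and the extra property is hypothetical:

  // Illustrative subclass: the name BaseJobSetup and the extra setting
  // are assumptions; the required part is the super.setupSparkJob(...) call.
  public class CustomSparkJobSetup extends BaseJobSetup {
    @Override
    public Configuration setupSparkJob(Configuration conf, MrsImageDataProvider provider)
        throws DataProviderException {
      Configuration result = super.setupSparkJob(conf, provider); // mandatory per the contract
      result.set("example.custom.flag", "true"); // hypothetical subclass-specific setting
      return result;
    }
  }
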
  @Test
  @Category(IntegrationTest.class)
  public void testHardCodedWorkers() throws Exception {
    final int numExpectedWorkers = 5;
    MrGeoProperties.getInstance().setProperty("giraph.workers", String.valueOf(numExpectedWorkers));

    // find a large enough tile bounds
    TileBounds tb = new TileBounds(2764, 1365, 2878, 1479);
    Bounds bounds = TMSUtils.tileToBounds(tb, zoomLevel, tileSize);
    TiledInputFormatContext ifContext =
        new TiledInputFormatContext(
            zoomLevel,
            tileSize,
            new HashSet<String>(),
            bounds.convertNewToOldBounds(),
            new Properties());
    ifContext.save(conf);

    int numWorkers = CostDistanceWorkersConfiguration.getNumWorkers(metadata, conf);
    Assert.assertEquals(numExpectedWorkers, numWorkers);
    MrGeoProperties.getInstance().remove("giraph.workers");
  }
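
Because MrGeoProperties is a process-wide singleton, a failing assertion in the test above would skip the remove() call and leak giraph.workers into later tests. A safer variant, using only identifiers already present in the test, wraps the override in try/finally:

    MrGeoProperties.getInstance().setProperty("giraph.workers", String.valueOf(numExpectedWorkers));
    try {
      int numWorkers = CostDistanceWorkersConfiguration.getNumWorkers(metadata, conf);
      Assert.assertEquals(numExpectedWorkers, numWorkers);
    } finally {
      // runs even when the assertion throws, so the global override cannot leak
      MrGeoProperties.getInstance().remove("giraph.workers");
    }
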
  // Tests that numTiles is computed correctly. An earlier off-by-one error came from assuming
  // that boundsToTile returns tile boundaries whose upper right is exclusive; it is inclusive.
  @Test
  @Category(IntegrationTest.class)
  public void testNumTiles() throws Exception {
    conf.set("mapred.child.java.opts", "-Xmx2048M");
    conf.setInt("io.sort.mb", 100);

    // use a wide, single-row tile bounds (951 x 1 tiles)
    TileBounds tb = new TileBounds(50, 100, 1000, 100);
    Bounds bounds = TMSUtils.tileToBounds(tb, zoomLevel, tileSize);
    TiledInputFormatContext ifContext =
        new TiledInputFormatContext(
            zoomLevel,
            tileSize,
            new HashSet<String>(),
            bounds.convertNewToOldBounds(),
            new Properties());
    ifContext.save(conf);

    int numWorkers =
        CostDistanceWorkersConfiguration.getNumWorkers(
            metadata, conf, false /* disable map slots check */);
    Assert.assertEquals(4, numWorkers);
  }
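
The inclusive upper-right corner mentioned in the comment above is what makes the arithmetic work out. For the TileBounds used in this test (plain arithmetic, no MrGeo API involved):

    // Inclusive counting: both corner tiles are part of the range.
    long width  = 1000 - 50 + 1; // 951 columns, not 950
    long height = 100 - 100 + 1; // 1 row
    long numTiles = width * height; // 951 tiles, from which getNumWorkers derives the expected 4
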
Example #6
  private void setupConfig(final Job job, final MrsImageDataProvider provider)
      throws DataProviderException {
    try {
      Configuration conf = job.getConfiguration();
      DataProviderFactory.saveProviderPropertiesToConfig(provider.getProviderProperties(), conf);
      context.save(conf);
      // Add the input pyramid metadata to the job configuration
      for (final String input : context.getInputs()) {
        MrsImagePyramid pyramid;
        try {
          pyramid = MrsImagePyramid.open(input, context.getProviderProperties());
        } catch (IOException e) {
          throw new DataProviderException("Failure opening input image pyramid: " + input, e);
        }
        final MrsImagePyramidMetadata metadata = pyramid.getMetadata();
        log.debug(
            "In HadoopUtils.setupMrsPyramidInputFormat, loading pyramid for "
                + input
                + " pyramid instance is "
                + pyramid
                + " metadata instance is "
                + metadata);

        String image = metadata.getName(context.getZoomLevel());
        // if we don't have this zoom level, fall back to the max level; that one will be
        // decimated/subsampled later (see the helper sketch after this method)
        if (image == null) {
          log.error(
              "Could not get image in setupConfig at zoom level "
                  + context.getZoomLevel()
                  + " for "
                  + pyramid);
          image = metadata.getName(metadata.getMaxZoomLevel());
        }

        HadoopUtils.setMetadata(conf, metadata);
      }
    } catch (IOException e) {
      throw new DataProviderException(
          "Failure configuring map/reduce job " + context.toString(), e);
    }
  }
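
The zoom-level fallback inside the loop can be read as a small helper: prefer the requested zoom level, and fall back to the pyramid's maximum level when no image exists there. This restates the logic above using only calls that already appear in it; it is not a separate MrGeo API:

  // Restates the fallback from setupConfig: the max-zoom image will later
  // be decimated/subsampled down to the requested level.
  private static String imageNameOrMax(MrsImagePyramidMetadata metadata, int zoomLevel) {
    String image = metadata.getName(zoomLevel);
    return image != null ? image : metadata.getName(metadata.getMaxZoomLevel());
  }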