@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  // Emit the most distant point recorded for each cluster before the task exits.
  for (Map.Entry<Integer, WeightedVectorWritable> entry : mostDistantPoints.entrySet()) {
    context.write(new IntWritable(entry.getKey()), entry.getValue());
  }
  super.cleanup(context);
}
/**
 * Expert users can override this method for more complete control over the
 * execution of the Mapper.
 *
 * @param context the {@link Context} for this task attempt
 * @throws IOException if reading the input or writing the output fails
 * @throws InterruptedException if the task is interrupted
 */
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  // Drain the input split, calling map() once per key/value pair.
  while (context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
  }
  cleanup(context);
}
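As written, cleanup(context) is skipped whenever map() throws. Newer Hadoop releases guard the loop with try/finally; a minimal sketch of that variant (same behavior otherwise):

public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
    while (context.nextKeyValue()) {
      map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
  } finally {
    // Always give the task a chance to release its resources.
    cleanup(context);
  }
}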
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  // Flush any buffered puts, then release the table and connection handles.
  table.flushCommits();
  table.close();
  connection.close();
}
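flushCommits() belongs to the pre-1.0 HTable client API. On current HBase versions the same teardown is usually written against a BufferedMutator; a minimal sketch, assuming a `mutator` field was opened in setup() alongside `connection`:

@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  if (mutator != null) {
    mutator.flush(); // push any buffered mutations to the region servers
    mutator.close();
  }
  if (connection != null) {
    connection.close();
  }
}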
 @Override
 protected void cleanup(Context context) throws IOException, InterruptedException {
   try {
     // Write the last & final Mutation Batch
     if (mutationState != null) {
       writeBatch(mutationState, context);
     }
      // Emit a single dummy key-value pair so that exactly one map output record
      // reaches the reducer.
     context.write(
         new ImmutableBytesWritable(UUID.randomUUID().toString().getBytes()), new IntWritable(0));
     super.cleanup(context);
   } catch (SQLException e) {
     LOG.error(" Error {}  while read/write of a record ", e.getMessage());
     context.getCounter(PhoenixJobCounters.FAILED_RECORDS).increment(1);
     throw new RuntimeException(e);
   } finally {
     if (connection != null) {
       try {
         connection.close();
       } catch (SQLException e) {
          LOG.error(
              "Error {} while closing connection in the PhoenixIndexMapper class", e.getMessage());
       }
     }
     if (writer != null) {
       writer.close();
     }
   }
 }
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  super.cleanup(ctx);
  // Dirty trick: piggyback the aggregate statistics vectors on the regular map
  // output, keyed by reserved marker indices so the reducer can tell them apart
  // from real rows.
  ctx.write(new IntWritable(NORM_VECTOR_MARKER), new VectorWritable(norms));
  ctx.write(
      new IntWritable(NUM_NON_ZERO_ENTRIES_VECTOR_MARKER), new VectorWritable(nonZeroEntries));
  ctx.write(new IntWritable(MAXVALUE_VECTOR_MARKER), new VectorWritable(maxValues));
}
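The marker constants are simply reserved row indices that can never collide with a real row ID; a hypothetical definition (the names match the code above, the exact values are an assumption):

// Reserved keys well below any valid row index (hypothetical values).
private static final int NORM_VECTOR_MARKER = Integer.MIN_VALUE;
private static final int NUM_NON_ZERO_ENTRIES_VECTOR_MARKER = Integer.MIN_VALUE + 1;
private static final int MAXVALUE_VECTOR_MARKER = Integer.MIN_VALUE + 2;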
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  // No mapper-local state to release; delegate to the default cleanup.
  super.cleanup(context);
}
/** {@inheritDoc} */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  super.cleanup(ctx);

  // Test hook: lets the error simulator inject a failure at map-cleanup time.
  HadoopErrorSimulator.instance().onMapCleanup();
}
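HadoopErrorSimulator is a test-harness singleton; all this snippet relies on is an instance() accessor and a lifecycle callback such as onMapCleanup(). A hypothetical minimal version (any member beyond those two is an assumption):

public class HadoopErrorSimulator {
  private static volatile HadoopErrorSimulator instance = new HadoopErrorSimulator();

  public static HadoopErrorSimulator instance() {
    return instance;
  }

  /** Called from Mapper.cleanup(); no-op by default, tests override it to throw. */
  public void onMapCleanup() {
    // Inject nothing unless a failing subclass is installed.
  }
}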