Code Example #1
  /**
   * Takes the results of running linkWalkFirstPhase and creates an m/r job from them.
   *
   * @param firstPhaseResult the results of running linkWalkFirstPhase
   * @return the results from the intermediate bkeys of phase one
   * @throws IOException if the map/reduce job fails to execute
   */
  private MapReduceResult linkWalkSecondPhase(final MapReduceResult firstPhaseResult)
      throws IOException {
    try {
      @SuppressWarnings("rawtypes")
      Collection<LinkedList> bkeys = firstPhaseResult.getResult(LinkedList.class);

      BucketKeyMapReduce mr = new BucketKeyMapReduce(this);
      int stepCnt = 0;

      for (LinkedList<List<String>> step : bkeys) {
        // TODO find a way to *enforce* order here (custom
        // deserializer?)
        stepCnt++;
        for (List<String> input : step) {
          // use the step count as key data so we can aggregate the
          // results into the correct steps when they come back
          mr.addInput(input.get(0), input.get(1), Integer.toString(stepCnt));
        }
      }

      mr.addReducePhase(new NamedErlangFunction("riak_kv_mapreduce", "reduce_set_union"), false);
      mr.addMapPhase(
          new JSSourceFunction("function(v, keyData) { return [{\"step\": keyData, \"v\": v}]; }"),
          true);

      return mr.execute();
    } catch (ConversionException e) {
      throw new IOException(e); // keep the original exception as the cause
    } catch (RiakException e) {
      // the low-level API wraps an IOException (see linkWalkFirstPhase), so unwrap it
      throw (IOException) e.getCause();
    }
  }
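The step-count key data above only pays off when the caller regroups the second-phase output. Below is a minimal sketch of that regrouping, assuming java.util collections are imported; StepValue and groupBySteps are hypothetical names for illustration, not part of the Riak client API.

  // Sketch: the JS map phase emits {"step": "<n>", "v": ...}, so results can be
  // regrouped into ordered steps. StepValue is a hypothetical POJO for that shape.
  public static class StepValue {
    public String step; // the step count written via Integer.toString(stepCnt)
    public Object v;    // the fetched value for that step
  }

  private Map<Integer, List<Object>> groupBySteps(final MapReduceResult secondPhaseResult)
      throws ConversionException {
    // TreeMap keeps the steps in ascending order
    final Map<Integer, List<Object>> bySteps = new TreeMap<Integer, List<Object>>();
    for (StepValue sv : secondPhaseResult.getResult(StepValue.class)) {
      final int step = Integer.parseInt(sv.step);
      List<Object> values = bySteps.get(step);
      if (values == null) {
        values = new ArrayList<Object>();
        bySteps.put(step, values);
      }
      values.add(sv.v);
    }
    return bySteps;
  }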
Code Example #2
  @Override
  public void deleteLiveStatisticsOlderThan(Date date, String accountName) {
    // 15000 * 240 = 3,600,000 ms = 1 hour, so these are hour indices since 1970
    Long fromHoursSince1970 = date.getTime() / (15000 * 240);
    Long toHoursSince1970 = new Date().getTime() / (15000 * 240);
    logger.info("fromHoursSince1970: " + fromHoursSince1970);
    logger.info("toHoursSince1970: " + toHoursSince1970);

    try {

      for (int index = fromHoursSince1970.intValue();
          index <= toHoursSince1970.intValue();
          index++) {
        int keys = 0;
        Bucket hourBucket = riakClient.fetchBucket(accountName + ";" + index).execute();
        try {
          for (String key : hourBucket.keys()) {
            // delete(key) only builds the operation; execute() actually runs it
            hourBucket.delete(key).execute();
            keys++;
          }
        } catch (RiakException e) {
          e.printStackTrace();
        }

        logger.info("deleted all keys(" + keys + ") in bucket: " + accountName + ";" + index);
      }

    } catch (RiakRetryFailedException rrfe) {
      rrfe.printStackTrace();
    }

  }
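The divisor 15000 * 240 works out to 3,600,000 ms, i.e. one hour, so the loop above walks hour-indexed buckets between the two dates. A sketch of the same arithmetic written self-documentingly, assuming java.util.concurrent.TimeUnit is imported; MILLIS_PER_HOUR and hoursSince1970 are illustrative names, not from the original code.

  // 15000 * 240 = 3,600,000 ms = 1 hour; spelling it out removes the magic number.
  private static final long MILLIS_PER_HOUR = TimeUnit.HOURS.toMillis(1);

  private static long hoursSince1970(final Date date) {
    // identical result to date.getTime() / (15000 * 240)
    return date.getTime() / MILLIS_PER_HOUR;
  }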
Code Example #3
  /**
   * Creates an m/r job from the supplied link spec and executes it
   *
   * @param linkWalkSpec the Link Walk spec
   * @return {@link MapReduceResult} containing the end of the link and any intermediate bkeys for a
   *     second pass
   * @throws IOException if the map/reduce job fails to execute
   */
  private MapReduceResult linkWalkFirstPhase(final LinkWalkSpec linkWalkSpec) throws IOException {
    BucketKeyMapReduce mr = new BucketKeyMapReduce(this);
    mr.addInput(linkWalkSpec.getStartBucket(), linkWalkSpec.getStartKey());
    int size = linkWalkSpec.size();
    int cnt = 0;

    for (LinkWalkStep step : linkWalkSpec) {
      cnt++;
      boolean keep = linkAccumulateToLinkPhaseKeep(step.getKeep(), cnt == size);
      mr.addLinkPhase(step.getBucket(), step.getTag(), keep);
    }

    // This is a bit of a hack: the low-level API is implemented on top of the
    // high-level API, so the wrapped IOException must be unwrapped here.
    try {
      return mr.execute();
    } catch (RiakException e) {
      throw (IOException) e.getCause();
    }
  }
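For context, the two phases above are meant to run back to back. A hedged sketch of a caller follows; linkWalk is a hypothetical name, and the real client wires the phases together elsewhere.

  private MapReduceResult linkWalk(final LinkWalkSpec linkWalkSpec) throws IOException {
    // Phase one follows the links and records intermediate bucket/key pairs;
    // phase two fetches those pairs and tags each result with its step number.
    final MapReduceResult firstPhaseResult = linkWalkFirstPhase(linkWalkSpec);
    return linkWalkSecondPhase(firstPhaseResult);
  }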