@Override
 protected void setup(Context context) throws IOException, InterruptedException {
   super.setup(context);
   minSupport =
       context
           .getConfiguration()
           .getInt(DictionaryVectorizer.MIN_SUPPORT, DictionaryVectorizer.DEFAULT_MIN_SUPPORT);
   parseWeibo = context.getConfiguration().getBoolean(DictionaryVectorizer.PARSE_WEIBO, false);
 }
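For reference, here is a minimal driver-side sketch showing how these two values would reach the mapper; the concrete values and the job name are illustrative assumptions, not taken from the original example:

  // Hypothetical driver setup: setup() above reads these keys back out of the
  // job configuration, so they must be set before the job is submitted.
  Configuration conf = new Configuration();
  conf.setInt(DictionaryVectorizer.MIN_SUPPORT, 5);        // assumed value
  conf.setBoolean(DictionaryVectorizer.PARSE_WEIBO, true); // assumed value
  Job job = Job.getInstance(conf, "dictionary-vectorizer");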
Example #2
  /**
   * Test connecting three clients to one server.
   *
   * @throws IOException
   */
  @Test
  public void connectThreeClientsToOneServer() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    WorkerInfo workerInfo = new WorkerInfo();
    NettyServer server =
        new NettyServer(
            conf, new WorkerRequestServerHandler.Factory(serverData), workerInfo, context);
    server.start();
    workerInfo.setInetSocketAddress(server.getMyAddress());

    List<WorkerInfo> addresses = Lists.<WorkerInfo>newArrayList(workerInfo);
    NettyClient client1 = new NettyClient(context, conf, new WorkerInfo());
    client1.connectAllAddresses(addresses);
    NettyClient client2 = new NettyClient(context, conf, new WorkerInfo());
    client2.connectAllAddresses(addresses);
    NettyClient client3 = new NettyClient(context, conf, new WorkerInfo());
    client3.connectAllAddresses(addresses);

    client1.stop();
    client2.stop();
    client3.stop();
    server.stop();
  }
Example #3
 /* (non-Javadoc)
  * @see org.apache.hadoop.mapreduce.Mapper#setup(org.apache.hadoop.mapreduce.Mapper.Context)
  */
 protected void setup(Context context) throws IOException, InterruptedException {
   Configuration config = context.getConfiguration();
   fieldDelimRegex = config.get("field.delim.regex", ",");
   isValidationMode = config.getBoolean("validation.mode", true);
   classCondtionWeighted = config.getBoolean("class.condition.weighted", false);
   String predictionMode = config.get("prediction.mode", "classification");
   String regressionMethod = config.get("regression.method", "average");
   isLinearRegression =
       predictionMode.equals("regression") && regressionMethod.equals("linearRegression");
 }
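A hedged sketch of the driver-side configuration that would take the linear-regression branch above; the key names and values come directly from the setup() code, only the wiring is assumed:

  // Settings that make isLinearRegression evaluate to true in setup().
  Configuration conf = new Configuration();
  conf.set("prediction.mode", "regression");
  conf.set("regression.method", "linearRegression");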
Example #4
    @Override
    public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
      Configuration conf = context.getConfiguration();
      String keywords = conf.get("keyword");
      // System.out.println(keywords);
      String line = value.toString();
      String[] terms = line.split("\t"); // terms[0] is required keyword
      String[] multiKeywords = keywords.split(" ");
      boolean foundflag = false;
      for (int i = 0; i < multiKeywords.length; ++i) {
        if (multiKeywords[i].equals(terms[0])) {
          foundflag = true;
          break; // stop scanning once the keyword is matched
        }
      }

      if (foundflag) {
        context.write(new Text("Keywords"), new Text(terms[0] + "@" + terms[1].toString()));
      }
    }
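To show how this mapper is fed, a small sketch of the driver side and the expected input shape; the sample keywords and payload are assumptions for illustration:

  // Hypothetical driver: map() reads "keyword" back from the configuration,
  // so the space-separated search terms must be set before job submission.
  Configuration conf = new Configuration();
  conf.set("keyword", "hadoop spark"); // assumed sample terms
  // Each input line is expected as "<keyword>\t<payload>", e.g. "hadoop\tdoc42":
  // terms[0] is matched against the keyword list and terms[1] is emitted.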
Example #5
  /**
   * Test connecting one client to three servers.
   *
   * @throws IOException
   */
  @Test
  public void connectOneClientToThreeServers() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    RequestServerHandler.Factory requestServerHandlerFactory =
        new WorkerRequestServerHandler.Factory(serverData);

    WorkerInfo workerInfo1 = new WorkerInfo();
    workerInfo1.setTaskId(1);
    NettyServer server1 = new NettyServer(conf, requestServerHandlerFactory, workerInfo1, context);
    server1.start();
    workerInfo1.setInetSocketAddress(server1.getMyAddress());

    WorkerInfo workerInfo2 = new WorkerInfo();
    workerInfo2.setTaskId(2);
    NettyServer server2 = new NettyServer(conf, requestServerHandlerFactory, workerInfo2, context);
    server2.start();
    workerInfo2.setInetSocketAddress(server2.getMyAddress());

    WorkerInfo workerInfo3 = new WorkerInfo();
    workerInfo3.setTaskId(3);
    NettyServer server3 = new NettyServer(conf, requestServerHandlerFactory, workerInfo3, context);
    server3.start();
    workerInfo3.setInetSocketAddress(server3.getMyAddress());

    NettyClient client = new NettyClient(context, conf, new WorkerInfo());
    List<WorkerInfo> addresses =
        Lists.<WorkerInfo>newArrayList(workerInfo1, workerInfo2, workerInfo3);
    client.connectAllAddresses(addresses);

    client.stop();
    server1.stop();
    server2.stop();
    server3.stop();
  }
Example #6
  public void map(Object key, Text value, Context context)
      throws IOException, InterruptedException {

    Configuration conf = context.getConfiguration();
    FileSplit split = (FileSplit) context.getInputSplit();
    String rootFolder = split.getPath().getParent().toString();
    String uri = rootFolder + "/" + value.toString();
    // e.g. C:/hadoopsample/input/images/image1.jpg

    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    FSDataInputStream in = null;
    try {
      in = fs.open(new Path(uri));
      ByteArrayOutputStream bout = new ByteArrayOutputStream();
      byte[] buffer = new byte[1024 * 1024];

      // Copy only the bytes actually read on each pass; read() may return
      // fewer bytes than the buffer length, and returns -1 at end of stream.
      int bytesRead;
      while ((bytesRead = in.read(buffer)) != -1) {
        bout.write(buffer, 0, bytesRead);
      }
      context.write(value, new BytesWritable(bout.toByteArray()));
    } finally {
      IOUtils.closeStream(in);
    }
  }
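Because the mapper emits whole images as BytesWritable values, a SequenceFile is a natural output container; the following driver sketch is an assumption, since the original example does not show its job setup:

  // Hypothetical driver wiring for the image-loading mapper above.
  Job job = Job.getInstance(new Configuration(), "image-loader");
  job.setOutputFormatClass(SequenceFileOutputFormat.class);
  job.setOutputKeyClass(Text.class);            // image file name
  job.setOutputValueClass(BytesWritable.class); // raw image bytes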
Example #7
 @Override
 protected void setup(Context context) throws IOException, InterruptedException {
   Configuration conf = context.getConfiguration();
   this.N = conf.getInt("N", 10);
 }
Example #8
    /* (non-Javadoc)
     * @see org.apache.hadoop.mapreduce.Reducer#setup(org.apache.hadoop.mapreduce.Reducer.Context)
     */
    protected void setup(Context context) throws IOException, InterruptedException {
      Configuration config = context.getConfiguration();
      if (config.getBoolean("debug.on", false)) {
        LOG.setLevel(Level.DEBUG);
        System.out.println("in debug mode");
      }

      fieldDelim = config.get("field.delim", ",");
      topMatchCount = config.getInt("top.match.count", 10);
      isValidationMode = config.getBoolean("validation.mode", true);
      kernelFunction = config.get("kernel.function", "none");
      kernelParam = config.getInt("kernel.param", -1);
      classCondtionWeighted = config.getBoolean("class.condtion.weighted", false);
      neighborhood = new Neighborhood(kernelFunction, kernelParam, classCondtionWeighted);
      outputClassDistr = config.getBoolean("output.class.distr", false);
      inverseDistanceWeighted = config.getBoolean("inverse.distance.weighted", false);

      // regression
      String predictionMode = config.get("prediction.mode", "classification");
      if (predictionMode.equals("regression")) {
        neighborhood.withPredictionMode(PredictionMode.Regression);
        String regressionMethod = config.get("regression.method", "average");
        regressionMethod = WordUtils.capitalize(regressionMethod);
        neighborhood.withRegressionMethod(RegressionMethod.valueOf(regressionMethod));
      }

      // decision threshold for classification
      decisionThreshold = Double.parseDouble(config.get("decision.threshold", "-1.0"));
      if (decisionThreshold > 0 && neighborhood.IsInClassificationMode()) {
        String[] classAttrValues = config.get("class.attribute.values").split(",");
        posClassAttrValue = classAttrValues[0];
        negClassAttrValue = classAttrValues[1];
        neighborhood.withDecisionThreshold(decisionThreshold).withPositiveClass(posClassAttrValue);
      }

      // using cost based arbitrator for classification
      useCostBasedClassifier = config.getBoolean("use.cost.based.classifier", false);
      if (useCostBasedClassifier && neighborhood.IsInClassificationMode()) {
        if (null == posClassAttrValue) {
          String[] classAttrValues = config.get("class.attribute.values").split(",");
          posClassAttrValue = classAttrValues[0];
          negClassAttrValue = classAttrValues[1];
        }

        int[] misclassificationCost =
            Utility.intArrayFromString(config.get("misclassification.cost"));
        falsePosCost = misclassificationCost[0];
        falseNegCost = misclassificationCost[1];
        costBasedArbitrator =
            new CostBasedArbitrator(
                negClassAttrValue, posClassAttrValue, falseNegCost, falsePosCost);
      }

      // confusion matrix for classification validation
      if (isValidationMode) {
        if (neighborhood.IsInClassificationMode()) {
          InputStream fs =
              Utility.getFileStream(context.getConfiguration(), "feature.schema.file.path");
          ObjectMapper mapper = new ObjectMapper();
          schema = mapper.readValue(fs, FeatureSchema.class);
          classAttrField = schema.findClassAttrField();
          List<String> cardinality = classAttrField.getCardinality();
          predictingClasses = new String[2];
          predictingClasses[0] = cardinality.get(0);
          predictingClasses[1] = cardinality.get(1);
          confMatrix = new ConfusionMatrix(predictingClasses[0], predictingClasses[1]);
        }
      }
      LOG.debug(
          "classCondtionWeighted:"
              + classCondtionWeighted
              + " outputClassDistr:"
              + outputClassDistr);
    }
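Since this setup() reads a dozen configuration keys, a hedged example of a job configuration exercising the cost-based classification path may help; the key names come from the code above, every value is an illustrative assumption:

  Configuration conf = new Configuration();
  conf.set("field.delim", ",");
  conf.setInt("top.match.count", 5);
  conf.set("kernel.function", "gaussian");
  conf.setInt("kernel.param", 2);
  conf.set("prediction.mode", "classification");
  conf.setBoolean("validation.mode", false);          // skip the schema-based confusion matrix
  conf.set("decision.threshold", "0.6");              // read back via parseDouble
  conf.set("class.attribute.values", "1,0");          // positive class first
  conf.setBoolean("use.cost.based.classifier", true);
  conf.set("misclassification.cost", "3,1");          // falsePosCost, falseNegCost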
Example #9
 public void setup(Context context) throws IOException, InterruptedException {
   this.jobconf = context.getConfiguration();
 }