// Word-count style mapper: tokenize each input line and emit (word, 1)
// for every token. Relies on reusable `word` (Text) and `one` (IntWritable)
// fields declared on the enclosing Mapper.
public void map(LongWritable key, Text value, Context context)
    throws IOException, InterruptedException {
  String line = value.toString();
  StringTokenizer tokenizer = new StringTokenizer(line);
  while (tokenizer.hasMoreTokens()) {
    word.set(tokenizer.nextToken());
    context.write(word, one);
  }
}
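For context, a minimal sketch of the enclosing Mapper that snippets like this assume. The class name is hypothetical, but the `word` and `one` fields match the identifiers used above, and several later examples reuse the same pattern:

import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical enclosing class for the map() above.
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
  // Reusable output objects: Hadoop serializes them on each context.write(),
  // so mutating and re-emitting the same instances avoids per-token allocation.
  private static final IntWritable one = new IntWritable(1);
  private final Text word = new Text();

  // map(...) as shown above
}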
Example #2
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  boolean filtered = true;
  String keypair = key.toString();
  String[] keys = keypair.split(" ");
  // Single-word keys and wildcard pairs ("* x" / "x *") are never filtered.
  if (keys.length == 1) {
    filtered = false;
  } else if (keys[0].equals("*") || keys[1].equals("*")) {
    filtered = false;
  }

  if (!filtered) {
    for (IntWritable val : values) {
      sum += val.get();
    }
    context.write(key, new IntWritable(sum));
    return;
  }

  // For all other keys, a -1 value is a marker (emitted by the mapper,
  // see Example #7) that the key is needed; the marker itself is
  // excluded from the sum.
  for (IntWritable val : values) {
    if (val.get() == -1) {
      filtered = false;
      continue;
    }
    sum += val.get();
  }
  // Filter non-needed events: drop keys that never received the -1 marker.
  if (filtered) return;
  context.write(key, new IntWritable(sum));
}
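A map/reduce pair like this is wired into a job by a standard driver. Below is a minimal sketch; `PairCountDriver`, `PairCountMapper`, and `PairCountReducer` are hypothetical names for the classes holding the methods shown in these examples:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class PairCountDriver {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "pair count");
    job.setJarByClass(PairCountDriver.class);
    job.setMapperClass(PairCountMapper.class);   // hypothetical mapper class
    job.setReducerClass(PairCountReducer.class); // hypothetical reducer class
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));   // input directory
    FileOutputFormat.setOutputPath(job, new Path(args[1])); // output directory
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

Note that no combiner is set here: a combiner would fold the -1 markers into partial sums before the reducer ever saw them, breaking the filtering logic above.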
Example #3
public void map(Object key, Text value, Context context)
    throws IOException, InterruptedException {
  String file = value.toString();
  String[] lines = file.split("\n");

  for (String line : lines) {
    // Extract the text between <author> and </author> and emit it with
    // count 1. Computing the offsets from the tags is more robust than
    // assuming the tag sits at the start of the line.
    int start = line.indexOf("<author>");
    if (start < 0) continue;
    start += "<author>".length();
    int end = line.indexOf("</author>", start);
    // If the closing tag is on a later line, take the rest of this line.
    String author = (end >= 0) ? line.substring(start, end) : line.substring(start);
    word.set(author);
    context.write(word, one);
  }
}
Example #4
public void reduce(
    Text key,
    Iterable<Text> values,
    org.apache.hadoop.mapreduce.Reducer<Text, Text, Text, InvertedListWritable>.Context context)
    throws IOException, InterruptedException {
  // InvertedListWritable and ListURL are custom types declared elsewhere;
  // url, abs, and dr are fields of the enclosing Reducer.
  InvertedListWritable invertedList = new InvertedListWritable();
  for (Text k : values) {
    // Each value is "<url> <abstract words...>": split off the URL, then
    // rejoin the remaining tokens as the abstract text.
    StringTokenizer itr = new StringTokenizer(k.toString());
    url = "";
    abs = "";
    if (itr.hasMoreTokens()) url = itr.nextToken();
    while (itr.hasMoreTokens()) {
      abs += itr.nextToken() + " ";
    }
    dr = new ListURL(url, abs);
    invertedList.paddingValueKey(dr);
  }
  invertedList.quickSortNodeKey();
  context.write(key, invertedList);
}
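InvertedListWritable and ListURL are not shown in this example. As a rough sketch of what such a value type must provide, here is a hypothetical single-entry analogue implementing Hadoop's Writable contract; the class and field names are assumptions, not the original code:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// Hypothetical (url, abstract) pair that Hadoop can serialize through the
// shuffle and write as reducer output.
public class UrlEntryWritable implements Writable {
  private String url = "";
  private String abs = "";

  public UrlEntryWritable() {} // no-arg constructor required for deserialization

  public UrlEntryWritable(String url, String abs) {
    this.url = url;
    this.abs = abs;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeUTF(url); // serialize fields in a fixed order
    out.writeUTF(abs);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    url = in.readUTF(); // deserialize in the same order
    abs = in.readUTF();
  }

  @Override
  public String toString() {
    return url + "\t" + abs; // used by TextOutputFormat
  }
}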
Example #5
@Override
public void map(Object key, Text value, Context context)
    throws IOException, InterruptedException {
  // Each line starts with a movie id followed by tokens. Emit the constant
  // key "1" once per token so the reducer produces a single global count.
  String line = value.toString();
  StringTokenizer tokenizer = new StringTokenizer(line);
  if (!tokenizer.hasMoreTokens()) return; // skip blank lines
  int movieId = Integer.parseInt(tokenizer.nextToken()); // leading id; not part of the output
  while (tokenizer.hasMoreTokens()) {
    tokenizer.nextToken();
    context.write(new Text("1"), new IntWritable(1));
  }
}
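Because every record goes to the single key "1", all counting funnels through one reduce call. A combiner can pre-aggregate on the map side; a sketch using Hadoop's stock IntSumReducer, where `TokenCountJob` and `TokenCountMapper` are hypothetical names:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;

public class TokenCountJob {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance();
    job.setJarByClass(TokenCountJob.class);
    job.setMapperClass(TokenCountMapper.class); // hypothetical name for the mapper above
    job.setCombinerClass(IntSumReducer.class);  // safe here: summing 1s is associative
    job.setReducerClass(IntSumReducer.class);   // final sum per key
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    // input/output paths omitted; see the driver sketch after Example #2
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}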
Example #6
public void map(
    Text key,
    Text val,
    org.apache.hadoop.mapreduce.Mapper<Text, Text, Text, Text>.Context context)
    throws IOException, InterruptedException {
  initStopWordsMap(); // load the stop-word list into hmStopWord

  String line = val.toString();
  StringTokenizer itr =
      new StringTokenizer(line.toLowerCase(), tokenDelimiter); // split on the configured delimiters
  int n = itr.countTokens();
  cache = new String[n];
  int i = 0;
  while (itr.hasMoreTokens()) {
    cache[i++] = itr.nextToken(); // fill the cache with the words of the content
  }

  // For every non-stop-word, emit (word, document key + a context window of
  // up to 10 words on each side).
  for (i = 0; i < n; i++) {
    keyWord = cache[i].trim();
    if (!hmStopWord.containsKey(keyWord)) {
      int lj = Math.max(i - 10, 0); // window lower bound, clamped to the text
      int hj = Math.min(i + 10, n); // window upper bound, clamped to the text
      StringBuilder tem = new StringBuilder(" ");
      for (int j = lj; j < hj; j++) tem.append(cache[j]).append(' ');
      location = new Text(key.toString() + tem);
      context.write(new Text(keyWord), location);
    }
  }
}
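This mapper leans on several fields and a helper that are declared outside the snippet. A plausible set of declarations, purely as an assumption about the surrounding class (the delimiter set and stop-word loading are guesses):

import java.util.HashMap;
import org.apache.hadoop.io.Text;

// Assumed members of the enclosing Mapper (names taken from the snippet above).
private String tokenDelimiter = " \t\n.,;:!?\"()[]"; // assumed delimiter set
private String[] cache;                              // words of the current document
private String keyWord;
private Text location;
private HashMap<String, String> hmStopWord = new HashMap<>();

private void initStopWordsMap() {
  // Sketch: a real implementation would load a stop-word file; a few
  // hard-coded entries stand in here.
  hmStopWord.put("the", "");
  hmStopWord.put("a", "");
  hmStopWord.put("and", "");
}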
Example #7
public void map(LongWritable key, Text value, Context context)
    throws IOException, InterruptedException {
  String line = value.toString();

  // Input lines are "<labels>\t<content>".
  splitposition = line.indexOf("\t");
  labels = line.substring(0, splitposition);
  content = line.substring(splitposition + 1);

  // Very short contents are pre-computed counts: pass them through directly.
  if (content.length() <= 5) {
    word.set(labels);
    int counter = Integer.parseInt(content);
    context.write(word, new IntWritable(counter));
    return;
  }

  labeltokens = labels.split(","); // this record's own labels
  contenttokens = tokenizeDoc(content);

  // Emit a -1 marker for every (label, token) pair across the full label
  // space; these markers denote the needed events that the reducer
  // (see Example #2) keeps.
  for (String label : labelspace) {
    for (String token : contenttokens) {
      word.set(label + " " + token);
      context.write(word, new IntWritable(-1));
    }
  }
}
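As with the earlier examples, several members are declared outside the snippet. A hedged guess at what the enclosing Mapper provides; tokenizeDoc in particular is assumed to be a simple word splitter, and every name below is taken from the snippet rather than from known source:

import org.apache.hadoop.io.Text;

// Assumed members of the enclosing Mapper.
private int splitposition;
private String labels;
private String content;
private String[] labeltokens;
private String[] contenttokens;
private String[] labelspace; // the full set of labels, presumably initialized beforehand
private final Text word = new Text();

// Hypothetical tokenizer: lower-case, keep letters only, split on whitespace.
private static String[] tokenizeDoc(String doc) {
  return doc.toLowerCase().replaceAll("[^a-z\\s]", " ").trim().split("\\s+");
}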