/**
  * Emits one output record per item pair: the average rating difference across
  * all users who rated both items, plus the number of such users.
  *
  * <p>Output value layout (fieldDelim-separated): {@code itemPair, avgDiff, count},
  * written under a {@code NullWritable} key so only the text line is emitted.
  *
  * @param key     the item pair whose rating differences are being aggregated
  * @param values  one rating difference per co-rating user
  * @param context Hadoop context used to emit the result line
  */
 @Override
 protected void reduce(TextPair key, Iterable<IntWritable> values, Context context)
     throws IOException, InterruptedException {
   sum = count = 0;
   for (IntWritable value : values) {
     sum += value.get();
     ++count;
   }
   // count >= 1 is guaranteed: Hadoop never invokes reduce() with an empty
   // values iterable, so this division cannot divide by zero.
   // NOTE(review): if sum and count are ints this truncates toward zero —
   // confirm an integer average is intended (vs. (double) sum / count).
   avRatingDiff = sum / count;
   valueOut.set(key.toString() + fieldDelim + avRatingDiff + fieldDelim + count);
   context.write(NullWritable.get(), valueOut);
 }
 /**
  * Emits the rating difference for every pair of items appearing in one input
  * record.
  *
  * <p>Input line layout (fieldDelim-separated): the first token is skipped
  * (presumably a user id — confirm against the upstream job); each remaining
  * token is {@code itemId<subFieldDelim>rating}.
  *
  * <p>The emitted key orders the two item ids lexicographically so that both
  * orderings of a pair land in the same reduce group; the value is always the
  * rating of the lexicographically smaller item minus that of the larger one.
  *
  * @param key     byte offset of the line (unused)
  * @param value   one co-rating record for a single user
  * @param context Hadoop context used to emit (itemPair, ratingDiff) pairs
  */
 @Override
 protected void map(LongWritable key, Text value, Context context)
     throws IOException, InterruptedException {
   String[] items = value.toString().split(fieldDelim);
   for (int i = 1; i < items.length; ++i) {
     String[] first = items[i].split(subFieldDelim);
     String itemOne = first[0];
     int ratingOne = Integer.parseInt(first[1]);
     for (int j = i + 1; j < items.length; ++j) {
       // BUG FIX: original split items[i] here, pairing every item with
       // itself and ignoring items[j] entirely.
       String[] second = items[j].split(subFieldDelim);
       String itemTwo = second[0];
       int ratingTwo = Integer.parseInt(second[1]);
       if (itemOne.compareTo(itemTwo) < 0) {
         keyOut.set(itemOne, itemTwo);
         valOut.set(ratingOne - ratingTwo);
       } else {
         keyOut.set(itemTwo, itemOne);
         valOut.set(ratingTwo - ratingOne);
       }
       context.write(keyOut, valOut);
     }
   }
 }