Example #1
0
 // Flush every buffered DataPair (keyed by its ligand score) to the output,
 // then remove this task's scratch directory under /home/hadoop/vinaJob.
 protected void cleanup(Context context) throws IOException, InterruptedException {
   for (Iterator<DataPair> iter = tree.iterator(); iter.hasNext(); ) {
     DataPair pair = iter.next();
     context.write(new DoubleWritable(pair.getLigandDouble()), pair);
   }
   // Best-effort deletion of the per-job temporary working directory.
   fo.deleteDir(new File("/home/hadoop/vinaJob/" + tmpPath));
 }
Example #2
0
 // Emits the global top-K results: persists each kept docking result and its
 // log to HDFS, then outputs a (score, ligand file name) pair. The `count`
 // field persists across reduce() invocations, so at most `k` records are
 // emitted in total for this reducer.
 public void reduce(DoubleWritable key, Iterable<DataPair> values, Context context)
     throws IOException, InterruptedException {
   for (DataPair val : values) {
     if (count >= k) {
       // Top-K quota already reached; remaining values are lower-ranked.
       break;
     }
     // Persist the docking result and its log under the ligand's HDFS paths.
     hf.createNewHDFSFile(val.getLigandPath(), val.getVinaResult());
     hf.createNewHDFSFile(val.getLogPath(), val.getVinaLog());
     // Output only the file name (last path segment), not the full HDFS path.
     int slash = val.getLigandPath().lastIndexOf('/');
     String ligandName = val.getLigandPath().substring(slash + 1);
     context.write(new DoubleWritable(val.getLigandDouble()), new Text(ligandName));
     count++;
   }
 }