// emit the buffered top-k DataPairs, then delete the per-task random scratch directory
protected void cleanup(Context context) throws IOException, InterruptedException {
    Iterator<DataPair> it = tree.iterator();
    while (it.hasNext()) {
        DataPair dp = it.next();
        context.write(new DoubleWritable(dp.getLigandDouble()), dp);
    }
    fo.deleteDir(new File("/home/hadoop/vinaJob/" + tmpPath));
}
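// cleanup() above drains a score-ordered, size-bounded "tree" of DataPair results that
// map() presumably fills as each docking run finishes. A minimal sketch of that
// map-side top-k buffer follows; the TreeSet, the comparator, and the offer() helper
// are illustrative assumptions, not code taken from the original project.
// (requires java.util.Comparator and java.util.TreeSet)
private TreeSet<DataPair> tree = new TreeSet<>(
        Comparator.comparingDouble(DataPair::getLigandDouble)
                  .thenComparing(DataPair::getLigandPath));

// Hypothetical helper called from map(): keep only the k lowest (best) binding energies.
private void offer(DataPair dp) {
    tree.add(dp);
    if (tree.size() > k) {
        tree.pollLast(); // evict the current worst score
    }
}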
// top-k selection: persist each kept ligand's vina output and log to HDFS,
// then emit (score, ligand name) as the final result listing
public void reduce(DoubleWritable key, Iterable<DataPair> values, Context context)
        throws IOException, InterruptedException {
    for (DataPair val : values) {
        if (count < k) {
            hf.createNewHDFSFile(val.getLigandPath(), val.getVinaResult());
            hf.createNewHDFSFile(val.getLogPath(), val.getVinaLog());
            int position = val.getLigandPath().lastIndexOf("/");
            String ligandName = val.getLigandPath().substring(position + 1);
            context.write(new DoubleWritable(val.getLigandDouble()), new Text(ligandName));
            /* context.write(new DoubleWritable(val.getLigandDouble()),
                    new Text(val.getLigandPath())); */
            count++;
        } else {
            break;
        }
    }
}
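// HadoopFile is a project helper whose implementation is not shown in this section.
// A plausible sketch of createNewHDFSFile(path, content) using the standard HDFS
// FileSystem API could look like the following; the method name matches the calls
// above, but the body itself is an assumption.
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HadoopFile {
    // Write a string into a new (or overwritten) file on HDFS.
    public void createNewHDFSFile(String dst, String content) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        try (FSDataOutputStream out = fs.create(new Path(dst), true)) {
            out.write(content.getBytes(StandardCharsets.UTF_8));
        }
    }
}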
/*
 * (non-Javadoc)
 *
 * @see org.apache.hadoop.mapreduce.Mapper#setup(org.apache.hadoop.mapreduce.Mapper.Context)
 */
public void setup(Context context) throws IOException, InterruptedException {
    conf = context.getConfiguration();
    conf2HDFS = conf.get("conf2HDFS");
    receptorHDFS = conf.get("receptorHDFS");
    seed = conf.get("seed");
    vinaJobID = conf.get("vinaJobID");
    k = conf.getInt("k", 1000);
    hf = new HadoopFile();
    fo = new FileOperation();
    // stage the docking config and receptor from HDFS into a per-task random local directory
    tmpPath = fo.randomString();
    conf2Local = "/home/hadoop/vinaJob/" + tmpPath + "/"
            + conf2HDFS.substring(conf2HDFS.lastIndexOf("/") + 1);
    receptorLocal = "/home/hadoop/vinaJob/" + tmpPath + "/"
            + receptorHDFS.substring(receptorHDFS.lastIndexOf("/") + 1);
    hf.HadoopToLocal(conf2HDFS, conf2Local);
    hf.HadoopToLocal(receptorHDFS, receptorLocal);
}
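// HadoopToLocal() stages a file from HDFS onto the worker's local disk so the local vina
// binary can read the receptor and docking config. Its real body is not shown in this
// section; complementing the sketched HadoopFile above, an assumed implementation using
// the standard FileSystem API could be:
// (requires org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FileSystem,
//  org.apache.hadoop.fs.Path)
public void HadoopToLocal(String hdfsPath, String localPath) throws IOException {
    // Copy one HDFS file to a local path on the node running this task.
    FileSystem fs = FileSystem.get(new Configuration());
    fs.copyToLocalFile(new Path(hdfsPath), new Path(localPath));
}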
// reducer-side setup: only the global top-k size and the HDFS helper are needed here
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    hf = new HadoopFile();
    k = conf.getInt("k", 1000);
}
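// Both setup() methods read their parameters from the job Configuration, so the driver
// must set them before submission. A minimal driver sketch follows; VinaDriver,
// VinaMapper, VinaReducer, and the argument order are illustrative assumptions, while
// the configuration keys match the ones read above. A single reduce task is assumed so
// that the top-k selection in reduce() is global.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class VinaDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Keys consumed by the mapper/reducer setup() methods above.
        conf.set("conf2HDFS", args[0]);
        conf.set("receptorHDFS", args[1]);
        conf.set("seed", args[2]);
        conf.set("vinaJobID", args[3]);
        conf.setInt("k", Integer.parseInt(args[4]));

        Job job = Job.getInstance(conf, "vina-docking");
        job.setJarByClass(VinaDriver.class);
        job.setMapperClass(VinaMapper.class);
        job.setReducerClass(VinaReducer.class);
        job.setNumReduceTasks(1); // keep the top-k ordering global
        job.setMapOutputKeyClass(DoubleWritable.class);
        job.setMapOutputValueClass(DataPair.class); // DataPair must implement Writable
        job.setOutputKeyClass(DoubleWritable.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(args[5]));
        FileOutputFormat.setOutputPath(job, new Path(args[6]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}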