public static void main(String[] args) throws Exception {
  String inputDirectory = "/home/cs246/Desktop/HW2/input";
  String outputDirectory = "/home/cs246/Desktop/HW2/output";
  String centroidDirectory = "/home/cs246/Desktop/HW2/config";
  int iterations = 20;
  for (int i = 1; i <= iterations; i++) {
    Configuration conf = new Configuration();
    // Pass the current and next centroid files to the tasks via the job configuration.
    // (The original snippet declared and set these twice, which would not compile.)
    String cFile = centroidDirectory + "/c" + i + ".txt";
    String nextCFile = centroidDirectory + "/c" + (i + 1) + ".txt";
    conf.set("CFILE", cFile);
    conf.set("NEXTCFILE", nextCFile);
    Job job = new Job(conf, "HW2_Q4." + i);
    job.setJarByClass(HW2_Q4.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);
    job.setMapperClass(Map1.class);
    job.setReducerClass(Reduce1.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    FileInputFormat.addInputPath(job, new Path(inputDirectory));
    FileOutputFormat.setOutputPath(job, new Path(outputDirectory + "/output" + i));
    job.waitForCompletion(true);
  }
}
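// A minimal sketch (not part of the original snippet) of how Map1 might consume the
// CFILE path published by the driver above: load the centroids from HDFS in setup().
// The actual Map1/Reduce1 implementations are not shown in this code; the field name,
// file layout (whitespace-separated coordinates per line), and parsing are assumptions.
public static class Map1 extends Mapper<LongWritable, Text, IntWritable, Text> {
  private List<double[]> centroids = new ArrayList<double[]>();

  @Override
  protected void setup(Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    Path cFile = new Path(conf.get("CFILE")); // set by the driver each iteration
    FileSystem fs = FileSystem.get(conf);
    BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(cFile)));
    String line;
    while ((line = reader.readLine()) != null) {
      String[] parts = line.trim().split("\\s+");
      double[] centroid = new double[parts.length];
      for (int d = 0; d < parts.length; d++) {
        centroid[d] = Double.parseDouble(parts[d]);
      }
      centroids.add(centroid);
    }
    reader.close();
  }
}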
public int run(String[] args) throws Exception {
  Configuration argConf = getConf();
  // JobConf conf = new JobConf(diffdb.class);
  Configuration config = HBaseConfiguration.create();
  HBaseAdmin hbAdmin = new HBaseAdmin(config);
  dbutil db_util = new dbutil(config);

  HTable runTable = new HTable(config, "gestore_runs");
  Get runGet = new Get(argConf.get("id").getBytes());
  Result pipeline = runTable.get(runGet);

  NavigableMap<byte[], byte[]> pipeMap = pipeline.getFamilyMap("d".getBytes());
  Map.Entry<byte[], byte[]> results = pipeMap.pollFirstEntry();
  HashMap<String, HashMap<String, String>> resultMap =
      new HashMap<String, HashMap<String, String>>();
  while (results != null) {
    String resultKey = new String(results.getKey());
    String resultValue = new String(results.getValue());
    String field = "type";
    HashMap<String, String> tempMap = new HashMap<String, String>();
    String entry = resultKey;
    if (resultKey.endsWith("_db_timestamp")) {
      field = "db_timestamp";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_db_timestamp"));
    } else if (resultKey.endsWith("_filename")) {
      field = "filename";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_filename"));
    } else if (resultKey.endsWith("_regex")) {
      field = "regex";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_regex"));
    }
    if (resultMap.containsKey(entry)) {
      tempMap = resultMap.get(entry);
    }
    tempMap.put(field, resultValue);
    resultMap.put(entry, tempMap);
    // System.out.println("Key: " + resultKey + " Value: " + resultValue);
    results = pipeMap.pollFirstEntry();
  }

  for (String key : resultMap.keySet()) {
    System.out.println("File ID: " + key);
    for (String subKey : resultMap.get(key).keySet()) {
      // System.out.println("\t " + subKey + "\t\t" + resultMap.get(key).get(subKey));
      System.out.format(" %1$-20s %2$s\n", subKey, resultMap.get(key).get(subKey));
    }
  }
  return 0;
}
private static String getConfigString(Configuration config) {
  String output = "";
  Iterator<Map.Entry<String, String>> iterConfig = config.iterator();
  while (iterConfig.hasNext()) {
    Map.Entry<String, String> curEntry = iterConfig.next();
    output =
        output + "Key: \t" + curEntry.getKey() + "\nValue: \t" + curEntry.getValue() + "\n";
  }
  return output;
}
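// Hedged usage sketch (not from the original source): dump every key/value pair of a
// freshly loaded Configuration. Assumes it is called from the class declaring
// getConfigString(), since that method is private.
Configuration dumpConf = new Configuration();
System.out.print(getConfigString(dumpConf));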
// return the RpcEngine configured to handle a protocol
static synchronized RpcEngine getProtocolEngine(Class<?> protocol, Configuration conf) {
  RpcEngine engine = PROTOCOL_ENGINES.get(protocol);
  if (engine == null) {
    Class<?> impl =
        conf.getClass(ENGINE_PROP + "." + protocol.getName(), WritableRpcEngine.class);
    engine = (RpcEngine) ReflectionUtils.newInstance(impl, conf);
    PROTOCOL_ENGINES.put(protocol, engine);
  }
  return engine;
}
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);

  // Infer the matrix dimension by counting diagonal entries (row == col) in the input.
  Path matFile = new Path(args[0]);
  FSDataInputStream matData = fs.open(matFile);
  BufferedReader br = new BufferedReader(new InputStreamReader(matData));
  int i = 0;
  String line;
  while ((line = br.readLine()) != null) {
    StringTokenizer tokenizer = new StringTokenizer(line);
    String iRow = tokenizer.nextToken();
    String iCol = tokenizer.nextToken();
    if (Integer.parseInt(iRow) == Integer.parseInt(iCol)) {
      i++;
    }
  }
  br.close();
  int dimention = i;
  conf.setInt("DIMENTION", dimention);

  // Write the initial solution vector x = 0, one "index value" pair per line.
  Path xFile = new Path("preX/Result");
  FSDataOutputStream xData = fs.create(xFile);
  BufferedWriter iniX = new BufferedWriter(new OutputStreamWriter(xData));
  for (int j = 0; j < dimention; j++) {
    iniX.write(String.valueOf(j) + " 0");
    iniX.newLine();
  }
  iniX.close();

  // Ship the matrix file to every task via the distributed cache.
  URI matVec = new URI(args[0]);
  DistributedCache.addCacheFile(matVec, conf);

  // Run Jacobi passes until convergence or the iteration cap is reached.
  int iteration = 0;
  do {
    ToolRunner.run(conf, new Jacobi(), args);
  } while (iteration++ < max_iter && (!stopIteration(conf)));
}
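// Hypothetical sketch of the stopIteration(conf) helper referenced by the loop above;
// the original implementation is not shown. It assumes each Jacobi pass records the
// largest update delta in a marker file ("preX/diff" is an invented path) and treats a
// small delta as convergence. Both the path and the tolerance are assumptions.
private static boolean stopIteration(Configuration conf) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  Path diffFile = new Path("preX/diff");
  if (!fs.exists(diffFile)) {
    return false; // no delta recorded yet: keep iterating
  }
  BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(diffFile)));
  double delta = Double.parseDouble(reader.readLine().trim());
  reader.close();
  return delta < 1e-6; // assumed convergence tolerance
}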
/**
 * @param args
 * @throws IOException
 * @throws ClassNotFoundException
 * @throws InterruptedException
 */
public static void main(String[] args)
    throws IOException, InterruptedException, ClassNotFoundException {
  Configuration conf = new Configuration();
  // Serialize the CLI arguments so the mappers can read them back (see setup() below).
  // The original called an undefined toString(args); Arrays.toString is used instead
  // (requires java.util.Arrays).
  conf.set("inParameter", Arrays.toString(args));

  Job job = new Job(conf, "MovieGenreIdentifier");
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  job.setJarByClass(MovieGenreIdentifier.class);
  job.setMapperClass(Map.class);
  job.setCombinerClass(Reduce.class);
  job.setReducerClass(Reduce.class);
  job.setInputFormatClass(TextInputFormat.class);
  job.setOutputFormatClass(TextOutputFormat.class);
  FileInputFormat.addInputPath(job, new Path(args[0]));
  FileOutputFormat.setOutputPath(job, new Path(args[1]));
  job.waitForCompletion(true);
}
/**
 * Construct an RPC server.
 *
 * @param instance the instance whose methods will be called
 * @param conf the configuration to use
 * @param bindAddress the address to bind on to listen for connection
 * @param port the port to listen for connections on
 * @param numHandlers the number of method handler threads to run
 * @param verbose whether each call should be logged
 * @param supportOldJobConf supports server to deserialize old job conf
 */
public Server(
    Object instance,
    Configuration conf,
    String bindAddress,
    int port,
    int numHandlers,
    boolean verbose,
    boolean supportOldJobConf)
    throws IOException {
  super(
      bindAddress,
      port,
      Invocation.class,
      numHandlers,
      conf,
      classNameBase(instance.getClass().getName()),
      supportOldJobConf);
  this.instance = instance;
  this.verbose = verbose;
  this.authorize =
      conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, false);
}
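// Hedged usage sketch of the constructor above. MyProtocolImpl and all parameter values
// (bind address, port, five handlers, verbose logging, no old-JobConf support) are
// invented for illustration only.
Object rpcInstance = new MyProtocolImpl(); // hypothetical protocol implementation
Server server = new Server(rpcInstance, new Configuration(), "0.0.0.0", 9000, 5, true, false);
server.start(); // start() is inherited from the underlying ipc Server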
@Override
public int run(String[] args) throws Exception {
  String locatorHost = args[0];
  int locatorPort = Integer.parseInt(args[1]);
  String hdfsHomeDir = args[2];
  System.out.println(
      "KnownKeysMRv2 invoked with args (locatorHost = " + locatorHost
          + " locatorPort = " + locatorPort + " hdfsHomeDir = " + hdfsHomeDir);

  Configuration conf = getConf();
  conf.set(GFInputFormat.INPUT_REGION, "partitionedRegion");
  conf.set(GFInputFormat.HOME_DIR, hdfsHomeDir);
  conf.setBoolean(GFInputFormat.CHECKPOINT, false);
  conf.set(GFOutputFormat.REGION, "validationRegion");
  conf.set(GFOutputFormat.LOCATOR_HOST, locatorHost);
  conf.setInt(GFOutputFormat.LOCATOR_PORT, locatorPort);

  Job job = Job.getInstance(conf, "knownKeysMRv2");
  job.setInputFormatClass(GFInputFormat.class);
  job.setOutputFormatClass(GFOutputFormat.class);
  job.setMapperClass(KnownKeysMRv2Mapper.class);
  job.setMapOutputKeyClass(GFKey.class);
  job.setMapOutputValueClass(PEIWritable.class);
  job.setReducerClass(KnownKeysMRv2Reducer.class);
  // job.setOutputKeyClass(String.class);
  // job.setOutputValueClass(ValueHolder.class);
  return job.waitForCompletion(false) ? 0 : 1;
}
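// Hedged usage sketch: since run() overrides Tool.run() and calls getConf(), the job is
// presumably launched through ToolRunner. This main() is an assumed driver, not the
// original.
public static void main(String[] args) throws Exception {
  int exitCode = ToolRunner.run(new Configuration(), new KnownKeysMRv2(), args);
  System.exit(exitCode);
}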
/**
 * Set a protocol to use a non-default RpcEngine.
 *
 * @param conf configuration to use
 * @param protocol the protocol interface
 * @param engine the RpcEngine impl
 */
public static void setProtocolEngine(Configuration conf, Class<?> protocol, Class<?> engine) {
  conf.setClass(ENGINE_PROP + "." + protocol.getName(), engine, RpcEngine.class);
}
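// Hedged usage sketch tying the two engine methods together: register a non-default
// engine for a protocol, then resolve it through the cache in getProtocolEngine().
// MyProtocol and MyRpcEngine are invented names for illustration.
Configuration engineConf = new Configuration();
setProtocolEngine(engineConf, MyProtocol.class, MyRpcEngine.class);
RpcEngine engine = getProtocolEngine(MyProtocol.class, engineConf); // cached MyRpcEngine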
protected void setup(Context context) {
  Configuration config = context.getConfiguration();
  // Recover the CLI parameter string published by the driver (see main() above).
  inMovies = config.get("inParameter");
}
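// Hypothetical continuation of this Mapper; the original map() is not shown. Given the
// Text/IntWritable output types and the shared Reduce combiner in the driver, a natural
// shape is counting matches against the genres passed in via inMovies. The "::" record
// delimiter, field layout, and filtering logic below are all assumptions.
@Override
public void map(LongWritable key, Text value, Context context)
    throws IOException, InterruptedException {
  String[] fields = value.toString().split("::"); // assumed MovieLens-style delimiter
  String genre = fields[fields.length - 1];
  if (inMovies == null || inMovies.contains(genre)) {
    context.write(new Text(genre), new IntWritable(1));
  }
}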
public int run(String[] args) throws Exception {
  // printUsage();
  /*
   * SETUP
   */
  Configuration argConf = getConf();
  Hashtable<String, String> confArg = new Hashtable<String, String>();
  setup(confArg, argConf);
  Date currentTime = new Date();
  Date endDate = new Date(new Long(confArg.get("timestamp_stop")));
  Boolean full_run = confArg.get("intermediate").matches("(?i).*true.*");
  Boolean quick_add = confArg.get("quick_add").matches("(?i).*true.*");
  logger.info("Running GeStore");

  // ZooKeeper setup
  Configuration config = HBaseConfiguration.create();
  zkWatcher = new ZooKeeperWatcher(config, "Testing", new HBaseAdmin(config));
  zkInstance =
      new ZooKeeper(
          ZKConfig.getZKQuorumServersString(config),
          config.getInt("zookeeper.session.timeout", -1),
          zkWatcher);

  if (!confArg.get("task_id").isEmpty()) {
    confArg.put("temp_path", confArg.get("temp_path") + confArg.get("task_id"));
  }

  String lockRequest = confArg.get("file_id");
  if (!confArg.get("run_id").isEmpty())
    lockRequest = lockRequest + "_" + confArg.get("run_id") + "_";
  if (!confArg.get("task_id").isEmpty())
    lockRequest = lockRequest + "_" + confArg.get("task_id") + "_";

  // Get type of movement
  toFrom type_move = checkArgs(confArg);
  if (type_move == toFrom.LOCAL2REMOTE && !confArg.get("format").equals("unknown")) {
    List<String> arguments = new ArrayList<String>();
    arguments.add("-Dinput=" + confArg.get("local_path"));
    arguments.add("-Dtable=" + confArg.get("file_id"));
    arguments.add("-Dtimestamp=" + confArg.get("timestamp_stop"));
    arguments.add("-Dtype=" + confArg.get("format"));
    arguments.add("-Dtarget_dir=" + confArg.get("base_path") + "_" + confArg.get("file_id"));
    arguments.add("-Dtemp_hdfs_path=" + confArg.get("temp_path"));
    arguments.add("-Drun_id=" + confArg.get("run_id"));
    if (!confArg.get("run_id").isEmpty()) arguments.add("-Drun_id=" + confArg.get("run_id"));
    if (!confArg.get("task_id").isEmpty()) arguments.add("-Dtask_id=" + confArg.get("task_id"));
    if (quick_add) arguments.add("-Dquick_add=" + confArg.get("quick_add"));
    String lockName = lock(lockRequest);
    String[] argumentString = arguments.toArray(new String[arguments.size()]);
    adddb.main(argumentString);
    unlock(lockName);
    System.exit(0);
  }

  // Database registration
  dbutil db_util = new dbutil(config);
  db_util.register_database(confArg.get("db_name_files"), true);
  db_util.register_database(confArg.get("db_name_runs"), true);
  db_util.register_database(confArg.get("db_name_updates"), true);
  FileSystem hdfs = FileSystem.get(config);
  FileSystem localFS = FileSystem.getLocal(config);

  // Get source type
  confArg.put("source", getSource(db_util, confArg.get("db_name_files"), confArg.get("file_id")));
  confArg.put(
      "database", isDatabase(db_util, confArg.get("db_name_files"), confArg.get("file_id")));
  if (!confArg.get("source").equals("local")
      && type_move == toFrom.REMOTE2LOCAL
      && !confArg.get("timestamp_stop").equals(Integer.toString(Integer.MAX_VALUE))) {
    confArg.put("timestamp_stop", Long.toString(latestVersion(confArg, db_util)));
  }

  /*
   * Get previous timestamp
   */
  Get run_id_get = new Get(confArg.get("run_id").getBytes());
  Result run_get = db_util.doGet(confArg.get("db_name_runs"), run_id_get);
  KeyValue run_file_prev =
      run_get.getColumnLatest(
          "d".getBytes(), (confArg.get("file_id") + "_db_timestamp").getBytes());
  String last_timestamp = new String("0");
  if (null != run_file_prev && !confArg.get("source").equals("local")) {
    long last_timestamp_real = run_file_prev.getTimestamp();
    Long current_timestamp = new Long(confArg.get("timestamp_real"));
    if ((current_timestamp - last_timestamp_real) > 36000) {
      last_timestamp = new String(run_file_prev.getValue());
      Integer lastTimestamp = new Integer(last_timestamp);
      lastTimestamp += 1;
      last_timestamp = lastTimestamp.toString();
      logger.info("Last timestamp: " + last_timestamp + " End date: " + endDate);
      Date last_run = new Date(run_file_prev.getTimestamp());
      if (last_run.before(endDate) && !full_run) {
        confArg.put("timestamp_start", last_timestamp);
      }
    }
  }

  Integer tse = new Integer(confArg.get("timestamp_stop"));
  Integer tss = new Integer(confArg.get("timestamp_start"));
  if (tss > tse) {
    logger.info("No new version of requested file.");
    return 0;
  }

  /*
   * Generate file
   */
  String lockName = lock(lockRequest);

  Get file_id_get = new Get(confArg.get("file_id").getBytes());
  Result file_get = db_util.doGet(confArg.get("db_name_files"), file_id_get);
  if (!file_get.isEmpty()) {
    boolean found =
        hasFile(
            db_util,
            hdfs,
            confArg.get("db_name_files"),
            confArg.get("file_id"),
            getFullPath(confArg));
    if (confArg.get("source").equals("fullfile")) {
      found = false;
    }
    String filenames_put =
        getFileNames(
            db_util, confArg.get("db_name_files"), confArg.get("file_id"), getFullPath(confArg));
    // Filename not found in file database
    if (!found && type_move == toFrom.REMOTE2LOCAL) {
      if (!confArg.get("source").equals("local")) {
        // Generate intermediate file
        if (getFile(hdfs, confArg, db_util) == null) {
          unlock(lockName);
          return 1;
        }
        // Put generated file into file database
        if (!confArg.get("format").equals("fullfile")) {
          putFileEntry(
              db_util,
              hdfs,
              confArg.get("db_name_files"),
              confArg.get("file_id"),
              confArg.get("full_file_name"),
              confArg.get("source"));
        }
      } else {
        logger.warn("Remote file not found, and cannot be generated! File: " + confArg);
        unlock(lockName);
        return 1;
      }
    }
  } else {
    if (type_move == toFrom.REMOTE2LOCAL) {
      logger.warn("Remote file not found, and cannot be generated.");
      unlock(lockName);
      return 1;
    }
  }

  /*
   * Copy file
   * Update tables
   */
  if (type_move == toFrom.LOCAL2REMOTE) {
    if (!confArg.get("format").equals("fullfile")) {
      putFileEntry(
          db_util,
          hdfs,
          confArg.get("db_name_files"),
          confArg.get("file_id"),
          getFullPath(confArg),
          confArg.get("source"));
    }
    putRunEntry(
        db_util,
        confArg.get("db_name_runs"),
        confArg.get("run_id"),
        confArg.get("file_id"),
        confArg.get("type"),
        confArg.get("timestamp_real"),
        confArg.get("timestamp_stop"),
        getFullPath(confArg),
        confArg.get("delimiter"));
    hdfs.copyFromLocalFile(new Path(confArg.get("local_path")), new Path(getFullPath(confArg)));
  } else if (type_move == toFrom.REMOTE2LOCAL) {
    FileStatus[] files = hdfs.globStatus(new Path(getFullPath(confArg) + "*"));
    putRunEntry(
        db_util,
        confArg.get("db_name_runs"),
        confArg.get("run_id"),
        confArg.get("file_id"),
        confArg.get("type"),
        confArg.get("timestamp_real"),
        confArg.get("timestamp_stop"),
        getFullPath(confArg),
        confArg.get("delimiter"));
    unlock(lockName);
    for (FileStatus file : files) {
      Path cur_file = file.getPath();
      Path cur_local_path =
          new Path(new String(confArg.get("local_path") + confArg.get("file_id")));
      String suffix = getSuffix(getFileName(confArg), cur_file.getName());
      if (suffix.length() > 0) {
        cur_local_path = cur_local_path.suffix(new String("." + suffix));
      }
      if (confArg.get("copy").equals("true")) {
        String crc = hdfs.getFileChecksum(cur_file).toString();
        if (checksumLocalTest(cur_local_path, crc)) {
          continue;
        } else {
          hdfs.copyToLocalFile(cur_file, cur_local_path);
          writeChecksum(cur_local_path, crc);
        }
      } else {
        System.out.println(cur_local_path + "\t" + cur_file);
      }
    }
  }
  unlock(lockName);
  return 0;
}
/** Sets up configuration based on params */
private static boolean setup(Hashtable<String, String> curConf, Configuration argConf) {
  if (argConf.get("file") == null) {
    logger.fatal("Missing file parameter");
    System.exit(1);
  }
  if (argConf.get("hdfs_base_path") == null) {
    logger.fatal("Missing HDFS base path, check gestore-conf.xml");
    System.exit(1);
  }
  if (argConf.get("hdfs_temp_path") == null) {
    logger.fatal("Missing HDFS temp path, check gestore-conf.xml");
    System.exit(1);
  }
  if (argConf.get("local_temp_path") == null) {
    logger.fatal("Missing local temp path, check gestore-conf.xml");
    System.exit(1);
  }

  // Input parameters
  curConf.put("run_id", argConf.get("run", ""));
  curConf.put("task_id", argConf.get("task", ""));
  curConf.put("file_id", argConf.get("file"));
  curConf.put("local_path", argConf.get("path", ""));
  curConf.put("type", argConf.get("type", "l2r"));
  curConf.put("timestamp_start", argConf.get("timestamp_start", "1"));
  curConf.put(
      "timestamp_stop", argConf.get("timestamp_stop", Integer.toString(Integer.MAX_VALUE)));
  curConf.put("delimiter", argConf.get("regex", "ID=.*"));
  curConf.put("taxon", argConf.get("taxon", "all"));
  curConf.put("intermediate", argConf.get("full_run", "false"));
  curConf.put("quick_add", argConf.get("quick_add", "false"));
  Boolean full_run = curConf.get("intermediate").matches("(?i).*true.*");
  curConf.put("format", argConf.get("format", "unknown"));
  curConf.put("split", argConf.get("split", "1"));
  curConf.put("copy", argConf.get("copy", "true"));

  // Constants
  curConf.put("base_path", argConf.get("hdfs_base_path"));
  curConf.put("temp_path", argConf.get("hdfs_temp_path"));
  curConf.put("local_temp_path", argConf.get("local_temp_path"));
  curConf.put("db_name_files", argConf.get("hbase_file_table"));
  curConf.put("db_name_runs", argConf.get("hbase_run_table"));
  curConf.put("db_name_updates", argConf.get("hbase_db_update_table"));

  // Timestamps
  Date currentTime = new Date();
  Date endDate = new Date(new Long(curConf.get("timestamp_stop")));
  curConf.put("timestamp_real", Long.toString(currentTime.getTime()));

  return true;
}
public void generateFileChunks(JspWriter out, HttpServletRequest req, Configuration conf)
    throws IOException, InterruptedException {
  long startOffset = 0;
  int datanodePort = 0;
  int chunkSizeToView = 0;

  String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
  int namenodeInfoPort = -1;
  if (namenodeInfoPortStr != null) namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);

  String filename = HtmlQuoting.unquoteHtmlChars(req.getParameter("filename"));
  if (filename == null) {
    out.print("Invalid input (filename absent)");
    return;
  }

  String blockIdStr = null;
  long blockId = 0;
  blockIdStr = req.getParameter("blockId");
  if (blockIdStr == null) {
    out.print("Invalid input (blockId absent)");
    return;
  }
  blockId = Long.parseLong(blockIdStr);

  String tokenString = req.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
  UserGroupInformation ugi = JspHelper.getUGI(req, conf);
  final DFSClient dfs = JspHelper.getDFSClient(ugi, jspHelper.nameNodeAddr, conf);

  Token<BlockTokenIdentifier> accessToken = BlockTokenSecretManager.DUMMY_TOKEN;
  if (conf.getBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, false)) {
    List<LocatedBlock> blks =
        dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
    if (blks == null || blks.size() == 0) {
      out.print("Can't locate file blocks");
      dfs.close();
      return;
    }
    for (int i = 0; i < blks.size(); i++) {
      if (blks.get(i).getBlock().getBlockId() == blockId) {
        accessToken = blks.get(i).getBlockToken();
        break;
      }
    }
  }

  String blockGenStamp = null;
  long genStamp = 0;
  blockGenStamp = req.getParameter("genstamp");
  if (blockGenStamp == null) {
    out.print("Invalid input (genstamp absent)");
    return;
  }
  genStamp = Long.parseLong(blockGenStamp);

  String blockSizeStr;
  long blockSize = 0;
  blockSizeStr = req.getParameter("blockSize");
  if (blockSizeStr == null) {
    out.print("Invalid input (blockSize absent)");
    return;
  }
  blockSize = Long.parseLong(blockSizeStr);

  String chunkSizeToViewStr = req.getParameter("chunkSizeToView");
  if (chunkSizeToViewStr != null && Integer.parseInt(chunkSizeToViewStr) > 0)
    chunkSizeToView = Integer.parseInt(chunkSizeToViewStr);
  else chunkSizeToView = JspHelper.getDefaultChunkSize(conf);

  String startOffsetStr = req.getParameter("startOffset");
  if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0) startOffset = 0;
  else startOffset = Long.parseLong(startOffsetStr);

  String datanodePortStr = req.getParameter("datanodePort");
  if (datanodePortStr == null) {
    out.print("Invalid input (datanodePort absent)");
    return;
  }
  datanodePort = Integer.parseInt(datanodePortStr);

  out.print("<h3>File: ");
  JspHelper.printPathWithLinks(
      HtmlQuoting.quoteHtmlChars(filename), out, namenodeInfoPort, tokenString);
  out.print("</h3><hr>");
  String parent = new File(filename).getParent();
  JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, HtmlQuoting.quoteHtmlChars(parent));
  out.print("<hr>");
  out.print(
      "<a href=\"http://"
          + req.getServerName()
          + ":"
          + req.getServerPort()
          + "/browseDirectory.jsp?dir="
          + URLEncoder.encode(parent, "UTF-8")
          + "&namenodeInfoPort="
          + namenodeInfoPort
          + "\"><i>Go back to dir listing</i></a><br>");
  out.print("<a href=\"#viewOptions\">Advanced view/download options</a><br>");
  out.print("<hr>");

  // Determine the prev & next blocks
  long nextStartOffset = 0;
  long nextBlockSize = 0;
  String nextBlockIdStr = null;
  String nextGenStamp = null;
  String nextHost = req.getServerName();
  int nextPort = req.getServerPort();
  int nextDatanodePort = datanodePort;
  // determine data for the next link
  if (startOffset + chunkSizeToView >= blockSize) {
    // we have to go to the next block from this point onwards
    List<LocatedBlock> blocks =
        dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
    for (int i = 0; i < blocks.size(); i++) {
      if (blocks.get(i).getBlock().getBlockId() == blockId) {
        if (i != blocks.size() - 1) {
          LocatedBlock nextBlock = blocks.get(i + 1);
          nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId());
          nextGenStamp = Long.toString(nextBlock.getBlock().getGenerationStamp());
          nextStartOffset = 0;
          nextBlockSize = nextBlock.getBlock().getNumBytes();
          DatanodeInfo d = jspHelper.bestNode(nextBlock);
          String datanodeAddr = d.getName();
          nextDatanodePort =
              Integer.parseInt(
                  datanodeAddr.substring(datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
          nextHost = InetAddress.getByName(d.getHost()).getCanonicalHostName();
          nextPort = d.getInfoPort();
        }
      }
    }
  } else {
    // we are in the same block
    nextBlockIdStr = blockIdStr;
    nextStartOffset = startOffset + chunkSizeToView;
    nextBlockSize = blockSize;
    nextGenStamp = blockGenStamp;
  }

  String nextUrl = null;
  if (nextBlockIdStr != null) {
    nextUrl =
        "http://"
            + nextHost
            + ":"
            + nextPort
            + "/browseBlock.jsp?blockId="
            + nextBlockIdStr
            + "&blockSize="
            + nextBlockSize
            + "&startOffset="
            + nextStartOffset
            + "&genstamp="
            + nextGenStamp
            + "&filename="
            + URLEncoder.encode(filename, "UTF-8")
            + "&chunkSizeToView="
            + chunkSizeToView
            + "&datanodePort="
            + nextDatanodePort
            + "&namenodeInfoPort="
            + namenodeInfoPort
            + JspHelper.getDelegationTokenUrlParam(tokenString);
    out.print("<a href=\"" + nextUrl + "\">View Next chunk</a> ");
  }

  // determine data for the prev link
  String prevBlockIdStr = null;
  String prevGenStamp = null;
  long prevStartOffset = 0;
  long prevBlockSize = 0;
  String prevHost = req.getServerName();
  int prevPort = req.getServerPort();
  int prevDatanodePort = datanodePort;
  if (startOffset == 0) {
    List<LocatedBlock> blocks =
        dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
    for (int i = 0; i < blocks.size(); i++) {
      if (blocks.get(i).getBlock().getBlockId() == blockId) {
        if (i != 0) {
          LocatedBlock prevBlock = blocks.get(i - 1);
          prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId());
          prevGenStamp = Long.toString(prevBlock.getBlock().getGenerationStamp());
          prevStartOffset = prevBlock.getBlock().getNumBytes() - chunkSizeToView;
          if (prevStartOffset < 0) prevStartOffset = 0;
          prevBlockSize = prevBlock.getBlock().getNumBytes();
          DatanodeInfo d = jspHelper.bestNode(prevBlock);
          String datanodeAddr = d.getName();
          prevDatanodePort =
              Integer.parseInt(
                  datanodeAddr.substring(datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
          prevHost = InetAddress.getByName(d.getHost()).getCanonicalHostName();
          prevPort = d.getInfoPort();
        }
      }
    }
  } else {
    // we are in the same block
    prevBlockIdStr = blockIdStr;
    prevStartOffset = startOffset - chunkSizeToView;
    if (prevStartOffset < 0) prevStartOffset = 0;
    prevBlockSize = blockSize;
    prevGenStamp = blockGenStamp;
  }

  String prevUrl = null;
  if (prevBlockIdStr != null) {
    prevUrl =
        "http://"
            + prevHost
            + ":"
            + prevPort
            + "/browseBlock.jsp?blockId="
            + prevBlockIdStr
            + "&blockSize="
            + prevBlockSize
            + "&startOffset="
            + prevStartOffset
            + "&filename="
            + URLEncoder.encode(filename, "UTF-8")
            + "&chunkSizeToView="
            + chunkSizeToView
            + "&genstamp="
            + prevGenStamp
            + "&datanodePort="
            + prevDatanodePort
            + "&namenodeInfoPort="
            + namenodeInfoPort
            + JspHelper.getDelegationTokenUrlParam(tokenString);
    out.print("<a href=\"" + prevUrl + "\">View Prev chunk</a> ");
  }

  out.print("<hr>");
  out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
  try {
    jspHelper.streamBlockInAscii(
        new InetSocketAddress(req.getServerName(), datanodePort),
        blockId,
        accessToken,
        genStamp,
        blockSize,
        startOffset,
        chunkSizeToView,
        out,
        conf);
  } catch (Exception e) {
    out.print(e);
  }
  out.print("</textarea>");
  dfs.close();
}