private Job getJobFromUrl(String url) {
    try {
        HttpGet httpget = new HttpGet(url);
        HttpClient client = new DefaultHttpClient();
        HttpResponse response = client.execute(httpget);

        InputStream in = response.getEntity().getContent();
        BufferedReader reader = new BufferedReader(new InputStreamReader(in));
        StringBuilder str = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            str.append(line);
        }
        reader.close(); // closes the underlying stream as well

        String xml = str.toString();
        String color = fromXmlToColor(xml);
        Status status = fromColorToStatus(color);

        Job job = new Job(status);
        job.setColor(color);
        job.setUrl(url);
        return job;
    } catch (Exception ex) {
        System.err.println("Could not get URL: " + ex);
        return null;
    }
}

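DefaultHttpClient has been deprecated since Apache HttpClient 4.3. As a point of comparison, here is a minimal sketch of the same fetch on the JDK's built-in java.net.http client (Java 11+); it assumes the same fromXmlToColor/fromColorToStatus helpers and Job class as above, and is not part of the original code.

// Sketch only: the same fetch using java.net.http (Java 11+).
// Assumes the fromXmlToColor/fromColorToStatus helpers from above.
private Job getJobFromUrlJdk(String url) {
    try {
        java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
        java.net.http.HttpRequest request =
                java.net.http.HttpRequest.newBuilder(java.net.URI.create(url)).build();
        String xml = client.send(request,
                java.net.http.HttpResponse.BodyHandlers.ofString()).body();

        String color = fromXmlToColor(xml);
        Job job = new Job(fromColorToStatus(color));
        job.setColor(color);
        job.setUrl(url);
        return job;
    } catch (Exception ex) {
        System.err.println("Could not get URL: " + ex);
        return null;
    }
}
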
/** Read a source file and compile it up to the current job's last barrier. */
public boolean readSource(FileSource source) {
    // Add a new SourceJob for the given source. If a Job for the source
    // already exists, then we will be given the existing job.
    SourceJob job = addJob(source);

    if (job == null) {
        // addJob returns null if the job has already been completed, in
        // which case we can just ignore the request to read in the source.
        return true;
    }

    // Run the new job up to the current job's SourceJob's last barrier, to
    // make sure that dependencies are satisfied.
    Pass.ID barrier;

    if (currentJob != null) {
        if (currentJob.sourceJob().lastBarrier() == null) {
            throw new InternalCompilerError(
                    "A SourceJob that has not reached a barrier cannot "
                            + "read another source file.");
        }

        barrier = currentJob.sourceJob().lastBarrier().id();
    } else {
        barrier = Pass.FIRST_BARRIER;
    }

    // Make sure we reach at least the first barrier defined in the base
    // compiler. This forces types to be constructed. If FIRST_BARRIER is
    // before "barrier", then the second runToPass will just return true.
    return runToPass(job, barrier) && runToPass(job, Pass.FIRST_BARRIER);
}

/** Run a job up to the <code>goal</code> pass. */
public boolean runToPass(Job job, Pass goal) {
    if (Report.should_report(Report.frontend, 1))
        Report.report(1, "Running " + job + " to pass " + goal);

    while (!job.pendingPasses().isEmpty()) {
        Pass pass = (Pass) job.pendingPasses().get(0);

        try {
            runPass(job, pass);
        } catch (CyclicDependencyException e) {
            // Cause the pass to fail.
            job.finishPass(pass, false);
        }

        if (pass == goal) {
            break;
        }
    }

    if (job.completed()) {
        if (Report.should_report(Report.frontend, 1))
            Report.report(1, "Job " + job + " completed");
    }

    return job.status();
}

// ===================================================
// ObjHtmlPanel.Listener

public void linkSelected(java.net.URL href, String target) {
    String url = href.toExternalForm();
    int slash = url.lastIndexOf('/');
    if (slash > 0) url = url.substring(slash + 1);

    Job t = actionMap.get(url);
    fapp.guiRun().run(this, new Job(t.getPermissions(), t.getCBRunnable()));
}

public static void main(String[] args) throws Exception {
    String inputDirectory = "/home/cs246/Desktop/HW2/input";
    String outputDirectory = "/home/cs246/Desktop/HW2/output";
    String centroidDirectory = "/home/cs246/Desktop/HW2/config";

    int iterations = 20;

    for (int i = 1; i <= iterations; i++) {
        Configuration conf = new Configuration();

        String cFile = centroidDirectory + "/c" + i + ".txt";
        String nextCFile = centroidDirectory + "/c" + (i + 1) + ".txt";
        conf.set("CFILE", cFile);
        conf.set("NEXTCFILE", nextCFile);

        Job job = new Job(conf, "HW2_Q4." + i);
        job.setJarByClass(HW2_Q4.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);
        job.setMapperClass(Map1.class);
        job.setReducerClass(Reduce1.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.addInputPath(job, new Path(inputDirectory));
        FileOutputFormat.setOutputPath(job, new Path(outputDirectory + "/output" + i));

        job.waitForCompletion(true);
    }
}

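The driver above hands the current centroid file to the tasks through the CFILE property. A minimal sketch of how a mapper such as Map1 might read it back in setup() follows; the field names, the whitespace-separated parsing, and the use of a plain FileReader (which only works when the path is readable on the task's local filesystem, e.g. local-mode runs) are all assumptions, not the original homework code.

// Hypothetical sketch of a k-means mapper loading centroids from the
// file named by the CFILE property; parsing format is an assumption.
public static class Map1 extends Mapper<LongWritable, Text, IntWritable, Text> {
    private final List<double[]> centroids = new ArrayList<>();

    @Override
    protected void setup(Context context) throws IOException {
        String cFile = context.getConfiguration().get("CFILE");
        try (BufferedReader reader = new BufferedReader(new FileReader(cFile))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] parts = line.trim().split("\\s+");
                double[] c = new double[parts.length];
                for (int d = 0; d < parts.length; d++) {
                    c[d] = Double.parseDouble(parts[d]);
                }
                centroids.add(c);
            }
        }
    }
}
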
/** Run a job until the <code>goal</code> pass completes. */
public boolean runToPass(Job job, Pass.ID goal) {
    if (Report.should_report(Report.frontend, 1))
        Report.report(1, "Running " + job + " to pass named " + goal);

    if (job.completed(goal)) {
        return true;
    }

    Pass pass = job.passByID(goal);
    return runToPass(job, pass);
}

// Expand grid-search-related argument sets.
@Override
protected NanoHTTPD.Response serveGrid(NanoHTTPD server, Properties parms, RequestType type) {
    String[][] values = new String[_arguments.size()][];
    boolean gridSearch = false;

    for (int i = 0; i < _arguments.size(); i++) {
        Argument arg = _arguments.get(i);

        if (arg._gridable) {
            String value = _parms.getProperty(arg._name);

            if (value != null) {
                // Skip grid expansion if the argument is an array, unless the
                // value is a nested expression. A little hackish until a real
                // expression language exists.
                boolean nested = value.contains("(");

                if (!arg._field.getType().isArray() || nested) {
                    values[i] = split(value);
                    if (values[i] != null && values[i].length > 1) gridSearch = true;
                } else if (arg._field.getType().isArray() && !nested) {
                    // Copy values which are arrays.
                    values[i] = new String[] {value};
                }
            }
        }
    }

    if (!gridSearch) return superServeGrid(server, parms, type);

    // Ignore the destination key so that each job gets its own.
    _parms.remove("destination_key");
    for (int i = 0; i < _arguments.size(); i++)
        if (_arguments.get(i)._name.equals("destination_key")) values[i] = null;

    // Iterate over all argument combinations.
    int[] counters = new int[values.length];
    ArrayList<Job> jobs = new ArrayList<Job>();

    for (;;) {
        Job job = (Job) create(_parms);
        Properties combination = new Properties();

        for (int i = 0; i < values.length; i++) {
            if (values[i] != null) {
                String value = values[i][counters[i]].trim();
                combination.setProperty(_arguments.get(i)._name, value);
                _arguments.get(i).reset();
                _arguments.get(i).check(job, value);
            }
        }

        job._parms = combination;
        jobs.add(job);

        if (!increment(counters, values)) break;
    }

    GridSearch grid = new GridSearch();
    grid.jobs = jobs.toArray(new Job[jobs.size()]);
    return grid.superServeGrid(server, parms, type);
}

/** Adds a dependency from the current job to the given Source. */
public void addDependencyToCurrentJob(Source s) {
    if (s == null) return;

    if (currentJob != null) {
        Object o = jobs.get(s);

        if (o != COMPLETED_JOB) {
            if (Report.should_report(Report.frontend, 2)) {
                Report.report(2,
                        "Adding dependency from " + currentJob.source() + " to " + s);
            }

            currentJob.sourceJob().addDependency(s);
        }
    } else {
        throw new InternalCompilerError("No current job!");
    }
}

void checkFormat(Job job) throws Exception {
    TaskAttemptContext attemptContext = new TaskAttemptContext(
            job.getConfiguration(), new TaskAttemptID("123", 0, false, 1, 2));

    MyClassMessagePackBase64LineInputFormat format =
            new MyClassMessagePackBase64LineInputFormat();
    FileInputFormat.setInputPaths(job, workDir);

    List<InputSplit> splits = format.getSplits(job);

    for (int j = 0; j < splits.size(); j++) {
        RecordReader<LongWritable, MyClassWritable> reader =
                format.createRecordReader(splits.get(j), attemptContext);
        reader.initialize(splits.get(j), attemptContext);

        int count = 0;
        try {
            while (reader.nextKeyValue()) {
                LongWritable key = reader.getCurrentKey();
                MyClassWritable val = reader.getCurrentValue();
                MyClass mc = val.get();
                assertEquals(mc.v, count);
                assertEquals(mc.s, Integer.toString(count));
                count++;
            }
        } finally {
            reader.close();
        }
    }
}

@Override
public List<Job> listJobs(UUID subscriptionId, String serviceName) throws AzureCmdException {
    String[] cmd = new String[] {
        "mobile", "job", "list", "--json",
        "-s", subscriptionId.toString(), serviceName,
    };

    String json = AzureCommandHelper.getInstance().consoleExec(cmd);

    CustomJsonSlurper slurper = new CustomJsonSlurper();
    List<Map<String, Object>> tempRes = (List<Map<String, Object>>) slurper.parseText(json);

    List<Job> res = new ArrayList<Job>();

    for (Map<String, Object> item : tempRes) {
        Job j = new Job();
        j.setAppName(item.get("appName").toString());
        j.setName(item.get("name").toString());
        j.setEnabled(item.get("status").equals("enabled"));
        j.setId(UUID.fromString(item.get("id").toString()));

        if (item.get("intervalPeriod") != null) {
            j.setIntervalPeriod((Integer) item.get("intervalPeriod"));
            j.setIntervalUnit(item.get("intervalUnit").toString());
        }

        res.add(j);
    }

    return res;
}

public static void main(String[] args) throws Exception {
    Job job = new Job();
    job.setJarByClass(Sort.class);
    job.setJobName("Sort");

    FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/input/"));
    FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:9000/output/"));

    job.setMapperClass(Map.class);
    // job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setNumReduceTasks(2);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

/** Run all pending passes on <code>job</code>. */
public boolean runAllPasses(Job job) {
    List pending = job.pendingPasses();

    // Run until there are no more passes.
    if (!pending.isEmpty()) {
        Pass lastPass = (Pass) pending.get(pending.size() - 1);
        return runToPass(job, lastPass);
    }

    return true;
}

/**
 * @param job the job that we need to find the next parameters for
 * @return the next job parameters if they can be located
 * @throws JobParametersNotFoundException if there is a problem
 */
private JobParameters getNextJobParameters(Job job) throws JobParametersNotFoundException {
    String jobIdentifier = job.getName();
    JobParameters jobParameters;
    List<JobInstance> lastInstances = jobExplorer.getJobInstances(jobIdentifier, 0, 1);

    JobParametersIncrementer incrementer = job.getJobParametersIncrementer();
    if (incrementer == null) {
        throw new JobParametersNotFoundException(
                "No job parameters incrementer found for job=" + jobIdentifier);
    }

    if (lastInstances.isEmpty()) {
        jobParameters = incrementer.getNext(new JobParameters());
        if (jobParameters == null) {
            throw new JobParametersNotFoundException(
                    "No bootstrap parameters found from incrementer for job=" + jobIdentifier);
        }
    } else {
        jobParameters = incrementer.getNext(lastInstances.get(0).getJobParameters());
    }

    return jobParameters;
}

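For context, a minimal sketch of how such a helper is typically used in Spring Batch: the incremented parameters are handed to a JobLauncher to start the next job instance. The jobLauncher field and the caller itself are assumptions, not part of the original class.

// Hypothetical caller: launch the job with the next parameters.
// Assumes an injected org.springframework.batch.core.launch.JobLauncher.
public void startNextInstance(Job job) throws Exception {
    JobParameters next = getNextJobParameters(job);
    JobExecution execution = jobLauncher.run(job, next);
    System.out.println("Started " + job.getName() + ": " + execution.getStatus());
}
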
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    Job job = new Job(conf, "wordcount");
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setMapperClass(TopMapper.class);
    job.setReducerClass(TopReducer.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    job.setNumReduceTasks(1);
    job.setJarByClass(WordCount_e3.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    job.waitForCompletion(true);
}

private static Job getJobForClient() throws IOException {
    Job job = Job.getInstance(new Configuration());
    job.getConfiguration().set("mapred.job.tracker", "localhost:" + PORT);
    job.setInputFormatClass(NullInputFormat.class);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);
    return job;
}

@Override
public int run(String[] args) throws Exception {
    Job job = new Job(getConf());
    job.setJarByClass(ElimiateRepeat.class);
    job.setJobName("ElimiateRepeat");

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.setInputPaths(job, new Path("file0*"));
    FileOutputFormat.setOutputPath(job, new Path("elimiateRepeat"));

    boolean success = job.waitForCompletion(true);
    return success ? 0 : 1;
}

@Override
public int run(String[] args) throws Exception {
    String locatorHost = args[0];
    int locatorPort = Integer.parseInt(args[1]);
    String hdfsHomeDir = args[2];

    System.out.println("KnownKeysMRv2 invoked with args (locatorHost = " + locatorHost
            + " locatorPort = " + locatorPort + " hdfsHomeDir = " + hdfsHomeDir);

    Configuration conf = getConf();
    conf.set(GFInputFormat.INPUT_REGION, "partitionedRegion");
    conf.set(GFInputFormat.HOME_DIR, hdfsHomeDir);
    conf.setBoolean(GFInputFormat.CHECKPOINT, false);
    conf.set(GFOutputFormat.REGION, "validationRegion");
    conf.set(GFOutputFormat.LOCATOR_HOST, locatorHost);
    conf.setInt(GFOutputFormat.LOCATOR_PORT, locatorPort);

    Job job = Job.getInstance(conf, "knownKeysMRv2");
    job.setInputFormatClass(GFInputFormat.class);
    job.setOutputFormatClass(GFOutputFormat.class);
    job.setMapperClass(KnownKeysMRv2Mapper.class);
    job.setMapOutputKeyClass(GFKey.class);
    job.setMapOutputValueClass(PEIWritable.class);
    job.setReducerClass(KnownKeysMRv2Reducer.class);
    // job.setOutputKeyClass(String.class);
    // job.setOutputValueClass(ValueHolder.class);

    return job.waitForCompletion(false) ? 0 : 1;
}

static void realMain(String[] args) throws Throwable {
    // jmap doesn't work on Windows.
    if (System.getProperty("os.name").startsWith("Windows")) return;

    final String childClassName = Job.class.getName();
    final String classToCheckForLeaks = Job.classToCheckForLeaks();
    final String uniqueID = String.valueOf(new Random().nextInt(Integer.MAX_VALUE));

    final String[] jobCmd = {
        java, "-Xmx8m", "-classpath", System.getProperty("test.classes", "."),
        childClassName, uniqueID
    };

    final Process p = new ProcessBuilder(jobCmd).start();

    final String childPid = match(
            commandOutputOf(jps, "-m"),
            "(?m)^ *([0-9]+) +\\Q" + childClassName + "\\E *" + uniqueID + "$", 1);

    final int n0 = objectsInUse(p, childPid, classToCheckForLeaks);
    final int n1 = objectsInUse(p, childPid, classToCheckForLeaks);

    equal(p.waitFor(), 0);
    equal(p.exitValue(), 0);
    failed += p.exitValue();

    // Check that no objects were leaked.
    System.out.printf("%d -> %d%n", n0, n1);
    check(Math.abs(n1 - n0) < 2); // Almost always n0 == n1
    check(n1 < 20);

    drainers.shutdown();
}

public static List<Job> findJobsByValidLocation() {
    return Job.find("byLocationValid", true).fetch();
}

public static List<Job> findMissingCoordinatesByCompany(Company company) {
    return Job.find("latitude < 0 and longitude < 0 and company=?1", company).fetch();
}

public static List<Job> findByCompany(Company company) {
    return Job.find("byCompany", company).fetch();
}

public static List<Job> findByJobKey(List<String> jobKeyList) {
    return Job.find("key in (:jobkeys)").bind("jobkeys", jobKeyList).fetch();
}

public static Job byId(long id) {
    return Job.find("byId", id).first();
}

public static Job findByJobKey(String key) {
    return Job.find("byKey", key).first();
}

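These finders follow the Play Framework 1.x Model API (Job.find(...).fetch() / .first()), mixing the "by<Property>" simplified syntax with raw JPQL fragments. A brief usage sketch, assuming Job extends play.db.jpa.Model as the finders imply; the key value is a made-up example.

// Hypothetical caller showing how the finders above compose.
List<Job> companyJobs = Job.findByCompany(company);   // "by<Property>" simplified syntax
List<Job> located = Job.findJobsByValidLocation();    // boolean property finder
Job one = Job.findByJobKey("example-key");            // raw JPQL with named parameter
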
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] remainArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    if (remainArgs.length != 2) {
        System.err.println("Usage: wordcount <input> <output>");
        System.exit(1);
    }

    Job job = new Job(conf, "wordcount");
    job.setJarByClass(WordCount.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setMapperClass(Map.class);
    job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);
    job.setNumReduceTasks(4);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileSystem.get(conf).delete(new Path(remainArgs[1]), true);

    FileInputFormat.setInputPaths(job, new Path(remainArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(remainArgs[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

/**
 * Before running <code>Pass pass</code> on <code>SourceJob job</code>, make sure that all
 * appropriate scheduling invariants are satisfied, to ensure that all passes of other jobs
 * that <code>job</code> depends on will have already been done.
 */
protected void enforceInvariants(Job job, Pass pass) throws CyclicDependencyException {
    SourceJob srcJob = job.sourceJob();
    if (srcJob == null) {
        return;
    }

    BarrierPass lastBarrier = srcJob.lastBarrier();
    if (lastBarrier != null) {
        // Make sure that _all_ dependent jobs have completed at least up to
        // the last barrier (not just children).
        //
        // Ideally the invariant should be that only the source jobs that
        // job _depends on_ should be brought up to the last barrier.
        // This is work to be done in the future...
        List allDependentSrcs = new ArrayList(srcJob.dependencies());
        Iterator i = allDependentSrcs.iterator();

        while (i.hasNext()) {
            Source s = (Source) i.next();
            Object o = jobs.get(s);
            if (o == COMPLETED_JOB) continue;
            if (o == null) {
                throw new InternalCompilerError("Unknown source " + s);
            }

            SourceJob sj = (SourceJob) o;
            if (sj.pending(lastBarrier.id())) {
                // Make the job run up to the last barrier. We ignore the
                // return result, since even if the job fails, we will keep
                // going and see how far we get...
                if (Report.should_report(Report.frontend, 3)) {
                    Report.report(3, "Running " + sj + " to " + lastBarrier.id() + " for " + srcJob);
                }

                runToPass(sj, lastBarrier.id());
            }
        }
    }

    if (pass instanceof GlobalBarrierPass) {
        // Need to make sure that _all_ jobs have completed just up to this
        // global barrier.
        //
        // If we hit a cyclic dependency, ignore it and run the other jobs up
        // to that pass. Then try again to run the cyclic pass. If we hit the
        // cycle again for the same job, stop.
        LinkedList barrierWorklist = new LinkedList(jobs.values());

        while (!barrierWorklist.isEmpty()) {
            Object o = barrierWorklist.removeFirst();
            if (o == COMPLETED_JOB) continue;

            SourceJob sj = (SourceJob) o;
            if (sj.completed(pass.id()) || sj.nextPass() == sj.passByID(pass.id())) {
                // The source job has either done this global pass (which is
                // possible if the job was loaded late in the game), or is
                // right up to the global barrier.
                continue;
            }

            // Make the job run up to just before the global barrier. We
            // ignore the return result, since even if the job fails, we will
            // keep going and see how far we get...
            Pass beforeGlobal = sj.getPreviousTo(pass.id());

            if (Report.should_report(Report.frontend, 3)) {
                Report.report(3, "Running " + sj + " to " + beforeGlobal.id() + " for " + srcJob);
            }

            // Don't use runToPass, since that catches the
            // CyclicDependencyException that we should report back to the
            // caller.
            while (!sj.pendingPasses().isEmpty()) {
                Pass p = (Pass) sj.pendingPasses().get(0);
                runPass(sj, p);

                if (p == beforeGlobal) {
                    break;
                }
            }
        }
    }
}

/**
 * Run the pass <code>pass</code> on the job. Before running the pass on the job, if the job
 * is a <code>SourceJob</code>, then this method will ensure that the scheduling invariants
 * are enforced by calling <code>enforceInvariants</code>.
 */
protected void runPass(Job job, Pass pass) throws CyclicDependencyException {
    // Make sure that all scheduling invariants are satisfied before running
    // the next pass. We may thus execute some other passes on other jobs
    // before running the given pass.
    try {
        enforceInvariants(job, pass);
    } catch (CyclicDependencyException e) {
        // A job that depends on this job is still running an earlier pass.
        // We cannot continue this pass, but we can just silently fail, since
        // the job that depends on this one will eventually try to run this
        // pass again when it reaches a barrier.
        return;
    }

    if (getOptions().disable_passes.contains(pass.name())) {
        if (Report.should_report(Report.frontend, 1)) Report.report(1, "Skipping pass " + pass);

        job.finishPass(pass, true);
        return;
    }

    if (Report.should_report(Report.frontend, 1))
        Report.report(1, "Trying to run pass " + pass + " in " + job);

    if (job.isRunning()) {
        // We're currently running. We can't reach the goal.
        throw new CyclicDependencyException(job + " cannot reach pass " + pass);
    }

    pass.resetTimers();

    boolean result = false;
    if (job.status()) {
        Job oldCurrentJob = this.currentJob;
        this.currentJob = job;
        Report.should_report.push(pass.name());

        // Stop the timer on the old pass.
        Pass oldPass = oldCurrentJob != null ? oldCurrentJob.runningPass() : null;
        if (oldPass != null) {
            oldPass.toggleTimers(true);
        }

        job.setRunningPass(pass);
        pass.toggleTimers(false);
        result = pass.run();
        pass.toggleTimers(false);
        job.setRunningPass(null);

        Report.should_report.pop();
        this.currentJob = oldCurrentJob;

        // Restart the timer on the old pass.
        if (oldPass != null) {
            oldPass.toggleTimers(true);
        }

        // Pretty-print this pass if we need to.
        if (getOptions().print_ast.contains(pass.name())) {
            System.err.println("----------------------------------------------------------------");
            System.err.println("Pretty-printing AST for " + job + " after " + pass.name());

            PrettyPrinter pp = new PrettyPrinter();
            pp.printAst(job.ast(), new CodeWriter(System.err, 78));
        }

        // Dump this pass if we need to.
        if (getOptions().dump_ast.contains(pass.name())) {
            System.err.println("----------------------------------------------------------------");
            System.err.println("Dumping AST for " + job + " after " + pass.name());

            NodeVisitor dumper = new DumpAst(new CodeWriter(System.err, 78));
            dumper = dumper.begin();
            job.ast().visit(dumper);
            dumper.finish();
        }

        // This seems to work around a VM bug on Linux with JDK 1.4.0.
        // The mark-sweep collector will sometimes crash. Running the GC
        // explicitly here makes the bug go away. If this fails, maybe run
        // with a bigger heap.
        // System.gc();
    }

    Stats stats = getStats();
    stats.accumPassTimes(pass.id(), pass.inclusiveTime(), pass.exclusiveTime());

    if (Report.should_report(Report.time, 2)) {
        Report.report(2, "Finished " + pass + " status=" + str(result)
                + " inclusive_time=" + pass.inclusiveTime()
                + " exclusive_time=" + pass.exclusiveTime());
    } else if (Report.should_report(Report.frontend, 1)) {
        Report.report(1, "Finished " + pass + " status=" + str(result));
    }

    job.finishPass(pass, result);
}

@Override
public int run(String[] arg0) throws Exception {
    // Configure a job and start it.
    Configuration conf = getConf();

    Job job = new Job(conf, "Index construction..");
    job.setJarByClass(IndexConstructorDriver.class);
    job.setMapperClass(IndexConstructorMapper.class);
    job.setReducerClass(IndexConstructorReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(InvertedListWritable.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);

    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);

    // The working directory can be supplied through the configuration.
    FileSystem fs = FileSystem.get(conf);
    String workdir = conf.get("org.joy.crawler.dir", "crawler/");
    fs.delete(new Path(workdir + "indexOutput/"), true);

    FileInputFormat.addInputPath(job, new Path(workdir + "content/"));
    FileOutputFormat.setOutputPath(job, new Path(workdir + "indexOutput/"));

    System.out.println(
            "indexer starts to work, it begins to construct the index, please wait ...\n");

    return job.waitForCompletion(true) ? 0 : 1;
}

public int run(String[] args) throws Exception {
    Job job = new Job(getConf());
    job.setJarByClass(HadoopNBFilter.class);
    job.setJobName("hadoopnbfilter");

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    job.setNumReduceTasks(Integer.parseInt(args[2]));

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    boolean jobCompleted = job.waitForCompletion(true);
    return jobCompleted ? 0 : 1;
}

@Override
void solve(Set<? extends Job> jobs) {
    Job[] sortedJobs = jobs.toArray(new Job[jobs.size()]);
    Arrays.sort(sortedJobs, Job::compareArrivalTime);

    processTime = totWT = 0;
    long usefulTime = 0;
    int jobCount = jobs.size();
    PriorityQueue<Job> queue = new PriorityQueue<>(Job::compareBurstTime);

    for (Job job : sortedJobs) {
        if (job == null) {
            jobCount--;
            continue;
        }

        // Run queued jobs (shortest burst first) until the next arrival.
        while (!queue.isEmpty() && processTime < job.getArrivalTime()) {
            Job nextJob = queue.poll();
            long arrivalTime = nextJob.getArrivalTime();
            long burstTime = nextJob.getBurstTime();

            if (processTime < arrivalTime) {
                processList.add(new RunningProcess("Idle", arrivalTime - processTime));
                processTime = arrivalTime;
            }

            processList.add(new RunningProcess("P" + nextJob.getId(), burstTime));
            usefulTime += burstTime;
            totWT += processTime - arrivalTime;
            processTime += burstTime;
        }

        queue.add(job);
    }

    // Drain whatever is still queued after the last arrival.
    while (!queue.isEmpty()) {
        Job nextJob = queue.poll();
        long arrivalTime = nextJob.getArrivalTime();
        long burstTime = nextJob.getBurstTime();

        if (processTime < arrivalTime) {
            processList.add(new RunningProcess("Idle", arrivalTime - processTime));
            processTime = arrivalTime;
        }

        processList.add(new RunningProcess("P" + nextJob.getId(), burstTime));
        usefulTime += burstTime;
        totWT += processTime - arrivalTime;
        processTime += burstTime;
    }

    totRT = totWT;
    totTAT = totWT + usefulTime;
    avgRT = avgWT = (double) totWT / (double) jobCount;
    avgTAT = (double) totTAT / (double) jobCount;
    utilization = usefulTime * 100.0 / processTime;
}

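The solver above relies on two static comparators on Job (compareArrivalTime and compareBurstTime) that are not shown in this collection. A minimal sketch of what they would look like follows; the field names and constructor are assumptions chosen to match the getters used by solve().

// Hypothetical Job class matching the method references above;
// field names and constructor are assumptions.
class Job {
    private final int id;
    private final long arrivalTime;
    private final long burstTime;

    Job(int id, long arrivalTime, long burstTime) {
        this.id = id;
        this.arrivalTime = arrivalTime;
        this.burstTime = burstTime;
    }

    int getId() { return id; }
    long getArrivalTime() { return arrivalTime; }
    long getBurstTime() { return burstTime; }

    // Orders jobs by arrival for the outer loop's Arrays.sort.
    static int compareArrivalTime(Job a, Job b) {
        return Long.compare(a.getArrivalTime(), b.getArrivalTime());
    }

    // Lets the PriorityQueue pick the shortest burst next (SJF).
    static int compareBurstTime(Job a, Job b) {
        return Long.compare(a.getBurstTime(), b.getBurstTime());
    }
}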