/**
 * Resolves {@code arg} — a URL, a SHA, or a maven-style coordinate — to an
 * {@link ArtifactData}, starting an asynchronous download when the artifact
 * is not already cached.
 *
 * @param arg URL string, SHA hex string, or coordinate
 * @return the artifact data, or {@code null} when the coordinate is unknown
 *     to the library
 * @throws Exception from coordinate parsing, library lookup, or download setup
 */
public ArtifactData getCandidateAsync(String arg) throws Exception {
  reporter.trace("coordinate %s", arg);
  // Fast path: a literal URL is fetched directly and marked as local.
  if (isUrl(arg))
    try {
      ArtifactData data = putAsync(new URI(arg));
      data.local = true;
      return data;
    } catch (Exception e) {
      // Not a usable URL after all; fall through and treat it as a coordinate.
      reporter.trace("hmm, not a valid url %s, will try the server", arg);
    }
  Coordinate c = new Coordinate(arg);
  // A SHA coordinate can be answered straight from the cache.
  if (c.isSha()) {
    ArtifactData r = get(c.getSha());
    if (r != null) return r;
  }
  Revision revision = library.getRevisionByCoordinate(c);
  if (revision == null) return null; // coordinate unknown to the library
  reporter.trace("revision %s", Hex.toHexString(revision._id));
  ArtifactData ad = get(revision._id);
  if (ad != null) {
    reporter.trace("found in cache");
    return ad;
  }
  // Not cached: kick off an async download from the revision's first URL.
  // NOTE(review): assumes revision.urls is non-empty — iterator().next()
  // throws NoSuchElementException otherwise; confirm the library guarantees this.
  URI url = revision.urls.iterator().next();
  ArtifactData artifactData = putAsync(url);
  artifactData.coordinate = c;
  return artifactData;
}
/**
 * Orders service {@code sd} and its transitive {@code after} dependencies
 * into {@code start} (dependencies first), reporting cyclic and missing
 * dependencies via the reporter instead of throwing.
 *
 * @param map all known services, keyed by name
 * @param start output list in startup order; also acts as the "done" set
 * @param sd the service to place
 * @param cyclic services currently on the recursion path, for cycle detection
 */
private void checkStartup(
    Map<String, ServiceData> map,
    List<ServiceData> start,
    ServiceData sd,
    Set<ServiceData> cyclic) {
  // NOTE(review): a service with an empty 'after' list is never added to
  // 'start' here — confirm such services are intentionally excluded from
  // daemon startup rather than an oversight.
  if (sd.after.isEmpty() || start.contains(sd)) return;
  if (cyclic.contains(sd)) {
    reporter.error("Cyclic dependency for " + sd.name);
    return;
  }
  cyclic.add(sd); // mark as on the current recursion path
  for (String dependsOn : sd.after) {
    if (dependsOn.equals("boot")) continue; // implicit, always satisfied
    ServiceData deps = map.get(dependsOn);
    if (deps == null) {
      reporter.error("No such service " + dependsOn + " but " + sd.name + " depends on it");
    } else {
      checkStartup(map, start, deps, cyclic);
    }
  }
  // All dependencies now precede sd in the start order.
  start.add(sd);
}
/** Runs every configured reporter, but only in preview or medium-test analysis mode. */
public void execute() {
  boolean applicableMode = analysisMode.isPreview() || analysisMode.isMediumTest();
  if (!applicableMode) {
    return;
  }
  for (Reporter r : reporters) {
    r.execute();
  }
}
/** Serializes every reporter and stores the resulting array under the "reporters" key. */
public void persistReporters() {
  JSONArray serialized = new JSONArray();
  for (Reporter r : reporters) {
    serialized.add(r.jsonify());
  }
  json.put("reporters", serialized);
}
// Sorting the array and printing the result to 'bsOutput' public static void main(String args[]) { long st, et; st = System.nanoTime(); long tt; // getting concurency if (args.length > 1) // 2nd argument to main should be the concurency { concurency = args[1]; // getting the concurency - 2nd parameter if (concurency.equals(low_concurency)) sync = hi_sync; else if (concurency.equals(mid_concurency)) sync = mid_sync; else // in case of hi concurency or need for default value { sync = low_sync; // setting default value if (!concurency.equals(low_concurency)) // in case no legal value was entered System.out.println( "Concurency in 2nd parameter must be one of the following: \n" + "Low concurency: '" + low_concurency + "' , Mid concurency: '" + mid_concurency + "' , or High concurency, which is also the default value: '" + hi_concurency + "'. \n" + "The program runs with the default value"); } } else // in case 2nd parameter was not set sync = low_sync; // setting default value // Running the Bubble Sort Program try { for (int i = 0; i < array.length; ++i) // printing the original array System.out.print(BubbleSort.array[i] + " "); System.out.println(""); System.out.println(""); // creating a BubbleSort object with level of synchronization, oposite to concurency BubbleSortImpl bs = new BubbleSortImpl(array, new File(bsOutput), sync); bs.Sort(); // sorting the array with Bubble sort Algorithem bs.PrintArray(); // printing the sorted array } catch (IOException e) { return; } Reporter reporter = null; // Checking and reporting results try { reporter = Reporter.GetInstance(bsOutput); if (args.length > 0) // in case the program recieves 1st parameter rOutput = args[0]; // this parameter exchanges the default output path reporter.Report(rOutput, array); // creatinr the report } catch (IOException e) { } et = System.nanoTime(); tt = (et - st) / 1000; System.out.println(tt); }
/**
 * Finds the reporter whose first and last name both match.
 *
 * FIX: compares with equals() instead of == — the original compared String
 * references, which only matched when both names happened to be the same
 * interned instance.
 *
 * @param firstName first name to match
 * @param lastName last name to match
 * @return the matching reporter, or {@code null} when none matches
 */
public Reporter getReporter(String firstName, String lastName) {
  for (Reporter reporter : reporters) {
    if (reporter.getFirstName().equals(firstName) && reporter.getLastName().equals(lastName)) {
      return reporter;
    }
  }
  return null;
}
/**
 * Looks up a reporter by its numeric id.
 *
 * @param id the reporter id to search for
 * @return the matching reporter, or {@code null} when no reporter has that id
 */
public Reporter getReporter(int id) {
  for (Reporter candidate : reporters) {
    if (candidate.getId() != id) {
      continue;
    }
    return candidate;
  }
  return null;
}
/* * Fetch a file that is in a Hadoop file system. Return a local File. * Interruptible. */ private File hdfsFetch(Path fromPath, Reporter reporter) throws IOException, InterruptedException { UUID uniqueId = UUID.randomUUID(); File toFile = new File(tempDir, uniqueId.toString() + "/" + fromPath.getName()); File toDir = new File(toFile.getParent()); if (toDir.exists()) { FileUtils.deleteDirectory(toDir); } toDir.mkdirs(); Path toPath = new Path(toFile.getCanonicalPath()); FileSystem fS = fromPath.getFileSystem(hadoopConf); FileSystem tofS = FileSystem.getLocal(hadoopConf); Throttler throttler = new Throttler((double) bytesPerSecThrottle); try { for (FileStatus fStatus : fS.globStatus(fromPath)) { log.info("Copying " + fStatus.getPath() + " to " + toPath); long bytesSoFar = 0; FSDataInputStream iS = fS.open(fStatus.getPath()); FSDataOutputStream oS = tofS.create(toPath); byte[] buffer = new byte[downloadBufferSize]; int nRead; while ((nRead = iS.read(buffer, 0, buffer.length)) != -1) { // Needed to being able to be interrupted at any moment. if (Thread.interrupted()) { iS.close(); oS.close(); cleanDirNoExceptions(toDir); throw new InterruptedException(); } bytesSoFar += nRead; oS.write(buffer, 0, nRead); throttler.incrementAndThrottle(nRead); if (bytesSoFar >= bytesToReportProgress) { reporter.progress(bytesSoFar); bytesSoFar = 0l; } } if (reporter != null) { reporter.progress(bytesSoFar); } oS.close(); iS.close(); } return toDir; } catch (ClosedByInterruptException e) { // This can be thrown by the method read. cleanDirNoExceptions(toDir); throw new InterruptedIOException(); } }
private void copyFile(File sourceFile, File destFile, Reporter reporter) throws IOException { if (!destFile.exists()) { destFile.createNewFile(); } FileChannel source = null; FileChannel destination = null; Throttler throttler = new Throttler((double) bytesPerSecThrottle); FileInputStream iS = null; FileOutputStream oS = null; try { iS = new FileInputStream(sourceFile); oS = new FileOutputStream(destFile); source = iS.getChannel(); destination = oS.getChannel(); long bytesSoFar = 0; long reportingBytesSoFar = 0; long size = source.size(); int transferred = 0; while (bytesSoFar < size) { // Casting to int here is safe since we will transfer at most "downloadBufferSize" bytes. // This is done on purpose for being able to implement Throttling. transferred = (int) destination.transferFrom(source, bytesSoFar, downloadBufferSize); bytesSoFar += transferred; reportingBytesSoFar += transferred; throttler.incrementAndThrottle(transferred); if (reportingBytesSoFar >= bytesToReportProgress) { reporter.progress(reportingBytesSoFar); reportingBytesSoFar = 0l; } } if (reporter != null) { reporter.progress(reportingBytesSoFar); } } finally { if (iS != null) { iS.close(); } if (oS != null) { oS.close(); } if (source != null) { source.close(); } if (destination != null) { destination.close(); } } }
/**
 * Recomputes this node's activation from its input links and pushes the new
 * value to every registered reporter.
 *
 * The activation function 2/(1 + e^(-2t)) - 1 squashes the summed inputs
 * into (-1, 1) (equivalent to tanh(t)).
 *
 * FIX: removed the unused local {@code double v = 0} that shadowed the field
 * {@code this.v} assigned below — dead code that invited confusion.
 */
public void update() {
  double total = 0;
  for (Link input : inputs) {
    total = total + input.getV();
  }
  this.v = (2 / (1 + Math.exp(-2 * total))) - 1;
  for (Reporter observer : reporterList) {
    observer.addValue(this.v);
  }
}
/**
 * This is called when JPM runs in the background to start jobs
 *
 * Starts all services in dependency order, then loops forever as a watchdog,
 * restarting any started service that is found not running. Never returns
 * normally; a shutdown hook stops everything this daemon started.
 *
 * @throws Exception
 */
public void daemon() throws Exception {
  // Stop every service we started when the VM shuts down.
  Runtime.getRuntime()
      .addShutdownHook(
          new Thread("Daemon shutdown") {
            public void run() {
              for (Service service : startedByDaemon) {
                try {
                  // NOTE(review): error() is apparently used for visibility in
                  // the log here, not to signal an actual failure — confirm.
                  reporter.error("Stopping " + service);
                  service.stop();
                  reporter.error("Stopped " + service);
                } catch (Exception e) {
                  // Ignore
                }
              }
            }
          });
  List<ServiceData> services = getServices();
  Map<String, ServiceData> map = new HashMap<String, ServiceData>();
  for (ServiceData d : services) {
    map.put(d.name, d);
  }
  // Compute a dependency-respecting start order (see checkStartup).
  List<ServiceData> start = new ArrayList<ServiceData>();
  Set<ServiceData> set = new HashSet<ServiceData>();
  for (ServiceData sd : services) {
    checkStartup(map, start, sd, set);
  }
  if (start.isEmpty()) reporter.warning("No services to start");
  // First pass: start every service once, recording successes.
  for (ServiceData sd : start) {
    try {
      Service service = getService(sd.name);
      reporter.trace("Starting " + service);
      String result = service.start();
      // start() returns a non-null string to describe a startup failure.
      if (result != null) reporter.error("Started error " + result);
      else startedByDaemon.add(service);
      reporter.trace("Started " + service);
    } catch (Exception e) {
      reporter.error("Cannot start daemon %s, due to %s", sd.name, e);
    }
  }
  // Watchdog: every 10 seconds, restart any started service that died.
  // This loop intentionally never exits.
  while (true) {
    for (Service sd : startedByDaemon) {
      try {
        if (!sd.isRunning()) {
          reporter.error("Starting due to failure " + sd);
          String result = sd.start();
          if (result != null) reporter.error("Started error " + result);
        }
      } catch (Exception e) {
        reporter.error("Cannot start daemon %s, due to %s", sd, e);
      }
    }
    Thread.sleep(10000);
  }
}
/**
 * Flushes any buffered map output: waits for an in-flight spill to complete,
 * spills the remaining buffered records, then merges all spill files.
 *
 * @throws IOException if interrupted while waiting, or if a spill failed
 */
public synchronized void flush() throws IOException {
  LOG.info("Starting flush of map output");
  synchronized (spillLock) {
    // A spill is in progress while kvstart != kvend; wait for the spill
    // thread, reporting progress so the framework does not kill the task.
    while (kvstart != kvend) {
      try {
        reporter.progress();
        spillLock.wait();
      } catch (InterruptedException e) {
        // NOTE(review): interrupt status is not restored before rethrowing —
        // confirm callers rely on the IOException alone.
        throw (IOException)
            new IOException("Buffer interrupted while waiting for the writer").initCause(e);
      }
    }
  }
  // Surface any failure raised asynchronously by the spill thread.
  if (sortSpillException != null) {
    throw (IOException) new IOException("Spill failed").initCause(sortSpillException);
  }
  // kvend != kvindex means records are still buffered: spill them now.
  if (kvend != kvindex) {
    LOG.info("bufstart = " + bufstart + "; bufend = " + bufmark + "; bufvoid = " + bufvoid);
    LOG.info(
        "kvstart = " + kvstart + "; kvend = " + kvindex + "; length = " + kvoffsets.length);
    kvend = kvindex;
    bufend = bufmark;
    sortAndSpill();
  }
  // release sort buffer before the merge
  kvbuffer = null;
  mergeParts();
}
private void display() { final StringBuilder s = new StringBuilder(); int ql = 0; int fl = 0; int sl = 0; int sql = 0; for (int i = 0; i < stackASize; i++) { final Turn t = stackA[i].userTurn; ql += Metric.QUARTER.length(t); fl += Metric.FACE.length(t); sl += Metric.SLICE.length(t); sql += Metric.SLICE_QUARTER.length(t); s.append(t.toString()).append(' '); } if (stackBSize > 0) s.append(". "); for (int i = 0; i < stackBSize; i++) { // print out Phase B part final Turn t = stackB[i].userTurn; ql += Metric.QUARTER.length(t); fl += Metric.FACE.length(t); sl += Metric.SLICE.length(t); sql += Metric.SLICE_QUARTER.length(t); s.append(t.toString()).append(' '); } reporter.sequenceFound(s.substring(0, max(s.length() - 1, 0)), ql, fl, sl, sql); }
public static void main(String... args) { System.out.println("-- JSR 363 TCK started --"); List<XmlSuite> suites = new ArrayList<>(); suites.add(new TCKRunner()); TestNG tng = new TestNG(); tng.setXmlSuites(suites); tng.setOutputDirectory("./target/tck-results"); // tng.addListener(new VerboseReporter()); File file = new File(System.getProperty("java.io.tmpdir"), "tck-results.txt"); Reporter rep = new Reporter(file); System.out.println("Writing to file " + file.getAbsolutePath() + " ..."); tng.addListener(rep); tng.run(); rep.writeSummary(); System.out.println("-- JSR 363 TCK finished --"); }
/**
 * Warns about every non-final field visible on {@code element}: abstract
 * value types are expected to carry only constants, not mutable state.
 */
private void checkForMutableFields(Protoclass protoclass, TypeElement element) {
  for (VariableElement field :
      ElementFilter.fieldsIn(
          processing.getElementUtils().getAllMembers(CachingElements.getDelegate(element)))) {
    if (field.getModifiers().contains(Modifier.FINAL)) {
      continue; // constants are acceptable
    }
    Reporter report = protoclass.report();
    boolean declaredHere = CachingElements.equals(element, field.getEnclosingElement());
    if (declaredHere) {
      // Field declared directly on this type: attach the warning to it.
      report
          .withElement(field)
          .warning("Avoid introduction of fields (except constants) in abstract value types");
    } else {
      report.warning("Abstract value type inherits mutable fields");
    }
  }
}
/**
 * Creates a two-phase solver for the given turn set and metric.
 * Builds the phase A and phase B turn-transition tables and pre-fills both
 * search stacks with reusable node objects.
 */
public TwoPhaseSolver(final EnumSet<Turn> turns, final Metric metric, final Reporter reporter) {
  this.metric = metric;
  this.reporter = reporter;
  reporter.tableCreationStarted("turn transformation and pruning table");
  turnList = new TurnList(turns, metric, TurnList.Phase.A);
  turnListB = new TurnList(turns, metric, TurnList.Phase.B);
  // Pre-allocate every stack slot so the search loops never allocate.
  for (int i = 0; i < stackA.length; i++) {
    stackA[i] = new ANode();
  }
  for (int i = 0; i < stackB.length; i++) {
    stackB[i] = new BNode();
  }
}
/** Fans out the deprecated event to every short-listed collector; one failure does not stop the rest. */
@Override
public void deprecated() throws Exception {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.deprecated();
    } catch (Exception e) {
      reporter.error("Fail to call deprecated on %s", collector);
    }
  }
}
/** Fans out the enclosingMethod event to every short-listed collector, isolating failures. */
@Override
public void enclosingMethod(TypeRef cName, String mName, String mDescriptor) {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.enclosingMethod(cName, mName, mDescriptor);
    } catch (Exception e) {
      reporter.error("Fail to call enclosingMethod on %s", collector);
    }
  }
}
/**
 * Fans out the signature event to every short-listed collector, isolating failures.
 *
 * FIX: the error message previously named the wrong callback ("innerClass"),
 * making failures of this method impossible to diagnose from the log.
 */
@Override
public void signature(String signature) {
  for (ClassDataCollector cd : shortlist)
    try {
      cd.signature(signature);
    } catch (Exception e) {
      reporter.error("Fail to call signature on %s", cd);
    }
}
@Test public void firstTest() throws Exception { ArrayList<Map<String, ?>> specMapList = new ArrayList<Map<String, ?>>(); // printRecursive(new StringBuilder(), specViewConfig, "", // specification.getChildren(), adapterFactory, specMapList); fillRecursive(specification.getChildren(), specViewConfig, specMapList); Reporter.makeMapReport(specMapList); }
/**
 * Fans out the classBegin event to every delegate, isolating failures.
 *
 * FIX: error message said "Fail to class classBegin"; corrected to "call"
 * for consistency with the sibling delegate methods.
 *
 * NOTE(review): this method iterates 'delegates' while its siblings iterate
 * 'shortlist' — presumably intentional (the short list is established per
 * class after classBegin), but worth confirming.
 */
@Override
public void classBegin(int access, TypeRef name) {
  for (ClassDataCollector cd : delegates)
    try {
      cd.classBegin(access, name);
    } catch (Exception e) {
      reporter.error("Fail to call classBegin on %s", cd);
    }
}
/** Fans out the constant event to every short-listed collector, isolating failures. */
@Override
public void constant(Object object) {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.constant(object);
    } catch (Exception e) {
      reporter.error("Fail to call constant on %s", collector);
    }
  }
}
/** Fans out the memberEnd event to every short-listed collector, isolating failures. */
@Override
public void memberEnd() {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.memberEnd();
    } catch (Exception e) {
      reporter.error("Fail to call memberEnd on %s", collector);
    }
  }
}
/** Fans out the class-file version event to every short-listed collector, isolating failures. */
@Override
public void version(int minor, int major) {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.version(minor, major);
    } catch (Exception e) {
      reporter.error("Fail to call version on %s", collector);
    }
  }
}
/**
 * Fans out the addReference event to every short-listed collector, isolating failures.
 *
 * FIX: error message said "Fail to class addReference"; corrected to "call"
 * for consistency with the sibling delegate methods.
 */
@Override
public void addReference(TypeRef ref) {
  for (ClassDataCollector cd : shortlist)
    try {
      cd.addReference(ref);
    } catch (Exception e) {
      reporter.error("Fail to call addReference on %s", cd);
    }
}
/** Fans out the referenceMethod event to every short-listed collector, isolating failures. */
@Override
public void referenceMethod(int access, TypeRef className, String method, String descriptor) {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.referenceMethod(access, className, method, descriptor);
    } catch (Exception e) {
      reporter.error("Fail to call referenceMethod on %s", collector);
    }
  }
}
/**
 * Fans out the extendsClass event to every short-listed collector, isolating failures.
 *
 * FIX: error message said "Fail to class extendsClass"; corrected to "call"
 * for consistency with the sibling delegate methods.
 */
@Override
public void extendsClass(TypeRef zuper) throws Exception {
  for (ClassDataCollector cd : shortlist)
    try {
      cd.extendsClass(zuper);
    } catch (Exception e) {
      reporter.error("Fail to call extendsClass on %s", cd);
    }
}
/** Fans out the field event to every short-listed collector, isolating failures. */
@Override
public void field(FieldDef defined) {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.field(defined);
    } catch (Exception e) {
      reporter.error("Fail to call field on %s", collector);
    }
  }
}
/**
 * Fans out the implementsInterfaces event to every short-listed collector, isolating failures.
 *
 * FIX: error message said "Fail to class implementsInterfaces"; corrected to
 * "call" for consistency with the sibling delegate methods.
 */
@Override
public void implementsInterfaces(TypeRef[] interfaces) throws Exception {
  for (ClassDataCollector cd : shortlist)
    try {
      cd.implementsInterfaces(interfaces);
    } catch (Exception e) {
      reporter.error("Fail to call implementsInterfaces on %s", cd);
    }
}
/** Fans out the referTo event to every short-listed collector, isolating failures. */
@Override
public void referTo(TypeRef typeRef, int modifiers) {
  for (ClassDataCollector collector : shortlist) {
    try {
      collector.referTo(typeRef, modifiers);
    } catch (Exception e) {
      reporter.error("Fail to call referTo on %s", collector);
    }
  }
}