/**
 * Sums the given elements in parallel using a dedicated fork/join pool.
 *
 * @param elts the values to sum; the full range [0, elts.length) is processed
 * @return the total as a Double (boxed, per the overridden interface)
 */
@Override
public Double Sum(int[] elts) {
  ForkJoinPool pool = new ForkJoinPool();
  try {
    // SumTask recursively splits the range and sums the halves.
    return pool.invoke(new SumTask(elts, 0, elts.length));
  } finally {
    // Original only shut down on the success path; if invoke() threw,
    // the pool's worker threads were leaked.
    pool.shutdown();
  }
}
/**
 * After invoking a single task, isQuiescent eventually becomes true, at which time queues are
 * empty, threads are not active, the task has completed successfully, and construction parameters
 * continue to hold
 */
public void testIsQuiescent() throws Exception {
  ForkJoinPool p = new ForkJoinPool(2);
  try (PoolCleaner cleaner = cleaner(p)) {
    // Pool has no work yet, so it starts quiescent.
    assertTrue(p.isQuiescent());
    long startTime = System.nanoTime();
    FibTask f = new FibTask(20);
    p.invoke(f);
    assertSame(ForkJoinPool.defaultForkJoinWorkerThreadFactory, p.getFactory());
    // Spin until the pool reports quiescence, re-checking invariants on every
    // pass and failing if it takes longer than LONG_DELAY_MS.
    while (!p.isQuiescent()) {
      if (millisElapsedSince(startTime) > LONG_DELAY_MS)
        throw new AssertionFailedError("timed out");
      assertFalse(p.getAsyncMode());
      assertFalse(p.isShutdown());
      assertFalse(p.isTerminating());
      assertFalse(p.isTerminated());
      Thread.yield();
    }
    // Quiescent: queues must be empty and mode/shutdown flags unchanged.
    assertTrue(p.isQuiescent());
    assertFalse(p.getAsyncMode());
    assertEquals(0, p.getQueuedTaskCount());
    assertEquals(0, p.getQueuedSubmissionCount());
    assertFalse(p.hasQueuedSubmissions());
    // Active-thread count may lag quiescence briefly; wait it out.
    while (p.getActiveThreadCount() != 0 && millisElapsedSince(startTime) < LONG_DELAY_MS)
      Thread.yield();
    assertFalse(p.isShutdown());
    assertFalse(p.isTerminating());
    assertFalse(p.isTerminated());
    // fib(20) == 6765: the task completed successfully.
    assertTrue(f.isDone());
    assertEquals(6765, (int) f.get());
    assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
  }
}
@Override public AbstractFeature build() throws JATEException { List<String> contextIds = new ArrayList<>(frequencyCtxBased.getMapCtx2TTF().keySet()); // start workers int cores = Runtime.getRuntime().availableProcessors(); cores = (int) (cores * properties.getFeatureBuilderMaxCPUsage()); cores = cores == 0 ? 1 : cores; StringBuilder sb = new StringBuilder("Building features using cpu cores="); sb.append(cores) .append(", total ctx=") .append(contextIds.size()) .append(", max per worker=") .append(properties.getFeatureBuilderMaxDocsPerWorker()); LOG.info(sb.toString()); CooccurrenceFBWorker worker = new CooccurrenceFBWorker( contextIds, frequencyTermBased, minTTF, frequencyCtxBased, minTCF, properties.getFeatureBuilderMaxTermsPerWorker()); LOG.info("Filtering candidates with min.ttf=" + minTTF + " min.tcf=" + minTCF); ForkJoinPool forkJoinPool = new ForkJoinPool(cores); Cooccurrence feature = forkJoinPool.invoke(worker); sb = new StringBuilder("Complete building features."); LOG.info(sb.toString()); return feature; }
/**
 * Blurs the given image by running a ForkBlur task over its pixel data.
 *
 * @param srcImage source image; it is not modified
 * @return a new TYPE_INT_ARGB image containing the blurred pixels
 */
public static BufferedImage blur(BufferedImage srcImage) {
  int w = srcImage.getWidth();
  int h = srcImage.getHeight();
  int[] src = srcImage.getRGB(0, 0, w, h, null, 0, w);
  int[] dst = new int[src.length];
  System.out.println("Array size is " + src.length);
  System.out.println("Threshold is " + sThreshold);
  int processors = Runtime.getRuntime().availableProcessors();
  System.out.println(
      processors + " processor" + (processors != 1 ? "s are " : " is ") + "available");
  ForkBlur fb = new ForkBlur(src, 0, src.length, dst);
  ForkJoinPool pool = new ForkJoinPool();
  long startTime = System.currentTimeMillis();
  try {
    pool.invoke(fb);
  } finally {
    // Original never shut the pool down; release its workers even on failure.
    pool.shutdown();
  }
  long endTime = System.currentTimeMillis();
  System.out.println("Image blur took " + (endTime - startTime) + " milliseconds.");
  BufferedImage dstImage = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
  dstImage.setRGB(0, 0, w, h, dst, 0, w);
  return dstImage;
}
/**
 * Computes a Fibonacci number in parallel.
 *
 * @param args args[0] is the Fibonacci index to compute
 */
public static void main(String[] args) {
  // Original indexed args[0] unconditionally and crashed with
  // ArrayIndexOutOfBoundsException when launched without arguments.
  if (args.length != 1) {
    System.err.println("Usage: Fibonacci3 <n>");
    return;
  }
  int processors = Runtime.getRuntime().availableProcessors();
  System.out.println("Number of processors: " + processors);
  Fibonacci3 f = new Fibonacci3(Integer.parseInt(args[0]));
  ForkJoinPool pool = new ForkJoinPool(processors);
  int result = pool.invoke(f);
  System.out.println("Result: " + result);
}
/** Fills the shared array with random values and runs the parallel search over it. */
public static void main(String[] args) {
  arrayToSearch = new int[N];
  // Populate the search space with random values in [0, 1000).
  ThreadLocalRandom rnd = ThreadLocalRandom.current();
  for (int idx = 0; idx < N; idx++) {
    arrayToSearch[idx] = rnd.nextInt(0, 1000);
  }
  // Search the full index range [0, N-1] with the configured thread count.
  ForkJoinPool pool = new ForkJoinPool(NUM_THREADS);
  SearchTask rootTask = new SearchTask(0, N - 1);
  pool.invoke(rootTask);
}
/** Sorts `raw` into `sorted` via a RecursiveAction and prints the result. */
public static void main(String[] args) {
  sorted = new int[raw.length];
  ForkJoinPool pool = new ForkJoinPool();
  pool.invoke(new RecursiveActionDemo(raw, 0, raw.length, sorted));
  // Output format is "[a,b,c,]" — the trailing comma matches the original.
  StringBuilder out = new StringBuilder();
  out.append('[');
  for (int value : sorted) {
    out.append(value).append(',');
  }
  out.append(']');
  System.out.println(out);
}
public static void main(String[] args) { int n = 50; char[] a = new char[n]; for (int i = 0; i < n; i++) { a[i] = genenateRandomChar(); } CharNum task = new CharNum(a, 0, n); // 统计字母数组中含有a,b,c的数量 int count = mainPool.invoke(task); System.out.println(count); }
/**
 * Opens the file, builds an immutable line index in parallel, and prepares the
 * reader's lock.
 *
 * @param file the file to read
 * @param charset charset used when decoding the file's bytes
 * @param splitCount desired number of indexing splits; actual chunk size is
 *     clamped below by MIN_FORK_THRESHOLD
 * @param pool fork/join pool used to run the indexing tasks
 * @throws IOException if the file cannot be opened
 */
public FileReader(File file, Charset charset, int splitCount, ForkJoinPool pool)
    throws IOException {
  this.raf = new BufferedAccessFile(file, READ_MODE);
  this.charset = charset;
  this.lock = new ReentrantLock();
  // Each indexing subtask covers at least MIN_FORK_THRESHOLD bytes.
  long chunkSize = Math.max(MIN_FORK_THRESHOLD, file.length() / splitCount);
  IndexingTask rootTask = new IndexingTask(file, 0, file.length(), chunkSize);
  this.index = Collections.unmodifiableSortedSet(pool.invoke(rootTask));
}
/**
 * Merges adjacent sorted segments of length {@code segmentLen} from
 * {@code input} into {@code output}, pair by pair.
 *
 * @param segmentLen length of each already-sorted segment
 * @param input source array of sorted segments
 * @param output destination array for the merged pairs
 * @param mainPool pool the merge tasks run on
 */
private static void mergeRound1(
    int segmentLen, int[] input, int[] output, ForkJoinPool mainPool) {
  int twoSegmentLen = 2 * segmentLen;
  // Original called pool.invoke() inside the loop; invoke() blocks until the
  // task finishes, so the merges ran strictly one after another. Submit all
  // tasks first, then join, so the pairs actually merge in parallel.
  int numPairs = (input.length + twoSegmentLen - 1) / twoSegmentLen;
  ForkJoinTask<?>[] pending = new ForkJoinTask<?>[numPairs];
  for (int count = 0; count < numPairs; count++) {
    int startIndex = count * twoSegmentLen;
    int endIndex = (count + 1) * twoSegmentLen - 1;
    int middle = startIndex + segmentLen - 1;
    pending[count] =
        mainPool.submit(
            ForkJoinTask.adapt(new MergeTask(input, startIndex, middle, endIndex, output)));
  }
  for (ForkJoinTask<?> task : pending) {
    task.join();
  }
}
/** @param args */ public static void main(String[] args) { // create a random data set final int[] data = new int[10000000]; final Random random = new Random(); for (int i = 0; i < data.length; i++) { data[i] = random.nextInt(1000000); } // submit the task to the pool final ForkJoinPool pool = new ForkJoinPool(1000); final MaximumFinder finder = new MaximumFinder(data); System.out.println(pool.invoke(finder)); }
public static void main(String[] args) { int n = 26; int[] a = new int[n]; for (int i = 0; i < n; i++) { a[i] = i; } SubTask task = new SubTask(a, 0, n); // execute all tasks mainPool.invoke(task); for (int i = 0; i < n; i++) { System.out.print(a[i] + " "); } }
@Override protected ArrayList<Resultado> compute() { // if work is above threshold, break tasks up into smaller tasks if (this.linkPagina.size() > 1) { List<Pagina> subtasks = new ArrayList<>(); subtasks.addAll(createSubtasks()); for (Pagina subtask : subtasks) { subtask.fork(); } Resultado result; ArrayList<Resultado> aregloResultadoTarea; for (Pagina subtask : subtasks) { aregloResultadoTarea = subtask.join(); result = aregloResultadoTarea.get(0); this.resultado.addAll(aregloResultadoTarea); } // imprimir(resultado); return this.resultado; } else { ArrayList<Resultado> arregloResultado = new ArrayList<>(); Resultado resultadoTareaTexto; try { Document doc = Jsoup.connect(this.linkPagina.get(0)).get(); int cores = Runtime.getRuntime().availableProcessors(); String titulo = doc.title(); Texto tareaTexto = new Texto(0, doc.body().text(), doc.body().text(), this.palabra); ForkJoinPool forkJoinPool = new ForkJoinPool(cores); resultadoTareaTexto = forkJoinPool.invoke(tareaTexto); // System.out.println("Titulo: "+titulo); resultadoTareaTexto.setTitulo(titulo); resultadoTareaTexto.setUrl(this.linkPagina.get(0)); // arregloResultado.add(resultadoTareaTexto); arregloResultado.add(resultadoTareaTexto); } catch (IOException e) { } return arregloResultado; } }
/**
 * Runs DynamicFib(num) on the given pool and prints the result, elapsed time,
 * per-worker steals since the previous call, and the pool size.
 */
static void test(ForkJoinPool pool, int num) throws Exception {
  int parallelism = pool.getParallelism();
  long startNanos = System.nanoTime();
  DynamicFib task = new DynamicFib(num);
  pool.invoke(task);
  double elapsedSecs = (System.nanoTime() - startNanos) / (double) NPS;
  System.out.print("DynamicFib " + num + " = " + task.number);
  System.out.printf("\tTime: %9.3f", elapsedSecs);
  // Steals are cumulative per pool; report only those since the last call.
  long totalSteals = pool.getStealCount();
  long newSteals = totalSteals - lastStealCount;
  lastStealCount = totalSteals;
  System.out.printf(" Steals: %4d", newSteals / parallelism);
  System.out.printf(" Workers: %4d", pool.getPoolSize());
  System.out.println();
}
public static void main(String[] args) throws IOException, InterruptedException { long startDate = System.nanoTime(); ForkJoinPool forkPool = new ForkJoinPool(); File dir = new File("C:\\Documents and Settings\\akrier\\Mes documents\\Mes images"); File[] files = dir.listFiles(); List<File> filesResized = new LinkedList<File>(); filesResized = (List<File>) forkPool.invoke(new ResizeTask(files)); // tiens pas comptes des fichiers au mauvais format System.out.println("nb fichiers resizés :" + filesResized.size()); long endDate = System.nanoTime(); System.out.println(endDate - startDate); }
/** Loads the sort configuration, runs the file merge sort, and logs timing. */
public static void main(String[] args) {
  long start = System.currentTimeMillis();
  Properties properties = new Properties();
  // try-with-resources: the original never closed the resource stream, and
  // would have thrown an NPE if the resource was missing.
  try (java.io.InputStream config =
      Runner.class.getClassLoader().getResourceAsStream("org/bigsorting/config.properties")) {
    if (config == null) {
      logger.error("org/bigsorting/config.properties not found on the classpath");
      return;
    }
    properties.load(config);
    String inFileName = properties.getProperty("inFileName");
    MergeSortingFileTask mainTask = new MergeSortingFileTask();
    mainTask.setMainFileName(inFileName);
    ForkJoinPool pool = new ForkJoinPool();
    pool.invoke(mainTask);
    pool.shutdown();
    logger.info("Sorting is complete. Gratis!");
    long delay = System.currentTimeMillis() - start;
    logger.debug(String.format("Time: %s ms", delay));
  } catch (IOException e) {
    logger.error(e.getMessage());
  }
}
public static void main(String args[]) { int pLevel; int threshold; if (args.length != 2) { System.out.println("Usage: FJExperiment threshold parallism"); return; } pLevel = Integer.parseInt(args[0]); threshold = Integer.parseInt(args[1]); // These variables are used to time the task. long beginT, endT; // Create a task pool. Notice that the parallelsim level is set. ForkJoinPool fjp = new ForkJoinPool(pLevel); double[] nums = new double[1000000]; for (int i = 0; i < nums.length; i++) nums[i] = (double) i; Transform task = new Transform(nums, 0, nums.length, threshold); // Starting timing. beginT = System.nanoTime(); // Start the main ForkJoinTask. fjp.invoke(task); // End timing. endT = System.nanoTime(); System.out.println("Level of parallelism: " + pLevel); System.out.println("Sequential threshold: " + threshold); System.out.println("Elapsed time: " + (endT - beginT) + " ns"); System.out.println(); }
/**
 * Reads the khan.csv gene matrix, runs KNN imputation over it, and writes the
 * imputed matrix to khan.imputed.csv (NaN cells replaced by their imputed
 * values).
 */
public static void main(String[] args) throws IOException {
  ImmutableList.Builder<Gene> b = ImmutableList.builder();
  // Specify UTF-8 explicitly: the original InputStreamReader used the
  // platform default charset.
  List<String> lines =
      CharStreams.readLines(
          new InputStreamReader(
              KNNImpute.class.getResourceAsStream("khan.csv"),
              java.nio.charset.StandardCharsets.UTF_8));
  lines = lines.subList(1, lines.size()); // skip the header row
  int j = 0;
  for (String line : lines) {
    String[] l = line.split(";");
    float[] d = new float[l.length];
    int nans = 0;
    for (int i = 0; i < l.length; ++i) {
      if ("NA".equals(l[i])) {
        // Missing value: mark with NaN and count it for this gene.
        nans++;
        d[i] = Float.NaN;
      } else {
        d[i] = Float.parseFloat(l[i]);
      }
    }
    b.add(new Gene(j++, nans, d));
  }
  final KNNImputeDescription desc2 = new KNNImputeDescription();
  desc2.setMaxp(100000);
  KNNImpute r = new KNNImpute(desc2, b.build());
  ForkJoinPool p = new ForkJoinPool();
  try {
    p.invoke(r);
  } finally {
    p.shutdown(); // original leaked the pool's worker threads
  }
  try (PrintWriter w = new PrintWriter("khan.imputed.csv", "UTF-8")) {
    w.println(StringUtils.repeat("sample", ";", r.samples));
    for (Gene g : r.genes) {
      float[] d = g.data;
      int nan = 0;
      // Emit each value, substituting NaN cells with their replacements
      // in order.
      w.print(Float.isNaN(d[0]) ? g.nanReplacements[nan++] : d[0]);
      for (int i = 1; i < d.length; ++i)
        w.append(';').append(String.valueOf(Float.isNaN(d[i]) ? g.nanReplacements[nan++] : d[i]));
      w.println();
    }
  }
}
/** Computes which points are inside the hull */ private Mesh buildPreviewMesh() { long start = System.currentTimeMillis(); // First get the bounding box. updateWorldBound(); BoundingBox bound = (BoundingBox) getWorldBound(); Vector3f maxBound = bound.getMax(null); Vector3f originPoint = bound.getMin(null); originPoint.x = Math.min(originPoint.x, -maxBound.x); originPoint.y = Math.min(originPoint.y, -maxBound.y); originPoint.z = Math.min(originPoint.z, -maxBound.z); // Thread Pool ForkJoinPool pool = new ForkJoinPool(); // Create an octree from the data OctreeNode octree = new OctreeNode(originPoint, maxBound); OctreeConstructionTask dcOctreeTask = new OctreeConstructionTask(octree, primitives, 3, 6); pool.invoke(dcOctreeTask); // Contour the octree. AdaptiveDualContouringTask adaptiveTask = new AdaptiveDualContouringTask(octree, primitives); pool.invoke(adaptiveTask); // Retrieve computed data. ArrayList<Vector3f> verticesList = dcOctreeTask.getVertices(); ArrayList<Vector3i> triangles = adaptiveTask.getTriangles(); int numberOfVerticesBefore = verticesList.size(); int numberOfTrianglesBefore = triangles.size(); // Compute normals both from data and triangles. Vector3f normals[] = MeshUtils.facetedNormalsFromFaces( triangles, verticesList, primitives, (float) Math.toRadians(10)); // Drop the triangles to an array. 
int index = 0; int[] triangleList = new int[3 * triangles.size()]; for (Vector3i v : triangles) { triangleList[index++] = v.x; triangleList[index++] = v.y; triangleList[index++] = v.z; } // Finally, make the mesh itself: Mesh mesh = new Mesh(); mesh.setBuffer( Type.Position, 3, BufferUtils.createFloatBuffer(verticesList.toArray(new Vector3f[0]))); mesh.setBuffer(Type.Index, 3, BufferUtils.createIntBuffer(triangleList)); mesh.setBuffer(Type.Normal, 3, BufferUtils.createFloatBuffer(normals)); mesh.updateBound(); mesh.setStatic(); long timeTaken = System.currentTimeMillis() - start; System.out.println( String.format( "%d Vertices, %d Triangles in %d Milliseconds", verticesList.size(), triangles.size(), timeTaken)); return mesh; }
/**
 * Counts occurrences of {@code searchedWord} under {@code folder} by running a
 * recursive search task on the shared fork/join pool.
 */
Long countOccurrencesInParallel(Folder folder, String searchedWord) {
  FolderSearchTask rootTask = new FolderSearchTask(folder, searchedWord);
  return forkJoinPool.invoke(rootTask);
}
/**
 * Sorts {@code arr} in place using a parallel merge sort.
 *
 * @param arr the array to sort; must not be null
 */
public static void parallelSort(int[] arr) {
  // Arrays of length 0 or 1 are already sorted; this also avoids handing the
  // task the invalid range [0, -1] for an empty array.
  if (arr.length < 2) {
    return;
  }
  ForkJoinPool pool = new ForkJoinPool(PROCESSORS_COUNT);
  try {
    pool.invoke(new MergeSortTask(arr, 0, arr.length - 1));
  } finally {
    // Original created a fresh pool per call and never shut it down,
    // leaking PROCESSORS_COUNT threads each time.
    pool.shutdown();
  }
}
/** Runs MyAction over [0, 1000000) on a dedicated 40-worker pool. */
private void run() {
  ForkJoinPool fjp = new ForkJoinPool(40);
  try {
    fjp.invoke(new MyAction(0, 1000000));
  } finally {
    // Original never shut the pool down, leaking 40 worker threads per call.
    fjp.shutdown();
  }
}
/**
 * Collects the carbon geo-points found under {@code folder} for the given
 * region by running a recursive search task on the shared fork/join pool.
 */
List<GeoPointCarbon> countOccurrencesInParallel(Folder folder, Region region) {
  FolderSearchTask rootTask = new FolderSearchTask(folder, region);
  return forkJoinPool.invoke(rootTask);
}
/**
 * Runs a fixed number of iteration steps starting from {@code initial}.
 * Synchronized, matching the original's one-caller-at-a-time contract.
 */
@Override
public synchronized T iterate(int steps, T initial) {
  IterateStepsTask task = new IterateStepsTask(initial, steps);
  return pool.invoke(task);
}
@Override public void process(Task task) { // Give the pool a subtask to solve. At the beginning task == subtask forkJoinPool.invoke(new Subtask(task, 0, task.getOperations().size(), true)); }
/**
 * Time-bounded iteration starting from {@code initial}; the duration budget is
 * interpreted by IterateTimeTask.
 */
public T iterate(Duration duration, T initial) {
  IterateTimeTask task = new IterateTimeTask(initial, duration);
  return pool.invoke(task);
}
/**
 * Parallel matrix multiply: computes self × other into {@code result},
 * covering all rows of {@code self} and all columns of {@code other}.
 */
public void pmmuli(FloatMatrix self, FloatMatrix other, FloatMatrix result) {
  int rowEnd = self.getRows();
  int colEnd = other.getColumns();
  MulitplyPartly rootTask = new MulitplyPartly(self, other, result, 0, rowEnd, 0, colEnd);
  pool.invoke(rootTask);
}