/**
 * Spins/yields/blocks until node s is matched or caller gives up.
 *
 * @param s the waiting node
 * @param pred the predecessor of s, or s itself if it has no predecessor, or null if unknown
 *     (the null case does not occur in any current calls but may in possible future extensions)
 * @param e the comparison value for checking match
 * @param timed if true, wait only until timeout elapses
 * @param nanos timeout in nanoseconds, used only if timed is true
 * @return matched item, or e if unmatched on interrupt or timeout
 */
private E awaitMatch(Node s, Node pred, E e, boolean timed, long nanos) {
    final long deadline = timed ? System.nanoTime() + nanos : 0L;
    Thread w = Thread.currentThread();
    int spins = -1; // initialized after first item and cancel checks
    ThreadLocalRandom randomYields = null; // bound if needed

    for (;;) {
        Object item = s.item;
        if (item != e) { // matched
            // assert item != s;
            s.forgetContents(); // avoid garbage
            return LinkedTransferQueue.<E>cast(item);
        }
        if ((w.isInterrupted() || (timed && nanos <= 0L))
                && s.casItem(e, FORGOTTEN)) { // cancel
            unsplice(pred, s);
            return e;
        }

        if (spins < 0) { // establish spins at/near front
            if ((spins = spinsFor(pred, s.isData)) > 0)
                randomYields = ThreadLocalRandom.current();
        }
        else if (spins > 0) { // spin
            --spins;
            if (randomYields.nextInt(CHAINED_SPINS) == 0)
                Thread.yield(); // occasionally yield
        }
        else if (s.waiter == null) {
            s.waiter = w; // request unpark then recheck
        }
        else if (timed) {
            nanos = deadline - System.nanoTime();
            if (nanos > 0L)
                LockSupport.parkNanos(this, nanos);
        }
        else {
            LockSupport.park(this);
        }
    }
}
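// A minimal, self-contained sketch of the same spin/yield/park progression,
// for illustration only. The class and constant names here are hypothetical
// assumptions, not part of the JDK implementation above.

import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;

final class SpinThenParkWaiter<V> {

    private static final int MAX_SPINS = 1 << 7; // assumed tuning constant
    private final AtomicReference<V> slot = new AtomicReference<>();

    /** Producer side: publish a value, then wake the waiting thread. */
    void offer(V value, Thread waiter) {
        slot.set(value);
        LockSupport.unpark(waiter);
    }

    /** Consumer side: spin briefly, yield occasionally, then park. */
    V await() {
        int spins = MAX_SPINS;
        V v;
        while ((v = slot.get()) == null) {
            if (spins > 0) {
                --spins;
                // Yield now and then while spinning, as awaitMatch does,
                // so other runnable threads are not starved.
                if (ThreadLocalRandom.current().nextInt(64) == 0)
                    Thread.yield();
            } else {
                // Park until offer() unparks us; spurious wakeups are
                // handled by re-checking the slot in the loop condition.
                LockSupport.park(this);
            }
        }
        return v;
    }
}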
/**
 * Testing threads thoroughly is tricky, and I am not yet sure how best to do it.
 * This test verifies priority order and that the total sum received equals the
 * total sum sent. It also simulates clients hanging at random.
 */
@Test
public void testReceiveData() {
    DecorateCheckOrderAndCountSumMarshaller sumAppender = new DecorateCheckOrderAndCountSumMarshaller();
    QueueWorker worker = new QueueWorker(sumAppender);
    Thread workerThread = new Thread(worker);

    // Run 20 clients; 10,000 items are generated per client, as defined in
    // src/test/properties/app.properties.
    for (int i = 0; i < 20; i++) {
        Runnable clientHangSimulator = null;
        if (i % 5 == 0) {
            // Simulate an occasional hang for four of the twenty clients and
            // check that the worker is not biased toward the others.
            clientHangSimulator = () -> {
                // ~0.1% of items trigger a 500 ms hang. (The earlier
                // nextInt(1001) % 1000 == 0 matched both 0 and 1000, so it
                // fired twice as often as intended.)
                if (ThreadLocalRandom.current().nextInt(1000) == 0) {
                    try {
                        Thread.sleep(500L);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            };
        }
        executorService.execute(new DataGeneratorTask(clientHangSimulator));
    }

    workerThread.start();

    try {
        barrier.await();
        System.out.println("Fired test");
        Thread.sleep(1000);
        executorService.shutdown();
        // Actually runs much faster; the timeout may need updating if the item
        // count per client is drastically increased in app.properties.
        executorService.awaitTermination(2 * 60, TimeUnit.SECONDS);
        System.out.println("exe service awaited");
    } catch (InterruptedException | BrokenBarrierException e) {
        e.printStackTrace();
    }

    try {
        workerThread.join(Long.MAX_VALUE);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    Assert.assertEquals(
        "Sum generated does not match sum received",
        sumAppender.getSum(),
        AppContext.getInstance().getTotalGeneratedAmount());
    Assert.assertFalse("Worker didn't exit successfully", workerThread.isAlive());
}
/**
 * Gets a random value from the given collection.
 *
 * @param c Input collection (neither {@code null} nor empty).
 * @return Random value from the input collection.
 */
@SuppressWarnings("UnusedDeclaration")
private static <T> T rand(Collection<? extends T> c) {
    if (c == null)
        throw new IllegalArgumentException();

    // nextInt(bound) also rejects an empty collection (bound must be positive).
    int n = ThreadLocalRandom.current().nextInt(c.size());

    int i = 0;
    for (T t : c) {
        if (i++ == n)
            return t;
    }

    // Reached only if the collection shrank while we iterated.
    throw new ConcurrentModificationException();
}
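// Hypothetical call site for rand(...) above; the list literal is
// illustrative. The O(n) walk is unavoidable because Collection offers no
// indexed access.

import java.util.Arrays;
import java.util.List;

List<String> nodes = Arrays.asList("node-1", "node-2", "node-3");
String picked = rand(nodes); // each element is chosen with probability 1/3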
@Override
public void run() {
    AppContext appContext = AppContext.getInstance();
    try {
        barrier.await();
    } catch (InterruptedException | BrokenBarrierException e) {
        e.printStackTrace();
        Thread.currentThread().interrupt();
    }

    for (int i = 0; i < appContext.getGeneratedItemCountPerConnection(); i++) {
        if (hangingSimulator != null) {
            hangingSimulator.run();
        }
        // Timestamps advance by a random step so items arrive out of order
        // across clients; amounts are random so the sum check is meaningful.
        lastTime += ThreadLocalRandom.current().nextInt(100);
        BigDecimal amount = new BigDecimal(ThreadLocalRandom.current().nextInt(100));
        Item item = new Item(lastTime, amount);
        itemConsumer.accept(item);
        appContext.getClientRegistry().registerLastClientTime(id, lastTime);
        appContext.addToTotalGeneratedAmount(amount);
    }

    appContext.getClientRegistry().deregisterClient(id);
    System.out.println(MessageFormat.format("Client id {0} exited successfully.", id));
    System.out.println("Work queue size = " + appContext.getWorkQueue().size());
}
/**
 * Adds an old value with a fixed timestamp to the reservoir.
 *
 * @param value the value to be added
 * @param timestamp the epoch timestamp of {@code value} in seconds
 */
public void update(long value, long timestamp) {
    rescaleIfNeeded();
    lockForRegularUsage();
    try {
        final double itemWeight = weight(timestamp - startTime);
        final WeightedSample sample = new WeightedSample(value, itemWeight);
        final double priority = itemWeight / ThreadLocalRandom.current().nextDouble();

        final long newCount = count.incrementAndGet();
        if (newCount <= size) {
            values.put(priority, sample);
        } else {
            Double first = values.firstKey();
            if (first < priority && values.putIfAbsent(priority, sample) == null) {
                // ensure we always remove an item
                while (values.remove(first) == null) {
                    first = values.firstKey();
                }
            }
        }
    } finally {
        unlockForRegularUsage();
    }
}
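// The weight(...) function is not shown in this excerpt. In forward-decay
// reservoirs of this kind it is typically an exponential of the item's age
// relative to the landmark startTime; a sketch assuming a decay factor field
// `alpha` (the field name and body are assumptions, not taken from the
// excerpt):

private double weight(long t) {
    // Newer items (larger t) get exponentially larger weights, hence larger
    // priorities weight / u, so they tend to displace older, lower-priority
    // samples when the reservoir is full.
    return Math.exp(alpha * t);
}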
/**
 * Generates a random number in the range min (inclusive) to max (inclusive).
 *
 * @param min the minimum value of the random number (inclusive)
 * @param max the maximum value of the random number (inclusive)
 * @return a random number from min (inclusive) to max (inclusive)
 */
public static long random(long min, long max) {
    // nextLong(origin, bound) is uniform over [min, max]. The earlier
    // Math.round(nextDouble() * (max - min) + min) gave both endpoints only
    // half the probability of interior values and loses precision for ranges
    // wider than 2^53. Assumes max < Long.MAX_VALUE.
    return ThreadLocalRandom.current().nextLong(min, max + 1);
}
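// Example usage (the bounds are illustrative); both endpoints can occur.

long delayMillis = random(100L, 500L); // uniform over [100, 500]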