@Test
public void usePatternsAskPipe() {
  ActorSystem system = ActorSystem.create("MySystem");
  ActorRef actorA = system.actorOf(new Props(MyUntypedActor.class));
  ActorRef actorB = system.actorOf(new Props(MyUntypedActor.class));
  ActorRef actorC = system.actorOf(new Props(MyUntypedActor.class));
  // #ask-pipe
  final Timeout t = new Timeout(Duration.create(5, TimeUnit.SECONDS));

  final ArrayList<Future<Object>> futures = new ArrayList<Future<Object>>();
  futures.add(ask(actorA, "request", 1000)); // using 1000ms timeout
  futures.add(ask(actorB, "another request", t)); // using timeout from above

  final Future<Iterable<Object>> aggregate = Futures.sequence(futures, system.dispatcher());

  final Future<Result> transformed =
      aggregate.map(
          new Mapper<Iterable<Object>, Result>() {
            public Result apply(Iterable<Object> coll) {
              final Iterator<Object> it = coll.iterator();
              final String s = (String) it.next();
              final int x = (Integer) it.next();
              return new Result(x, s);
            }
          },
          system.dispatcher());

  pipe(transformed, system.dispatcher()).to(actorC);
  // #ask-pipe
  system.shutdown();
}
@Override
public F.Promise<Void> store(Path path, String key) {
  Promise<Void> promise = Futures.promise();
  TransferManager transferManager = new TransferManager(credentials);
  Upload upload = transferManager.upload(bucketName, key, path.toFile());
  upload.addProgressListener(
      (ProgressListener)
          progressEvent -> {
            if (progressEvent.getEventType().isTransferEvent()) {
              if (progressEvent
                  .getEventType()
                  .equals(ProgressEventType.TRANSFER_COMPLETED_EVENT)) {
                transferManager.shutdownNow();
                promise.success(null);
              } else if (progressEvent
                  .getEventType()
                  .equals(ProgressEventType.TRANSFER_FAILED_EVENT)) {
                transferManager.shutdownNow();
                promise.failure(new Exception("Upload failed"));
              }
            }
          });
  return F.Promise.wrap(promise.future());
}
/**
 * Trains the network on this mini batch and returns a list of futures, one per training job.
 *
 * @param trainingBatch the trees to iterate on
 */
public List<Future<Object>> fitAsync(final List<Tree> trainingBatch) {
  int count = 0;
  List<Future<Object>> futureBatch = new ArrayList<>();
  for (final Tree t : trainingBatch) {
    log.info("Working mini batch " + count++);
    futureBatch.add(
        Futures.future(
            new Callable<Object>() {
              @Override
              public Object call() throws Exception {
                forwardPropagateTree(t);
                try {
                  INDArray params = getParameters();
                  INDArray gradient = getValueGradient(trainingBatch);
                  if (params.length() != gradient.length())
                    throw new IllegalStateException("Params not equal to gradient!");
                  setParams(params.subi(gradient));
                } catch (NegativeArraySizeException e) {
                  log.warn("Couldn't compute parameters due to negative array size for tree " + t);
                }
                return null;
              }
            },
            rnTnActorSystem.dispatcher()));
  }
  return futureBatch;
}
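// Hypothetical caller-side sketch (not part of the original class): gather the futures
// returned by fitAsync and block until the whole mini batch has been processed.
// The names "network", "trainingBatch" and "rnTnActorSystem" are assumed to be in scope,
// inside a method that declares "throws Exception" for Await.result.
List<Future<Object>> jobs = network.fitAsync(trainingBatch);
Future<Iterable<Object>> allJobs = Futures.sequence(jobs, rnTnActorSystem.dispatcher());
Await.result(allJobs, Duration.create(5, TimeUnit.MINUTES)); // rethrows the first failure, if any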
@Override
protected Future<MetadataItem> next() {
  log.debug("next");
  Entry<String, byte[]> entry = t.next();
  return Futures.successful(new MetadataItem(entry.getKey(), entry.getValue()));
}
@Test
public void usePipeWithActorSelection() throws Exception {
  TestProbe probe = new TestProbe(system);
  ActorSelection selection = system.actorSelection(probe.ref().path());
  pipe(Futures.successful("hi!"), system.dispatcher()).to(selection);
  probe.expectMsg("hi!");
}
/**
 * Entry point. Collects lineage for all recently finished Azkaban jobs and writes it to the
 * database staging table.
 *
 * @param timeFrame in minutes
 * @param endTimeStamp in milliseconds
 * @throws Exception
 */
public void run(int timeFrame, long endTimeStamp) throws Exception {
  // get recently finished jobs
  AzJobChecker azJobChecker = new AzJobChecker(prop);
  List<AzkabanJobExecRecord> jobExecList =
      azJobChecker.getRecentFinishedJobFromFlow(timeFrame, endTimeStamp);
  azJobChecker.close();
  logger.info("Total number of azkaban jobs : {}", jobExecList.size());

  ActorSystem actorSystem = ActorSystem.create("LineageExtractor");
  int numOfActor = Integer.valueOf(prop.getProperty(Constant.LINEAGE_ACTOR_NUM, "50"));
  ActorRef lineageExtractorActor =
      actorSystem.actorOf(
          Props.create(AzLineageExtractorActor.class)
              .withRouter(new SmallestMailboxRouter(numOfActor)),
          "lineageExtractorActor");

  // initialize
  // AzkabanServiceCommunicator asc = new AzkabanServiceCommunicator(prop);
  HadoopNameNodeExtractor hnne = new HadoopNameNodeExtractor(prop);
  AzDbCommunicator adc = new AzDbCommunicator(prop);

  String wherehowsUrl = prop.getProperty(Constant.WH_DB_URL_KEY);
  String wherehowsUserName = prop.getProperty(Constant.WH_DB_USERNAME_KEY);
  String wherehowsPassWord = prop.getProperty(Constant.WH_DB_PASSWORD_KEY);
  // The credential portion of the original snippet was redacted; the connection setup below is
  // reconstructed from the properties read above.
  String connUrl =
      wherehowsUrl + "?" + "user=" + wherehowsUserName + "&password=" + wherehowsPassWord;
  Connection conn = DriverManager.getConnection(connUrl);
  DatabaseWriter databaseWriter = new DatabaseWriter(connUrl, "stg_job_execution_data_lineage");

  AzLogParser.initialize(conn);
  PathAnalyzer.initialize(conn);

  int timeout = 30; // default 30 minutes for one job
  if (prop.containsKey(Constant.LINEAGE_ACTOR_TIMEOUT_KEY))
    timeout = Integer.valueOf(prop.getProperty(Constant.LINEAGE_ACTOR_TIMEOUT_KEY));

  List<Future<Object>> result = new ArrayList<>();
  for (AzkabanJobExecRecord aje : jobExecList) {
    AzExecMessage message = new AzExecMessage(aje, prop);
    message.asc = null;
    message.hnne = hnne;
    message.adc = adc;
    message.databaseWriter = databaseWriter;
    message.connection = conn;
    Timeout t = new Timeout(timeout, TimeUnit.SECONDS);
    Future<Object> fut = Patterns.ask(lineageExtractorActor, message, t);
    result.add(fut);
  }

  // join all threads
  Future<Iterable<Object>> seq = Futures.sequence(result, actorSystem.dispatcher());
  try {
    Await.result(seq, Duration.create(timeout + " seconds"));
  } catch (TimeoutException exception) {
    exception.printStackTrace();
  }

  adc.close();
  hnne.close();
  databaseWriter.close();
  logger.info("All job finished lineage collecting!");
}
public static void main(String[] args) throws Exception {
  final ActorSystem system = system();
  final ExecutionContextExecutor dispatcher = system.dispatcher();

  Future<Long> fr = Futures.future(task, dispatcher);
  Future<Long> sc = Futures.future(task, dispatcher);
  Future<Long> th = Futures.future(task, dispatcher);
  Future<Long> fo = Futures.future(task, dispatcher);

  fr.onComplete(complete, dispatcher);
  sc.onComplete(complete, dispatcher);
  th.onComplete(complete, dispatcher);
  fo.onComplete(complete, dispatcher);

  Future<Iterable<Long>> sec = Futures.sequence(Arrays.asList(fr, sc, th, fo), dispatcher);

  Patterns.pipe(sec, dispatcher)
      .to(system.actorOf(Props.create(F.class)))
      .future()
      .ready(Duration.create(20, TimeUnit.SECONDS), null);

  Await.ready(system.terminate(), Duration.Inf());
}
public Future<Instance> startInstanceAsync(AWSCredentials credentials) {
  Future<Instance> f =
      circuitBreaker.callWithCircuitBreaker(
          () -> Futures.future(() -> startInstance(credentials), executionContext));
  PartialFunction<Throwable, Future<Instance>> recovery =
      new PFBuilder<Throwable, Future<Instance>>()
          .match(
              AmazonClientException.class,
              ex -> ex.isRetryable(),
              ex -> startInstanceAsync(credentials))
          .build();
  return f.recoverWith(recovery, executionContext);
}
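// Hedged usage sketch (not from the original source): block for the launched instance with an
// explicit upper bound. Assumes "credentials" is in scope and that the enclosing method declares
// "throws Exception" for Await.result.
Instance instance =
    Await.result(startInstanceAsync(credentials), Duration.create(2, TimeUnit.MINUTES));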
private Future<Iterable<Object>> invokeCohorts(Object message) {
  List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohorts.size());
  for (ActorSelection cohort : cohorts) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Tx {}: Sending {} to cohort {}", transactionId, message, cohort);
    }
    futureList.add(
        actorContext.executeOperationAsync(
            cohort, message, actorContext.getTransactionCommitOperationTimeout()));
  }
  return Futures.sequence(futureList, actorContext.getClientDispatcher());
}
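// Hedged sketch of how the aggregated cohort future might be consumed; "message" is assumed to be
// in scope and the logging is illustrative, not the project's actual code.
invokeCohorts(message)
    .onComplete(
        new OnComplete<Iterable<Object>>() {
          @Override
          public void onComplete(Throwable failure, Iterable<Object> replies) {
            if (failure != null) {
              LOG.error("Tx {}: cohort invocation failed", transactionId, failure);
            } else {
              LOG.debug("Tx {}: all cohorts replied", transactionId);
            }
          }
        },
        actorContext.getClientDispatcher());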
public Future<TerminateInstancesResult> terminateInstancesAsync(
    AmazonEC2Client client, Instance... instances) {
  List<String> ids =
      Arrays.stream(instances).map(i -> i.getInstanceId()).collect(Collectors.toList());
  TerminateInstancesRequest request = new TerminateInstancesRequest(ids);
  Future<TerminateInstancesResult> f =
      circuitBreaker.callWithCircuitBreaker(
          () -> Futures.future(() -> client.terminateInstances(request), executionContext));
  PartialFunction<Throwable, Future<TerminateInstancesResult>> recovery =
      new PFBuilder<Throwable, Future<TerminateInstancesResult>>()
          .match(
              AmazonClientException.class,
              ex -> ex.isRetryable(),
              ex -> terminateInstancesAsync(client, instances))
          .build();
  return f.recoverWith(recovery, executionContext);
}
public Future<RunInstancesResult> runInstancesAsync(
    RunInstancesRequest request, AmazonEC2Async client) {
  Promise<RunInstancesResult> promise = Futures.promise();
  client.runInstancesAsync(
      request,
      new AsyncHandler<RunInstancesRequest, RunInstancesResult>() {
        @Override
        public void onSuccess(RunInstancesRequest request, RunInstancesResult result) {
          promise.success(result);
        }

        @Override
        public void onError(Exception exception) {
          promise.failure(exception);
        }
      });
  return promise.future();
}
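// Hedged usage sketch (the names "ec2Client", "request" and "system" are assumptions): transform
// the completed result into the launched instance ids without blocking.
Future<List<String>> instanceIds =
    runInstancesAsync(request, ec2Client)
        .map(
            new Mapper<RunInstancesResult, List<String>>() {
              @Override
              public List<String> apply(RunInstancesResult result) {
                return result.getReservation().getInstances().stream()
                    .map(Instance::getInstanceId)
                    .collect(Collectors.toList());
              }
            },
            system.dispatcher());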
private Future<Void> buildCohortList() {
  Future<Iterable<ActorSelection>> combinedFutures =
      Futures.sequence(cohortFutures, actorContext.getClientDispatcher());

  return combinedFutures.transform(
      new AbstractFunction1<Iterable<ActorSelection>, Void>() {
        @Override
        public Void apply(Iterable<ActorSelection> actorSelections) {
          cohorts = Lists.newArrayList(actorSelections);
          if (LOG.isDebugEnabled()) {
            LOG.debug("Tx {} successfully built cohort path list: {}", transactionId, cohorts);
          }
          return null;
        }
      },
      TransactionReadyReplyMapper.SAME_FAILURE_TRANSFORMER,
      actorContext.getClientDispatcher());
}
@Override
public Tuple2<GraphStageLogic, CompletionStage<A>> createLogicAndMaterializedValue(
    Attributes inheritedAttributes) {
  Promise<A> promise = Futures.promise();

  GraphStageLogic logic =
      new GraphStageLogic(shape) {
        {
          setHandler(
              in,
              new AbstractInHandler() {
                @Override
                public void onPush() {
                  A elem = grab(in);
                  promise.success(elem);
                  push(out, elem);

                  // replace handler with one just forwarding
                  setHandler(
                      in,
                      new AbstractInHandler() {
                        @Override
                        public void onPush() {
                          push(out, grab(in));
                        }
                      });
                }
              });

          setHandler(
              out,
              new AbstractOutHandler() {
                @Override
                public void onPull() {
                  pull(in);
                }
              });
        }
      };

  return new Tuple2(logic, promise.future());
}
@Override
public F.Promise<Void> delete(String key) {
  Promise<Void> promise = Futures.promise();
  AmazonS3 amazonS3 = new AmazonS3Client(credentials);
  DeleteObjectRequest request = new DeleteObjectRequest(bucketName, key);
  request.withGeneralProgressListener(
      progressEvent -> {
        if (progressEvent.getEventType().isTransferEvent()) {
          if (progressEvent.getEventType().equals(ProgressEventType.TRANSFER_COMPLETED_EVENT)) {
            promise.success(null);
          } else if (progressEvent
              .getEventType()
              .equals(ProgressEventType.TRANSFER_FAILED_EVENT)) {
            promise.failure(new Exception("Delete failed"));
          }
        }
      });
  amazonS3.deleteObject(request);
  return F.Promise.wrap(promise.future());
}
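// Hedged usage sketch (assumes a Play controller context and a store instance named "s3Store",
// neither of which appears in the original snippet): chain a response once the asynchronous
// delete completes.
return s3Store.delete(key).map(done -> ok("deleted " + key));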
public EmailSenderActor(EmailSender emailSender) {
  if (emailSender == null) {
    throw new IllegalArgumentException("emailSender must be defined.");
  }
  receive(
      ReceiveBuilder.match(
              EmailSenderMsg.class,
              msg -> {
                try {
                  emailSender.send(
                      msg.to,
                      msg.cc,
                      msg.ci,
                      msg.object,
                      msg.content,
                      msg.htmlContent,
                      msg.attachments);
                } catch (RuntimeException e) {
                  LOGGER.error(
                      "Unable to send email to '{}', with subject '{}'",
                      Arrays.toString(msg.to.toArray()),
                      msg.object,
                      e);
                  sender().tell(Futures.failed(e), self());
                }
                if (LOGGER.isDebugEnabled()) {
                  LOGGER.debug(
                      "Email sent to '{}' with subject '{}'",
                      Arrays.toString(msg.to.toArray()),
                      msg.object);
                }
                getContext().stop(self());
              })
          .matchAny(this::unhandled)
          .build());
}
public Future<Integer> square(int i) {
  return Futures.successful(i * i);
}
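// Minimal sketch of combining two of the pre-completed futures returned by square(); assumes an
// ActorSystem "system" is in scope for the dispatcher.
Future<Iterable<Integer>> both =
    Futures.sequence(Arrays.asList(square(2), square(3)), system.dispatcher());
// Await.result(both, ...) would yield [4, 9].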
public static void main(String[] args) throws Exception {
  ApplicationContext context = SpringApplication.run(App.class, args);
  ActorSystem system = context.getBean(ActorSystem.class);
  final LoggingAdapter log = Logging.getLogger(system, App.class.getSimpleName());
  log.info("Starting up");

  SpringExtension ext = context.getBean(SpringExtension.class);
  // Use the Spring Extension to create props for a named actor bean
  ActorRef calculator =
      system.actorOf(ext.props("calculator").withMailbox("akka.priority-mailbox"));

  /*
   * Create a completion service instance to await all futures
   */
  final ExecutionContext ec = system.dispatcher();
  Timeout timeout = new Timeout(120, TimeUnit.SECONDS);
  List<Long> ids = new ArrayList<>();
  ArrayList<Future<Object>> futures = new ArrayList<Future<Object>>();

  for (int i = 1; i <= 10; i++) {
    Future<Object> future = Patterns.ask(calculator, new CompilationRequest(i + " + 5"), timeout);
    future.onSuccess(
        new OnSuccess<Object>() {
          public void onSuccess(Object result) {
            if (result instanceof CompilationResult) {
              synchronized (ids) {
                log.debug("Compilation result {} ", result.toString());
                ids.add(((CompilationResult) result).getExpressionId());
              }
            } else {
              log.info("Compilation result is unknown type {} ", result.toString());
            }
          }
        },
        ec);
    futures.add(future);
  }

  Future<Iterable<Object>> seq = Futures.sequence(futures, ec);
  Await.result(seq, Duration.create(30, SECONDS));

  log.info("======================================================");
  log.info("Done waiting for compilations...{} ids", ids.size());
  log.info("======================================================");

  futures.clear();
  long start = System.nanoTime();
  List<Double> results = new ArrayList<>();
  Long count = 1_000_000L;

  for (long i = 1; i <= count; i++) {
    Future<Object> future =
        Patterns.ask(
            calculator, new CalculationRequest(i, ids.get((int) (i % ids.size())), null), timeout);
    future.onSuccess(
        new OnSuccess<Object>() {
          public void onSuccess(Object result) {
            if (result instanceof CalculationResult) {
              log.debug("Calculation result {} ", result.toString());
              // synchronized (results) {
              //   results.add((Double) ((CalculationResult) result).getValue());
              // }
            } else {
              log.info("Calculation result is unknown type {} ", result.toString());
            }
          }
        },
        ec);
    futures.add(future);
  }

  seq = Futures.sequence(futures, ec);
  Await.result(seq, Duration.create(600, SECONDS));

  calculator.tell(PoisonPill.getInstance(), null);
  while (!calculator.isTerminated()) {
    Thread.sleep(100);
  }

  long end = System.nanoTime();
  // int count = context.getBean(JdbcTemplate.class)
  //     .queryForObject("SELECT COUNT(*) FROM tasks", Integer.class);
  Long elapsed = TimeUnit.MILLISECONDS.convert((end - start), TimeUnit.NANOSECONDS);
  Double tps = count.doubleValue() / (elapsed.doubleValue() / Double.parseDouble("1000"));
  log.info("{} calculations in {}ms {}tps", count, elapsed, tps);
  log.info("Shutting down ------------------------------> {}", results.size());

  Thread.sleep(10000);
  system.shutdown();
  system.awaitTermination();
}
@Test
public void usePipe() throws Exception {
  TestProbe probe = new TestProbe(system);
  pipe(Futures.successful("ho!"), system.dispatcher()).to(probe.ref());
  probe.expectMsg("ho!");
}
@Override
public Future<Integer> square(int i) {
  return Futures.successful(cal(i));
}
public static void main(String[] args) {
  Outlet<Integer> outlet = null;
  Outlet<Integer> outlet1 = null;
  Outlet<Integer> outlet2 = null;

  Inlet<Integer> inlet = null;
  Inlet<Integer> inlet1 = null;
  Inlet<Integer> inlet2 = null;

  Flow<Integer, Integer, BoxedUnit> flow = Flow.of(Integer.class);
  Flow<Integer, Integer, BoxedUnit> flow1 = Flow.of(Integer.class);
  Flow<Integer, Integer, BoxedUnit> flow2 = Flow.of(Integer.class);

  Promise<Option<Integer>> promise = null;

  {
    Graph<SourceShape<Integer>, BoxedUnit> graphSource = null;
    Graph<SinkShape<Integer>, BoxedUnit> graphSink = null;
    Graph<FlowShape<Integer, Integer>, BoxedUnit> graphFlow = null;

    // #flow-wrap
    Source<Integer, BoxedUnit> source = Source.fromGraph(graphSource);
    Sink<Integer, BoxedUnit> sink = Sink.fromGraph(graphSink);
    Flow<Integer, Integer, BoxedUnit> aflow = Flow.fromGraph(graphFlow);
    Flow.fromSinkAndSource(Sink.<Integer>head(), Source.single(0));
    Flow.fromSinkAndSourceMat(Sink.<Integer>head(), Source.single(0), Keep.left());
    // #flow-wrap

    Graph<BidiShape<Integer, Integer, Integer, Integer>, BoxedUnit> bidiGraph = null;

    // #bidi-wrap
    BidiFlow<Integer, Integer, Integer, Integer, BoxedUnit> bidiFlow =
        BidiFlow.fromGraph(bidiGraph);
    BidiFlow.fromFlows(flow1, flow2);
    BidiFlow.fromFlowsMat(flow1, flow2, Keep.both());
    // #bidi-wrap
  }

  {
    // #graph-create
    GraphDSL.create(
        builder -> {
          // ...
          return ClosedShape.getInstance();
        });

    GraphDSL.create(
        builder -> {
          // ...
          return new FlowShape<>(inlet, outlet);
        });
    // #graph-create
  }

  {
    // #graph-create-2
    GraphDSL.create(
        builder -> {
          // ...
          return SourceShape.of(outlet);
        });

    GraphDSL.create(
        builder -> {
          // ...
          return SinkShape.of(inlet);
        });

    GraphDSL.create(
        builder -> {
          // ...
          return FlowShape.of(inlet, outlet);
        });

    GraphDSL.create(
        builder -> {
          // ...
          return BidiShape.of(inlet1, outlet1, inlet2, outlet2);
        });
    // #graph-create-2
  }

  {
    // #graph-builder
    GraphDSL.create(
        builder -> {
          builder.from(outlet).toInlet(inlet);
          builder.from(outlet).via(builder.add(flow)).toInlet(inlet);
          builder.from(builder.add(Source.single(0))).to(builder.add(Sink.head()));
          // ...
          return ClosedShape.getInstance();
        });
    // #graph-builder
  }

  // #source-creators
  Source<Integer, Promise<Option<Integer>>> src = Source.<Integer>maybe();
  // Complete the promise with an empty option to emulate the old lazyEmpty
  promise.trySuccess(scala.Option.empty());

  final Source<String, Cancellable> ticks =
      Source.tick(
          FiniteDuration.create(0, TimeUnit.MILLISECONDS),
          FiniteDuration.create(200, TimeUnit.MILLISECONDS),
          "tick");

  final Source<Integer, BoxedUnit> pubSource =
      Source.fromPublisher(TestPublisher.<Integer>manualProbe(true, sys));

  final Source<Integer, BoxedUnit> futSource = Source.fromFuture(Futures.successful(42));

  final Source<Integer, Subscriber<Integer>> subSource = Source.<Integer>asSubscriber();
  // #source-creators

  // #sink-creators
  final Sink<Integer, BoxedUnit> subSink =
      Sink.fromSubscriber(TestSubscriber.<Integer>manualProbe(sys));
  // #sink-creators

  // #sink-as-publisher
  final Sink<Integer, Publisher<Integer>> pubSink = Sink.<Integer>asPublisher(false);
  final Sink<Integer, Publisher<Integer>> pubSinkFanout = Sink.<Integer>asPublisher(true);
  // #sink-as-publisher

  // #empty-flow
  Flow<Integer, Integer, BoxedUnit> emptyFlow = Flow.<Integer>create();
  // or
  Flow<Integer, Integer, BoxedUnit> emptyFlow2 = Flow.of(Integer.class);
  // #empty-flow

  // #flatMapConcat
  Flow.<Source<Integer, BoxedUnit>>create()
      .<Integer, BoxedUnit>flatMapConcat(
          new Function<Source<Integer, BoxedUnit>, Source<Integer, BoxedUnit>>() {
            @Override
            public Source<Integer, BoxedUnit> apply(Source<Integer, BoxedUnit> param)
                throws Exception {
              return param;
            }
          });
  // #flatMapConcat

  Uri uri = null;

  // #raw-query
  final akka.japi.Option<String> theRawQueryString = uri.rawQueryString();
  // #raw-query

  // #query-param
  final akka.japi.Option<String> aQueryParam = uri.query().get("a");
  // #query-param

  // #file-source-sink
  final Source<ByteString, Future<Long>> fileSrc = FileIO.fromFile(new File("."));
  final Source<ByteString, Future<Long>> otherFileSrc = FileIO.fromFile(new File("."), 1024);
  final Sink<ByteString, Future<Long>> fileSink = FileIO.toFile(new File("."));
  // #file-source-sink

  // #input-output-stream-source-sink
  final Source<ByteString, Future<java.lang.Long>> inputStreamSrc =
      StreamConverters.fromInputStream(
          new Creator<InputStream>() {
            public InputStream create() {
              return new SomeInputStream();
            }
          });

  final Source<ByteString, Future<java.lang.Long>> otherInputStreamSrc =
      StreamConverters.fromInputStream(
          new Creator<InputStream>() {
            public InputStream create() {
              return new SomeInputStream();
            }
          },
          1024);

  final Sink<ByteString, Future<java.lang.Long>> outputStreamSink =
      StreamConverters.fromOutputStream(
          new Creator<OutputStream>() {
            public OutputStream create() {
              return new SomeOutputStream();
            }
          });
  // #input-output-stream-source-sink

  // #output-input-stream-source-sink
  final FiniteDuration timeout = FiniteDuration.Zero();

  final Source<ByteString, OutputStream> outputStreamSrc = StreamConverters.asOutputStream();
  final Source<ByteString, OutputStream> otherOutputStreamSrc =
      StreamConverters.asOutputStream(timeout);

  final Sink<ByteString, InputStream> someInputStreamSink = StreamConverters.asInputStream();
  final Sink<ByteString, InputStream> someOtherInputStreamSink =
      StreamConverters.asInputStream(timeout);
  // #output-input-stream-source-sink
}