@Test
public void test_actor() throws IllegalArgumentException, InterruptedException, ExecutionException, TimeoutException, IOException {
    // Create a bucket
    final DataBucketBean bucket = createBucket("test_actor");

    // Create an actor:
    final ActorRef handler = _db_actor_context.getActorSystem()
            .actorOf(Props.create(DataBucketChangeActor.class), "test_host");
    _db_actor_context.getStreamingEnrichmentMessageBus()
            .subscribe(handler, ActorUtils.STREAMING_ENRICHMENT_EVENT_BUS);

    // Create the inbox:
    final Inbox inbox = Inbox.create(_actor_context.getActorSystem());

    // 1) A message that it will ignore because it's not for this actor
    {
        final BucketActionMessage.DeleteBucketActionMessage delete =
                new BucketActionMessage.DeleteBucketActionMessage(bucket, new HashSet<String>(Arrays.asList("a", "b")));
        inbox.send(handler, delete);
        try {
            inbox.receive(Duration.create(1L, TimeUnit.SECONDS));
            fail("should have timed out");
        }
        catch (Exception e) {
            assertEquals(TimeoutException.class, e.getClass());
        }
    }

    // 2a) Send an offer (ignored, no storm properties)
    {
        try {
            new File(_service_context.getGlobalProperties().local_yarn_config_dir() + File.separator + "storm.yaml").delete();
        }
        catch (Exception e) {
            // (don't care if this fails, probably just the first time through)
        }

        final BucketActionMessage.BucketActionOfferMessage broadcast =
                new BucketActionMessage.BucketActionOfferMessage(bucket);
        _db_actor_context.getStreamingEnrichmentMessageBus()
                .publish(new BucketActionEventBusWrapper(inbox.getRef(), broadcast));

        final Object msg = inbox.receive(Duration.create(5L, TimeUnit.SECONDS));
        assertEquals(BucketActionReplyMessage.BucketActionIgnoredMessage.class, msg.getClass());
    }

    // 2b) Send an offer (accepted, create the file)
    {
        new File(_service_context.getGlobalProperties().local_yarn_config_dir() + File.separator + "storm.yaml").createNewFile();

        final BucketActionMessage.BucketActionOfferMessage broadcast =
                new BucketActionMessage.BucketActionOfferMessage(bucket);
        _db_actor_context.getStreamingEnrichmentMessageBus()
                .publish(new BucketActionEventBusWrapper(inbox.getRef(), broadcast));

        final Object msg = inbox.receive(Duration.create(5L, TimeUnit.SECONDS));
        assertEquals(BucketActionReplyMessage.BucketActionWillAcceptMessage.class, msg.getClass());
    }

    // 3) Send a message, currently just check that it arrives back
    {
        final BucketActionMessage.UpdateBucketActionMessage update =
                new BucketActionMessage.UpdateBucketActionMessage(bucket, false, bucket,
                        new HashSet<String>(Arrays.asList(_actor_context.getInformationService().getHostname())));

        final CompletableFuture<BucketActionReplyMessage> reply4 = AkkaFutureUtils.efficientWrap(
                Patterns.ask(handler, update, 5000L), _db_actor_context.getActorSystem().dispatcher());

        @SuppressWarnings("unused")
        final BucketActionReplyMessage msg4 = reply4.get();
    }
}
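For reference, the createBucket helper used above is not shown in this listing. A minimal sketch of what it might look like, assuming Aleph2's BeanTemplateUtils builder API; the field values and the choice of fields are illustrative only, and the real helper in the test class may populate additional harvest/enrichment configuration:

// Hypothetical sketch of the createBucket(...) test helper (assumes BeanTemplateUtils and
// DataBucketBean.MasterEnrichmentType from the Aleph2 data model); not the actual implementation.
protected DataBucketBean createBucket(final String bucket_name) {
    return BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::_id, bucket_name)
            .with(DataBucketBean::full_name, "/test/" + bucket_name)
            .with(DataBucketBean::master_enrichment_type, DataBucketBean.MasterEnrichmentType.streaming)
            .done().get();
}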
public void start() {
    final String hostname = _local_actor_context.getInformationService().getHostname();
    final int MAX_ZK_ATTEMPTS = 6;

    if (!_core_distributed_services.waitForAkkaJoin(Optional.of(Duration.create(60L, TimeUnit.SECONDS)))) {
        _core_distributed_services.getAkkaSystem().terminate(); // (last ditch attempt to recover)
        throw new RuntimeException("Problem with CDS/Akka, try to terminate");
    }

    ////////////////////////////////////////////////////////////////

    // HARVEST

    if (_service_config.harvest_enabled()) {
        // Create a bucket change actor and register it vs the local message bus
        final ActorRef handler = _local_actor_context.getActorSystem().actorOf(
                Props.create(com.ikanow.aleph2.data_import_manager.harvest.actors.DataBucketHarvestChangeActor.class),
                hostname + ActorNameUtils.HARVEST_BUCKET_CHANGE_SUFFIX);

        _logger.info(ErrorUtils.get("Attaching harvest DataBucketHarvestChangeActor {0} to bus {1}",
                handler, ActorUtils.BUCKET_ACTION_EVENT_BUS));

        _db_actor_context.getBucketActionMessageBus().subscribe(handler, ActorUtils.BUCKET_ACTION_EVENT_BUS);

        _logger.info(ErrorUtils.get("Registering {1} with {0}", ActorUtils.BUCKET_ACTION_ZOOKEEPER, hostname));

        for (int i = 0; i <= MAX_ZK_ATTEMPTS; ++i) {
            try {
                _core_distributed_services.getCuratorFramework().create()
                        .creatingParentsIfNeeded()
                        .withMode(CreateMode.EPHEMERAL)
                        .forPath(ActorUtils.BUCKET_ACTION_ZOOKEEPER + "/" + hostname);
                break;
            }
            catch (Exception e) {
                _logger.warn(ErrorUtils.getLongForm("Failed to register with Zookeeper: {0}, retrying={1}", e, i < MAX_ZK_ATTEMPTS));
                try { Thread.sleep(10000L); } catch (Exception __) {}
            }
        }

        Runtime.getRuntime().addShutdownHook(new Thread(Lambdas.wrap_runnable_u(() -> {
            _logger.info("Shutting down IkanowV1SynchronizationModule subservice=v1_sync_service");
            _core_distributed_services.getCuratorFramework().delete()
                    .deletingChildrenIfNeeded()
                    .forPath(ActorUtils.BUCKET_ACTION_ZOOKEEPER + "/" + hostname);
        })));

        _logger.info("Starting IkanowV1SynchronizationModule subservice=v1_sync_service");
    }

    ////////////////////////////////////////////////////////////////

    // ANALYTICS

    if (_service_config.analytics_enabled()) {
        // Create an analytics bucket change actor and register it vs the local message bus
        final ActorRef analytics_handler = _local_actor_context.getActorSystem().actorOf(
                Props.create(com.ikanow.aleph2.data_import_manager.analytics.actors.DataBucketAnalyticsChangeActor.class),
                hostname + ActorNameUtils.ANALYTICS_BUCKET_CHANGE_SUFFIX);

        _logger.info(ErrorUtils.get("Attaching analytics DataBucketAnalyticsChangeActor {0} to bus {1}",
                analytics_handler, ActorUtils.BUCKET_ANALYTICS_EVENT_BUS));

        _db_actor_context.getAnalyticsMessageBus().subscribe(analytics_handler, ActorUtils.BUCKET_ANALYTICS_EVENT_BUS);

        // Create the trigger supervisor and worker
        final Optional<ActorRef> trigger_supervisor = _core_distributed_services.createSingletonActor(
                hostname + ActorNameUtils.ANALYTICS_TRIGGER_SUPERVISOR_SUFFIX,
                ImmutableSet.<String>builder()
                        .add(DistributedServicesPropertyBean.ApplicationNames.DataImportManager.toString())
                        .build(),
                Props.create(com.ikanow.aleph2.data_import_manager.analytics.actors.AnalyticsTriggerSupervisorActor.class));

        if (!trigger_supervisor.isPresent()) {
            _logger.error("Analytic trigger supervisor didn't start, unknown reason (wrong CDS application_name?)");
        }

        final ActorRef trigger_worker = _local_actor_context.getActorSystem().actorOf(
                Props.create(com.ikanow.aleph2.data_import_manager.analytics.actors.AnalyticsTriggerWorkerActor.class),
                hostname + ActorNameUtils.ANALYTICS_TRIGGER_WORKER_SUFFIX);

        _logger.info(ErrorUtils.get("Attaching analytics AnalyticsTriggerWorkerActor {0} to bus {1}",
                trigger_worker, ActorUtils.ANALYTICS_TRIGGER_BUS));

        _db_actor_context.getAnalyticsTriggerBus().subscribe(trigger_worker, ActorUtils.ANALYTICS_TRIGGER_BUS);

        _logger.info(ErrorUtils.get("Registering {1} with {0}", ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER, hostname));

        for (int i = 0; i <= MAX_ZK_ATTEMPTS; ++i) {
            try {
                _core_distributed_services.getCuratorFramework().create()
                        .creatingParentsIfNeeded()
                        .withMode(CreateMode.EPHEMERAL)
                        .forPath(ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER + "/" + hostname);
                break;
            }
            catch (Exception e) {
                _logger.warn(ErrorUtils.getLongForm("Failed to register with Zookeeper: {0}, retrying={1}", e, i < MAX_ZK_ATTEMPTS));
                try { Thread.sleep(10000L); } catch (Exception __) {}
            }
        }

        Runtime.getRuntime().addShutdownHook(new Thread(Lambdas.wrap_runnable_u(() -> {
            _logger.info("Shutting down IkanowV1SynchronizationModule subservice=analytics");
            _core_distributed_services.getCuratorFramework().delete()
                    .deletingChildrenIfNeeded()
                    .forPath(ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER + "/" + hostname);
        })));

        _logger.info("Starting IkanowV1SynchronizationModule subservice=analytics");
    }

    ////////////////////////////////////////////////////////////////

    // GOVERNANCE

    if (_service_config.governance_enabled()) {
        _core_distributed_services.createSingletonActor(
                hostname + ".governance.actors.DataAgeOutSupervisor",
                ImmutableSet.<String>builder()
                        .add(DistributedServicesPropertyBean.ApplicationNames.DataImportManager.toString())
                        .build(),
                Props.create(DataAgeOutSupervisor.class));

        _logger.info("Starting IkanowV1SynchronizationModule subservice=governance");
    }

    // Keep the main thread alive; the registered actors and shutdown hooks do the actual work
    for (;;) {
        try { Thread.sleep(10000); } catch (Exception e) {}
    }
}
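The harvest and analytics branches above duplicate the ephemeral-node registration/retry loop. A possible refactoring, sketched under the assumption that the Curator calls stay exactly as in the original; the helper name registerWithZookeeper is hypothetical and does not exist in the source:

// Hypothetical helper factoring out the duplicated Zookeeper registration loop above;
// the name and placement are illustrative, the Curator calls are unchanged from the original method.
private void registerWithZookeeper(final String zk_root, final String hostname, final int max_attempts) {
    for (int i = 0; i <= max_attempts; ++i) {
        try {
            _core_distributed_services.getCuratorFramework().create()
                    .creatingParentsIfNeeded()
                    .withMode(CreateMode.EPHEMERAL)
                    .forPath(zk_root + "/" + hostname);
            break;
        }
        catch (Exception e) {
            _logger.warn(ErrorUtils.getLongForm("Failed to register with Zookeeper: {0}, retrying={1}", e, i < max_attempts));
            try { Thread.sleep(10000L); } catch (Exception __) {}
        }
    }
}
// Usage in the two branches:
//   registerWithZookeeper(ActorUtils.BUCKET_ACTION_ZOOKEEPER, hostname, MAX_ZK_ATTEMPTS);
//   registerWithZookeeper(ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER, hostname, MAX_ZK_ATTEMPTS);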