Code example #1
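The DataBucketChangeActor constructor wires its dependencies by hand from the DataImportActorContext singleton: the core distributed services and their Akka system, a read-only view of the core management DB, the global properties, the storage service, and the Storm controller. As its comment notes, these are candidates for being supplied by injection instead.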
 /** The actor constructor - at some point all these things should be inserted by injection */
 public DataBucketChangeActor() {
   _context = DataImportActorContext.get();
   _core_distributed_services = _context.getDistributedServices();
   _actor_system = _core_distributed_services.getAkkaSystem();
   _management_db = _context.getServiceContext().getCoreManagementDbService().readOnlyVersion();
   _globals = _context.getGlobalProperties();
   _fs = _context.getServiceContext().getStorageService();
   _storm_controller = _context.getStormController();
 }
Code example #2
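A JUnit test that drives the actor end to end. It creates a test bucket and a DataBucketChangeActor, subscribes the actor to the streaming enrichment event bus, and then talks to it through an Akka Inbox: (1) a delete message whose handling_clients set names other hosts is ignored, so the inbox receive times out; (2a) a broadcast offer is ignored while storm.yaml is missing from the local YARN config directory; (2b) after the file is created, the same offer is accepted; (3) an update message sent via Patterns.ask is only checked to come back with a reply.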
  @Test
  public void test_actor()
      throws IllegalArgumentException, InterruptedException, ExecutionException, TimeoutException,
          IOException {

    // Create a bucket

    final DataBucketBean bucket = createBucket("test_actor");

    // Create an actor:

    final ActorRef handler =
        _db_actor_context
            .getActorSystem()
            .actorOf(Props.create(DataBucketChangeActor.class), "test_host");
    _db_actor_context
        .getStreamingEnrichmentMessageBus()
        .subscribe(handler, ActorUtils.STREAMING_ENRICHMENT_EVENT_BUS);

    // create the inbox:
    final Inbox inbox = Inbox.create(_actor_context.getActorSystem());

    // 1) A message that it will ignore because it's not for this actor
    {
      final BucketActionMessage.DeleteBucketActionMessage delete =
          new BucketActionMessage.DeleteBucketActionMessage(
              bucket, new HashSet<String>(Arrays.asList("a", "b")));

      inbox.send(handler, delete);
      try {
        inbox.receive(Duration.create(1L, TimeUnit.SECONDS));
        fail("should have timed out");
      } catch (Exception e) {
        assertEquals(TimeoutException.class, e.getClass());
      }
    }

    // 2a) Send an offer (ignored, no storm properties)
    {
      try {
        new File(
                _service_context.getGlobalProperties().local_yarn_config_dir()
                    + File.separator
                    + "storm.yaml")
            .delete();
      } catch (Exception e) {
        // (don't care if fails, probably just first time through)
      }

      final BucketActionMessage.BucketActionOfferMessage broadcast =
          new BucketActionMessage.BucketActionOfferMessage(bucket);

      _db_actor_context
          .getStreamingEnrichmentMessageBus()
          .publish(new BucketActionEventBusWrapper(inbox.getRef(), broadcast));

      final Object msg = inbox.receive(Duration.create(5L, TimeUnit.SECONDS));

      assertEquals(BucketActionReplyMessage.BucketActionIgnoredMessage.class, msg.getClass());
    }

    // 2b) Send an offer (accepted, create file)
    {
      new File(
              _service_context.getGlobalProperties().local_yarn_config_dir()
                  + File.separator
                  + "storm.yaml")
          .createNewFile();

      final BucketActionMessage.BucketActionOfferMessage broadcast =
          new BucketActionMessage.BucketActionOfferMessage(bucket);

      _db_actor_context
          .getStreamingEnrichmentMessageBus()
          .publish(new BucketActionEventBusWrapper(inbox.getRef(), broadcast));

      final Object msg = inbox.receive(Duration.create(5L, TimeUnit.SECONDS));

      assertEquals(BucketActionReplyMessage.BucketActionWillAcceptMessage.class, msg.getClass());
    }

    // 3) Send a message, currently just check it arrives back
    {
      final BucketActionMessage.UpdateBucketActionMessage update =
          new BucketActionMessage.UpdateBucketActionMessage(
              bucket,
              false,
              bucket,
              new HashSet<String>(
                  Arrays.asList(_actor_context.getInformationService().getHostname())));

      final CompletableFuture<BucketActionReplyMessage> reply4 =
          AkkaFutureUtils.efficientWrap(
              Patterns.ask(handler, update, 5000L),
              _db_actor_context.getActorSystem().dispatcher());
      @SuppressWarnings("unused")
      final BucketActionReplyMessage msg4 = reply4.get();
    }
  }
Code example #3
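The data import manager's start() method. It first waits up to 60 seconds for the Akka cluster join, terminating the Akka system and throwing if that fails. It then starts whichever subservices are enabled in the configuration: harvest and analytics each create their bucket change actors (plus, for analytics, a singleton trigger supervisor and a trigger worker), subscribe them to the corresponding event buses, register the host as an ephemeral ZooKeeper node through Curator (retrying up to MAX_ZK_ATTEMPTS times with a 10-second pause), and install a shutdown hook that deletes the node again; governance just creates a singleton DataAgeOutSupervisor. Finally the method sleeps in an infinite loop to keep the process alive.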
  public void start() {
    final String hostname = _local_actor_context.getInformationService().getHostname();
    final int MAX_ZK_ATTEMPTS = 6;

    if (!_core_distributed_services.waitForAkkaJoin(
        Optional.of(Duration.create(60L, TimeUnit.SECONDS)))) {
      _core_distributed_services.getAkkaSystem().terminate(); // (last ditch attempt to recover)
      throw new RuntimeException("Problem with CDS/Akka, try to terminate");
    }

    ////////////////////////////////////////////////////////////////

    // HARVEST

    if (_service_config.harvest_enabled()) {
      // Create a bucket change actor and register it vs the local message bus
      final ActorRef handler =
          _local_actor_context
              .getActorSystem()
              .actorOf(
                  Props.create(
                      com.ikanow.aleph2.data_import_manager.harvest.actors
                          .DataBucketHarvestChangeActor.class),
                  hostname + ActorNameUtils.HARVEST_BUCKET_CHANGE_SUFFIX);

      _logger.info(
          ErrorUtils.get(
              "Attaching harvest DataBucketHarvestChangeActor {0} to bus {1}",
              handler, ActorUtils.BUCKET_ACTION_EVENT_BUS));

      _db_actor_context
          .getBucketActionMessageBus()
          .subscribe(handler, ActorUtils.BUCKET_ACTION_EVENT_BUS);

      _logger.info(
          ErrorUtils.get("Registering {1} with {0}", ActorUtils.BUCKET_ACTION_ZOOKEEPER, hostname));

      for (int i = 0; i <= MAX_ZK_ATTEMPTS; ++i) {
        try {
          _core_distributed_services
              .getCuratorFramework()
              .create()
              .creatingParentsIfNeeded()
              .withMode(CreateMode.EPHEMERAL)
              .forPath(ActorUtils.BUCKET_ACTION_ZOOKEEPER + "/" + hostname);
          break;
        } catch (Exception e) {
          _logger.warn(
              ErrorUtils.getLongForm(
                  "Failed to register with Zookeeper: {0}, retrying={1}", e, i < MAX_ZK_ATTEMPTS));
          try {
            Thread.sleep(10000L);
          } catch (Exception __) {
          }
        }
      }
      Runtime.getRuntime()
          .addShutdownHook(
              new Thread(
                  Lambdas.wrap_runnable_u(
                      () -> {
                        _logger.info(
                            "Shutting down IkanowV1SynchronizationModule subservice=v1_sync_service");
                        _core_distributed_services
                            .getCuratorFramework()
                            .delete()
                            .deletingChildrenIfNeeded()
                            .forPath(ActorUtils.BUCKET_ACTION_ZOOKEEPER + "/" + hostname);
                      })));
      _logger.info("Starting IkanowV1SynchronizationModule subservice=v1_sync_service");
    }

    ////////////////////////////////////////////////////////////////

    // ANALYTICS

    if (_service_config.analytics_enabled()) {
      // Create an analytics bucket change actor and register it vs the local message bus
      final ActorRef analytics_handler =
          _local_actor_context
              .getActorSystem()
              .actorOf(
                  Props.create(
                      com.ikanow.aleph2.data_import_manager.analytics.actors
                          .DataBucketAnalyticsChangeActor.class),
                  hostname + ActorNameUtils.ANALYTICS_BUCKET_CHANGE_SUFFIX);

      _logger.info(
          ErrorUtils.get(
              "Attaching analytics DataBucketAnalyticsChangeActor {0} to bus {1}",
              analytics_handler, ActorUtils.BUCKET_ANALYTICS_EVENT_BUS));

      _db_actor_context
          .getAnalyticsMessageBus()
          .subscribe(analytics_handler, ActorUtils.BUCKET_ANALYTICS_EVENT_BUS);

      // Create trigger supervisor and worker

      final Optional<ActorRef> trigger_supervisor =
          _core_distributed_services.createSingletonActor(
              hostname + ActorNameUtils.ANALYTICS_TRIGGER_SUPERVISOR_SUFFIX,
              ImmutableSet.<String>builder()
                  .add(
                      DistributedServicesPropertyBean.ApplicationNames.DataImportManager.toString())
                  .build(),
              Props.create(
                  com.ikanow.aleph2.data_import_manager.analytics.actors
                      .AnalyticsTriggerSupervisorActor.class));

      if (!trigger_supervisor.isPresent()) {
        _logger.error(
            "Analytic trigger supervisor didn't start, unknown reason (wrong CDS application_name?)");
      }

      final ActorRef trigger_worker =
          _local_actor_context
              .getActorSystem()
              .actorOf(
                  Props.create(
                      com.ikanow.aleph2.data_import_manager.analytics.actors
                          .AnalyticsTriggerWorkerActor.class),
                  hostname + ActorNameUtils.ANALYTICS_TRIGGER_WORKER_SUFFIX);

      _logger.info(
          ErrorUtils.get(
              "Attaching analytics AnalyticsTriggerWorkerActor {0} to bus {1}",
              trigger_worker, ActorUtils.ANALYTICS_TRIGGER_BUS));

      _db_actor_context
          .getAnalyticsTriggerBus()
          .subscribe(trigger_worker, ActorUtils.ANALYTICS_TRIGGER_BUS);

      _logger.info(
          ErrorUtils.get(
              "Registering {1} with {0}", ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER, hostname));

      for (int i = 0; i <= MAX_ZK_ATTEMPTS; ++i) {
        try {
          _core_distributed_services
              .getCuratorFramework()
              .create()
              .creatingParentsIfNeeded()
              .withMode(CreateMode.EPHEMERAL)
              .forPath(ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER + "/" + hostname);
          break;
        } catch (Exception e) {
          _logger.warn(
              ErrorUtils.getLongForm(
                  "Failed to register with Zookeeper: {0}, retrying={1}", e, i < MAX_ZK_ATTEMPTS));
          try {
            Thread.sleep(10000L);
          } catch (Exception __) {
          }
        }
      }
      Runtime.getRuntime()
          .addShutdownHook(
              new Thread(
                  Lambdas.wrap_runnable_u(
                      () -> {
                        _logger.info(
                            "Shutting down IkanowV1SynchronizationModule subservice=analytics");
                        _core_distributed_services
                            .getCuratorFramework()
                            .delete()
                            .deletingChildrenIfNeeded()
                            .forPath(ActorUtils.BUCKET_ANALYTICS_ZOOKEEPER + "/" + hostname);
                      })));
      _logger.info("Starting IkanowV1SynchronizationModule subservice=analytics");
    }

    ////////////////////////////////////////////////////////////////

    // GOVERNANCE

    if (_service_config.governance_enabled()) {
      _core_distributed_services.createSingletonActor(
          hostname + ".governance.actors.DataAgeOutSupervisor",
          ImmutableSet.<String>builder()
              .add(DistributedServicesPropertyBean.ApplicationNames.DataImportManager.toString())
              .build(),
          Props.create(DataAgeOutSupervisor.class));

      _logger.info("Starting IkanowV1SynchronizationModule subservice=governance");
    }
    for (; ; ) {
      try {
        Thread.sleep(10000);
      } catch (Exception e) {
      }
    }
  }
Code example #4
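The actor's receive() method, built with Akka's ReceiveBuilder. The first clause is a guard: any BucketActionMessage whose handling_clients set is non-empty and does not contain this hostname is consumed without action. Offer messages are answered synchronously, accepting if storm.yaml exists in the local YARN config directory and ignoring otherwise. Every other BucketActionMessage is handled asynchronously: the bucket's jars are cached, the streaming enrichment topology is resolved, talkToStream submits it to Storm, the reply type is logged, and the reply (or an error bean built in the exceptionally handler) is sent back to the sender captured before the async chain started.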
  /* (non-Javadoc)
   * @see akka.actor.AbstractActor#receive()
   */
  @Override
  public PartialFunction<Object, BoxedUnit> receive() {
    return ReceiveBuilder.match(
            BucketActionMessage.class,
            m ->
                !m.handling_clients().isEmpty()
                    && !m.handling_clients()
                        .contains(_context.getInformationService().getHostname()),
            __ -> {}) // (do nothing if it's not for me)
        .match(
            BucketActionOfferMessage.class,
            m -> {
              _logger.info(
                  ErrorUtils.get(
                      "Actor {0} received message {1} from {2} bucket {3}",
                      this.self(),
                      m.getClass().getSimpleName(),
                      this.sender(),
                      m.bucket().full_name()));

              final ActorRef closing_sender = this.sender();
              final ActorRef closing_self = this.self();

              final String hostname = _context.getInformationService().getHostname();

              // (this isn't async so doesn't require any futures)

              final boolean accept_or_ignore =
                  new File(_globals.local_yarn_config_dir() + File.separator + "storm.yaml")
                      .exists();

              final BucketActionReplyMessage reply =
                  accept_or_ignore
                      ? new BucketActionReplyMessage.BucketActionWillAcceptMessage(hostname)
                      : new BucketActionReplyMessage.BucketActionIgnoredMessage(hostname);

              closing_sender.tell(reply, closing_self);
            })
        .match(
            BucketActionMessage.class,
            m -> {
              _logger.info(
                  ErrorUtils.get(
                      "Actor {0} received message {1} from {2} bucket={3}",
                      this.self(),
                      m.getClass().getSimpleName(),
                      this.sender(),
                      m.bucket().full_name()));

              final ActorRef closing_sender = this.sender();
              final ActorRef closing_self = this.self();

              final String hostname = _context.getInformationService().getHostname();

              // (cacheJars can't throw checked or unchecked in this thread, only from within
              // exceptions)
              cacheJars(
                      m.bucket(),
                      _management_db,
                      _globals,
                      _fs,
                      _context.getServiceContext(),
                      hostname,
                      m)
                  .thenComposeAsync(
                      err_or_map -> {
                        final StreamingEnrichmentContext e_context =
                            _context.getNewStreamingEnrichmentContext();

                        final Validation<BasicMessageBean, IEnrichmentStreamingTopology>
                            err_or_tech_module =
                                getStreamingTopology(m.bucket(), m, hostname, err_or_map);

                        final CompletableFuture<BucketActionReplyMessage> ret =
                            talkToStream(
                                _storm_controller,
                                m.bucket(),
                                m,
                                err_or_tech_module,
                                err_or_map,
                                hostname,
                                e_context,
                                _globals.local_yarn_config_dir(),
                                _globals.local_cached_jar_dir());
                        return ret;
                      })
                  .thenAccept(
                      reply -> { // (reply can contain an error or successful reply, they're the
                        // same bean type)
                        // Some information logging:
                        Patterns.match(reply)
                            .andAct()
                            .when(
                                BucketActionHandlerMessage.class,
                                msg ->
                                    _logger.info(
                                        ErrorUtils.get(
                                            "Standard reply to message={0}, bucket={1}, success={2}",
                                            m.getClass().getSimpleName(),
                                            m.bucket().full_name(),
                                            msg.reply().success())))
                            .when(
                                BucketActionReplyMessage.BucketActionWillAcceptMessage.class,
                                msg ->
                                    _logger.info(
                                        ErrorUtils.get(
                                            "Standard reply to message={0}, bucket={1}",
                                            m.getClass().getSimpleName(), m.bucket().full_name())))
                            .otherwise(
                                msg ->
                                    _logger.info(
                                        ErrorUtils.get(
                                            "Unusual reply to message={0}, type={2}, bucket={1}",
                                            m.getClass().getSimpleName(),
                                            m.bucket().full_name(),
                                            msg.getClass().getSimpleName())));

                        closing_sender.tell(reply, closing_self);
                      })
                  .exceptionally(
                      e -> { // another bit of error handling that shouldn't ever be called but is a
                        // useful backstop
                        // Some information logging:
                        _logger.warn(
                            "Unexpected error replying to '{0}': error = {1}, bucket={2}",
                            BeanTemplateUtils.toJson(m).toString(),
                            ErrorUtils.getLongForm("{0}", e),
                            m.bucket().full_name());

                        final BasicMessageBean error_bean =
                            SharedErrorUtils.buildErrorMessage(
                                hostname,
                                m,
                                ErrorUtils.getLongForm(
                                    StreamErrorUtils.STREAM_UNKNOWN_ERROR,
                                    e,
                                    m.bucket().full_name()));
                        closing_sender.tell(
                            new BucketActionHandlerMessage(hostname, error_bean), closing_self);
                        return null;
                      });
            })
        .build();
  }
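
The guard-then-handle layout in code example #4 is a general ReceiveBuilder idiom: an early match clause with a predicate consumes messages the actor should not act on, so the later clauses only ever see messages intended for this node. The following is a minimal sketch of that idiom on its own, assuming the same classic Akka AbstractActor#receive() API used in the listings; the GuardedActor and HostedMessage types and the hard-coded hostname are hypothetical, not part of Aleph2.

import akka.actor.AbstractActor;
import akka.japi.pf.ReceiveBuilder;
import scala.PartialFunction;
import scala.runtime.BoxedUnit;

public class GuardedActor extends AbstractActor {

  /** Hypothetical message type that records which host it is addressed to. */
  public static class HostedMessage {
    public final String target_host;

    public HostedMessage(final String target_host) {
      this.target_host = target_host;
    }
  }

  // Stand-in for _context.getInformationService().getHostname() in the real actor
  private final String _hostname = "this-host";

  @Override
  public PartialFunction<Object, BoxedUnit> receive() {
    return ReceiveBuilder
        // Guard clause: silently consume messages addressed to a different host
        .match(HostedMessage.class, m -> !_hostname.equals(m.target_host), __ -> {})
        // Only messages for this host reach this clause; reply to the sender
        .match(HostedMessage.class, m -> sender().tell("handled on " + _hostname, self()))
        .build();
  }
}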