Example #1
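A doShutdown() override: the worker pool, if one was created, is shut down before delegating to the superclass shutdown.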
 @Override
 protected void doShutdown() throws Exception {
   if (workerPool != null) {
     workerPool.shutdown();
   }
   super.doShutdown();
 }
Example #2
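A doStart() override that acquires no resources of its own; it only logs, at INFO level, that the Kafka producer component has started.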
 /*
  * (non-Javadoc)
  * @see org.apache.camel.impl.DefaultConsumer#doStart()
  */
 @Override
 protected void doStart() throws Exception {
   super.doStart();
   if (LOG.isInfoEnabled()) {
     LOG.info("Kafka Producer Component started");
   }
 }
Example #3
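A doStart() override that opens the connection eagerly at startup unless lazy session creation is configured.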
 @Override
 protected void doStart() throws Exception {
   super.doStart();
   if (!lazySessionCreation) {
     openConnection();
   }
 }
Example #4
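The Nats producer's doStart(): after the superclass has started, it obtains the NATS connection.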
  @Override
  protected void doStart() throws Exception {
    super.doStart();
    LOG.debug("Starting Nats Producer");

    LOG.debug("Getting Nats Connection");
    connection = getConnection();
  }
Example #5
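A doStop() override that logs the connector and address being stopped, closes the connection, and then delegates to the superclass.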
 @Override
 protected void doStop() throws Exception {
   if (LOG.isDebugEnabled()) {
     LOG.debug("Stopping connector: {} at address: {}", connector, address);
   }
   closeConnection();
   super.doStop();
 }
Example #6
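A doStop() override that closes the client, if one was created, before the superclass stop.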
  @Override
  protected void doStop() throws Exception {
    if (client != null) {
      client.close();
    }

    super.doStop();
  }
Example #7
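A doStop() override that shuts down its scheduler through the CamelContext's ExecutorServiceManager and closes the output stream, nulling both fields so a later restart starts clean.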
 @Override
 protected void doStop() throws Exception {
   super.doStop();
   if (scheduler != null) {
     getEndpoint().getCamelContext().getExecutorServiceManager().shutdown(scheduler);
     scheduler = null;
   }
   if (ostream != null) {
     IOHelper.close(ostream, "output stream", log);
     ostream = null;
   }
 }
Example #8
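The Nats producer's doStop(): if the connection is still open it is optionally flushed (honoring the configured flush timeout) and then closed.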
  @Override
  protected void doStop() throws Exception {
    super.doStop();
    LOG.debug("Stopping Nats Producer");

    LOG.debug("Closing Nats Connection");
    if (connection != null && !connection.isClosed()) {
      if (getEndpoint().getNatsConfiguration().isFlushConnection()) {
        LOG.debug("Flushing Nats Connection");
        connection.flush(getEndpoint().getNatsConfiguration().getFlushTimeout());
      }
      connection.close();
    }
  }
Example #9
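The HDFS producer's doStart(): it preserves the JAAS Configuration around startup (Hadoop would otherwise override it), optionally connects to HDFS eagerly, and, when an IDLE split strategy is configured, schedules a recurring idle-check task on a single-thread scheduled executor.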
  @Override
  protected void doStart() throws Exception {
    // need to remember auth as Hadoop will override that, which otherwise means the Auth is broken
    // afterwards
    Configuration auth = HdfsComponent.getJAASConfiguration();
    try {
      super.doStart();

      // setup hdfs if configured to do on startup
      if (getEndpoint().getConfig().isConnectOnStartup()) {
        ostream = setupHdfs(true);
      }

      SplitStrategy idleStrategy = null;
      for (SplitStrategy strategy : config.getSplitStrategies()) {
        if (strategy.type == SplitStrategyType.IDLE) {
          idleStrategy = strategy;
          break;
        }
      }
      if (idleStrategy != null) {
        scheduler =
            getEndpoint()
                .getCamelContext()
                .getExecutorServiceManager()
                .newSingleThreadScheduledExecutor(this, "HdfsIdleCheck");
        log.debug(
            "Creating IdleCheck task scheduled to run every {} millis",
            config.getCheckIdleInterval());
        scheduler.scheduleAtFixedRate(
            new IdleCheck(idleStrategy),
            config.getCheckIdleInterval(),
            config.getCheckIdleInterval(),
            TimeUnit.MILLISECONDS);
      }
    } finally {
      HdfsComponent.setJAASConfiguration(auth);
    }
  }
 @Override
 protected void doStop() throws Exception {
   ServiceHelper.stopService(locks);
   super.doStop();
 }
 @Override
 protected void doStart() throws Exception {
   super.doStart();
   ServiceHelper.startService(locks);
 }
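All of the examples above follow the same lifecycle pattern: acquire resources after calling super.doStart(), and release them around super.doStop() or super.doShutdown(), guarding against a component that never fully started. A minimal sketch of that pattern follows; the class name MyProducer and the plain Executors-based worker pool are illustrative assumptions, not taken from any of the examples above.

 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;

 import org.apache.camel.Endpoint;
 import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultProducer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 // Hypothetical producer illustrating the doStart()/doStop() resource pattern
 public class MyProducer extends DefaultProducer {

   private static final Logger LOG = LoggerFactory.getLogger(MyProducer.class);

   private ExecutorService workerPool;

   public MyProducer(Endpoint endpoint) {
     super(endpoint);
   }

   @Override
   public void process(Exchange exchange) throws Exception {
     // hand the exchange body to the worker pool acquired in doStart()
     final String body = exchange.getIn().getBody(String.class);
     workerPool.submit(() -> LOG.info("Processing body: {}", body));
   }

   @Override
   protected void doStart() throws Exception {
     super.doStart();
     // acquire resources only after the superclass has started
     LOG.debug("Creating worker pool");
     workerPool = Executors.newFixedThreadPool(2);
   }

   @Override
   protected void doStop() throws Exception {
     // release resources, guarding against a producer that never fully started
     if (workerPool != null) {
       workerPool.shutdown();
       workerPool = null;
     }
     super.doStop();
   }
 }

In a real Camel component the thread pool would normally be obtained from the CamelContext's ExecutorServiceManager, as Examples #7 and #9 do, so that Camel can manage its lifecycle.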