Example #1
  @Test
  public void shouldIndexFiveEventsOverThreeBatches() throws Exception {
    parameters.put(BATCH_SIZE, "2");
    Configurables.configure(fixture, new Context(parameters));
    Channel channel = bindAndStartChannel(fixture);

    int numberOfEvents = 5;
    Event[] events = new Event[numberOfEvents];

    Transaction tx = channel.getTransaction();
    tx.begin();
    for (int i = 0; i < numberOfEvents; i++) {
      String body = "event #" + i + " of " + numberOfEvents;
      Event event = EventBuilder.withBody(body.getBytes());
      events[i] = event;
      channel.put(event);
    }
    tx.commit();
    tx.close();

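    // Drain the sink until it reports BACKOFF; five events at a batch size of 2 should take three process() calls (2 + 2 + 1).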
    int count = 0;
    Status status = Status.READY;
    while (status != Status.BACKOFF) {
      count++;
      status = fixture.process();
    }
    fixture.stop();

    assertEquals(3, count);

    client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();
    assertMatchAllQuery(numberOfEvents, events);
    assertBodyQuery(5, events);
  }
  @Test(groups = "dev")
  public void autoWrapTest() throws EventDeliveryException {
    ctx.put(MongoSink.AUTO_WRAP, Boolean.toString(true));
    ctx.put(MongoSink.DB_NAME, "test_wrap");

    MongoSink sink = new MongoSink();
    Configurables.configure(sink, ctx);

    sink.setChannel(channel);
    sink.start();

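    // Put a single raw (non-JSON) log line on the channel; with AUTO_WRAP enabled the sink is expected to wrap it in the default wrap field.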
    Transaction tx = channel.getTransaction();
    tx.begin();
    String msg =
        "2012/10/26 11:23:08 [error] 7289#0: *6430831 open() \"/usr/local/nginx/html/50x.html\" failed (2: No such file or directory), client: 10.160.105.161, server: sg15.redatoms.com, request: \"POST /mojo/ajax/embed HTTP/1.0\", upstream: \"fastcgi://unix:/tmp/php-fpm.sock:\", host: \"sg15.redatoms.com\", referrer: \"http://sg15.redatoms.com/mojo/mobile/package\"";

    Event e = EventBuilder.withBody(msg.getBytes());
    channel.put(e);
    tx.commit();
    tx.close();

    sink.process();
    sink.stop();

    DB db = mongo.getDB("test_wrap");
    DBCollection collection = db.getCollection("test_log");
    DBCursor cursor = collection.find(new BasicDBObject(MongoSink.DEFAULT_WRAP_FIELD, msg));
    assertTrue(cursor.hasNext());
    DBObject dbObject = cursor.next();
    assertNotNull(dbObject);
    assertEquals(dbObject.get(MongoSink.DEFAULT_WRAP_FIELD), msg);
    mongo.dropDatabase("test_wrap");
  }
Example #3
  @Test
  public void shouldAllowCustomElasticSearchIndexRequestBuilderFactory() throws Exception {
    parameters.put(SERIALIZER, CustomElasticSearchIndexRequestBuilderFactory.class.getName());

    fixture.configure(new Context(parameters));

    Channel channel = bindAndStartChannel(fixture);
    Transaction tx = channel.getTransaction();
    tx.begin();
    String body = "{ foo: \"bar\" }";
    Event event = EventBuilder.withBody(body.getBytes());
    channel.put(event);
    tx.commit();
    tx.close();

    fixture.process();
    fixture.stop();

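    // The custom factory records the arguments it was invoked with; verify the index name, index type, event body and context were passed through.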
    assertEquals(
        fixture.getIndexName() + "-05_17_36_789",
        CustomElasticSearchIndexRequestBuilderFactory.actualIndexName);
    assertEquals(
        fixture.getIndexType(), CustomElasticSearchIndexRequestBuilderFactory.actualIndexType);
    assertArrayEquals(
        event.getBody(), CustomElasticSearchIndexRequestBuilderFactory.actualEventBody);
    assertTrue(CustomElasticSearchIndexRequestBuilderFactory.hasContext);
  }
  @Test(groups = "dev")
  public void sinkSingleModelTest() throws EventDeliveryException {
    ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.single.name());

    MongoSink sink = new MongoSink();
    Configurables.configure(sink, ctx);

    sink.setChannel(channel);
    sink.start();

    Transaction tx = channel.getTransaction();
    tx.begin();
    JSONObject msg = new JSONObject();
    msg.put("name", "test");
    msg.put("age", 11);
    msg.put("birthday", new Date().getTime());

    Event e = EventBuilder.withBody(msg.toJSONString().getBytes());
    channel.put(e);
    tx.commit();
    tx.close();

    sink.process();
    sink.stop();

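    // Look up the stored document and compare it field by field with the original JSON message.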
    DB db = mongo.getDB("test_events");
    DBCollection collection = db.getCollection("test_log");
    DBCursor cursor = collection.find(new BasicDBObject(msg));
    assertTrue(cursor.hasNext());
    DBObject dbObject = cursor.next();
    assertNotNull(dbObject);
    assertEquals(dbObject.get("name"), msg.get("name"));
    assertEquals(dbObject.get("age"), msg.get("age"));
    assertEquals(dbObject.get("birthday"), msg.get("birthday"));
  }
Example #5
  @Test
  public void shouldIndexFiveEvents() throws Exception {
    // Make it so we only need to call process once
    parameters.put(BATCH_SIZE, "5");
    Configurables.configure(fixture, new Context(parameters));
    Channel channel = bindAndStartChannel(fixture);

    int numberOfEvents = 5;
    Event[] events = new Event[numberOfEvents];

    Transaction tx = channel.getTransaction();
    tx.begin();
    for (int i = 0; i < numberOfEvents; i++) {
      String body = "event #" + i + " of " + numberOfEvents;
      Event event = EventBuilder.withBody(body.getBytes());
      events[i] = event;
      channel.put(event);
    }
    tx.commit();
    tx.close();

    fixture.process();
    fixture.stop();
    client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

    assertMatchAllQuery(numberOfEvents, events);
    assertBodyQuery(5, events);
  }
  @Test(groups = "dev")
  public void timestampExistingFieldTest() throws EventDeliveryException, ParseException {
    ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.dynamic.name());
    String tsField = "createdOn";
    ctx.put(MongoSink.TIMESTAMP_FIELD, tsField);
    MongoSink sink = new MongoSink();
    Configurables.configure(sink, ctx);

    sink.setChannel(channel);
    sink.start();

    JSONObject msg = new JSONObject();
    msg.put("age", 11);
    msg.put("birthday", new Date().getTime());
    String dateText = "2013-02-19T14:20:53+08:00";
    msg.put(tsField, dateText);

    Transaction tx;

    for (int i = 0; i < 10; i++) {
      tx = channel.getTransaction();
      tx.begin();
      msg.put("name", "test" + i);
      JSONObject header = new JSONObject();
      header.put(MongoSink.COLLECTION, "my_events");
      header.put(MongoSink.DB_NAME, "dynamic_db");

      Event e = EventBuilder.withBody(msg.toJSONString().getBytes(), header);
      channel.put(e);
      tx.commit();
      tx.close();
    }
    sink.process();
    sink.stop();

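    // The sink should have parsed the ISO-8601 timestamp string into a java.util.Date; query using the converted value.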
    msg.put(tsField, MongoSink.dateTimeFormatter.parseDateTime(dateText).toDate());
    for (int i = 0; i < 10; i++) {
      msg.put("name", "test" + i);

      System.out.println("i = " + i);

      DB db = mongo.getDB("dynamic_db");
      DBCollection collection = db.getCollection("my_events");
      DBCursor cursor = collection.find(new BasicDBObject(msg));
      assertTrue(cursor.hasNext());
      DBObject dbObject = cursor.next();
      assertNotNull(dbObject);
      assertEquals(dbObject.get("name"), msg.get("name"));
      assertEquals(dbObject.get("age"), msg.get("age"));
      assertEquals(dbObject.get("birthday"), msg.get("birthday"));
      assertTrue(dbObject.get(tsField) instanceof Date);
      System.out.println("ts = " + dbObject.get(tsField));
    }
  }
  @Test(groups = "dev")
  public void sinkDynamicDbTest() throws EventDeliveryException {
    ctx.put(MongoSink.MODEL, MongoSink.CollectionModel.dynamic.name());
    MongoSink sink = new MongoSink();
    Configurables.configure(sink, ctx);

    sink.setChannel(channel);
    sink.start();

    JSONObject msg = new JSONObject();
    msg.put("age", 11);
    msg.put("birthday", new Date().getTime());

    Transaction tx;

    for (int i = 0; i < 10; i++) {
      tx = channel.getTransaction();
      tx.begin();
      msg.put("name", "test" + i);
      JSONObject header = new JSONObject();
      header.put(MongoSink.COLLECTION, "my_events");
      header.put(MongoSink.DB_NAME, "dynamic_db");

      Event e = EventBuilder.withBody(msg.toJSONString().getBytes(), header);
      channel.put(e);
      tx.commit();
      tx.close();
    }
    sink.process();
    sink.stop();

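    // In dynamic mode the target database and collection come from the event headers; verify each document landed in dynamic_db.my_events.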
    for (int i = 0; i < 10; i++) {
      msg.put("name", "test" + i);

      System.out.println("i = " + i);

      DB db = mongo.getDB("dynamic_db");
      DBCollection collection = db.getCollection("my_events");
      DBCursor cursor = collection.find(new BasicDBObject(msg));
      assertTrue(cursor.hasNext());
      DBObject dbObject = cursor.next();
      assertNotNull(dbObject);
      assertEquals(dbObject.get("name"), msg.get("name"));
      assertEquals(dbObject.get("age"), msg.get("age"));
      assertEquals(dbObject.get("birthday"), msg.get("birthday"));
    }
  }
  @Test
  public void testRollback() throws Exception {
    // put an item and commit
    putEvents(channel, "rollback", 1, 50);

    Transaction transaction;
    // put an item that we will roll back
    transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("this is going to be rolledback".getBytes(Charsets.UTF_8)));
    transaction.rollback();
    transaction.close();

    // simulate crash
    channel.stop();
    channel = createFileChannel();

    // get the item which was not rolled back
    transaction = channel.getTransaction();
    transaction.begin();
    Event event = channel.take();
    transaction.commit();
    transaction.close();
    Assert.assertNotNull(event);
    Assert.assertEquals("rollback-0-0", new String(event.getBody(), Charsets.UTF_8));
  }
Example #9
  @Test
  public void shouldIndexOneEvent() throws Exception {
    Configurables.configure(fixture, new Context(parameters));
    Channel channel = bindAndStartChannel(fixture);

    Transaction tx = channel.getTransaction();
    tx.begin();
    Event event = EventBuilder.withBody("event #1 of 1".getBytes());
    channel.put(event);
    tx.commit();
    tx.close();

    fixture.process();
    fixture.stop();
    client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

    assertMatchAllQuery(1, event);
    assertBodyQuery(1, event);
  }
Example #10
  @Test
  public void testCensor() {

    MemoryChannel memCh = new MemoryChannel();
    memCh.configure(new Context());
    memCh.start();

    ChannelSelector cs = new ReplicatingChannelSelector();
    cs.setChannels(Lists.<Channel>newArrayList(memCh));
    ChannelProcessor cp = new ChannelProcessor(cs);

    // source config
    Map<String, String> cfgMap = Maps.newHashMap();
    cfgMap.put("interceptors", "a");
    String builderClass = CensoringInterceptor.Builder.class.getName();
    cfgMap.put("interceptors.a.type", builderClass);
    Context ctx = new Context(cfgMap);

    // setup
    cp.configure(ctx);
    cp.initialize();

    Map<String, String> headers = Maps.newHashMap();
    String badWord = "scribe";
    headers.put("Bad-Words", badWord);
    Event event1 = EventBuilder.withBody("test", Charsets.UTF_8, headers);
    Assert.assertEquals(badWord, event1.getHeaders().get("Bad-Words"));
    cp.processEvent(event1);

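    // Take the event back out of the memory channel; the interceptor should have removed the Bad-Words header.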
    Transaction tx = memCh.getTransaction();
    tx.begin();

    Event event1a = memCh.take();
    Assert.assertNull(event1a.getHeaders().get("Bad-Words"));

    tx.commit();
    tx.close();

    // cleanup / shutdown
    cp.close();
    memCh.stop();
  }
  @Test
  public void testRollbackWithSink() throws Exception {
    final NullSink nullSink = new NullSink();
    Context ctx = new Context();
    ctx.put("batchSize", "1");
    nullSink.configure(ctx);
    nullSink.setChannel(channel);
    final int numItems = 99;
    Thread t =
        new Thread() {
          @Override
          public void run() {
            int count = 0;
            while (count++ < numItems) {
              try {
                nullSink.process();
                Thread.sleep(1);
              } catch (EventDeliveryException e) {
                break;
              } catch (Exception e) {
                Throwables.propagate(e);
              }
            }
          }
        };
    t.setDaemon(true);
    t.setName("NullSink");
    t.start();

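    // Put 100 events (10 batches of 10) on the channel while the NullSink thread drains at most 99 of them.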
    putEvents(channel, "rollback", 10, 100);

    Transaction transaction;
    // put an item that we will roll back
    transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("this is going to be rolledback".getBytes(Charsets.UTF_8)));
    transaction.rollback();
    transaction.close();

    while (t.isAlive()) {
      Thread.sleep(1);
    }

    // simulate crash
    channel.stop();
    channel = createFileChannel();

    // get the item which was not rolled back
    transaction = channel.getTransaction();
    transaction.begin();
    Event event = channel.take();
    transaction.commit();
    transaction.close();
    Assert.assertNotNull(event);
    Assert.assertEquals("rollback-90-9", new String(event.getBody(), Charsets.UTF_8));
  }
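 // Takes up to numEvents events from the channel, one event per transaction, and returns their bodies; stops early once the channel is empty.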
 private static List<String> takeEvents(Channel channel, int batchSize, int numEvents)
     throws Exception {
   List<String> result = Lists.newArrayList();
   for (int i = 0; i < numEvents; i += batchSize) {
     for (int j = 0; j < batchSize; j++) {
       Transaction transaction = channel.getTransaction();
       transaction.begin();
       try {
         Event event = channel.take();
         if (event == null) {
           transaction.commit();
           return result;
         }
         result.add(new String(event.getBody(), Charsets.UTF_8));
         transaction.commit();
       } catch (Exception ex) {
         transaction.rollback();
         throw ex;
       } finally {
         transaction.close();
       }
     }
   }
   return result;
 }
Example #13
  //    @Ignore
  @Test
  public void test() throws EventDeliveryException, UnsupportedEncodingException {
    Transaction tx = channel.getTransaction();
    tx.begin();

    ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance);
    jsonBody.put("myString", "foo");
    jsonBody.put("myInt32", 32);

    Map<String, String> headers = new HashMap<String, String>();
    headers.put("myString", "bar");
    headers.put("myInt64", "64");
    headers.put("myBoolean", "true");
    headers.put("myDouble", "1.0");
    headers.put("myNull", "foobar");

    Event event = EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers);
    channel.put(event);

    tx.commit();
    tx.close();

    kafkaSink.process();

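    // Fetch from partition 0 of the "test" topic; each payload should equal the JSON body that was put on the channel.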
    kafka.api.FetchRequest req =
        new FetchRequestBuilder().clientId(CLIENT_ID).addFetch("test", 0, 0L, 100).build();
    FetchResponse fetchResponse = simpleConsumer.fetch(req);
    ByteBufferMessageSet messageSet = fetchResponse.messageSet("test", 0);

    //        Assert.assertTrue(messageSet.sizeInBytes() > 0);
    for (MessageAndOffset messageAndOffset : messageSet) {
      ByteBuffer payload = messageAndOffset.message().payload();
      byte[] bytes = new byte[payload.limit()];
      payload.get(bytes);
      String message = new String(bytes, "UTF-8");
      Assert.assertNotNull(message);
      Assert.assertEquals(message, "{\"myString\":\"foo\",\"myInt32\":32}");
    }
  }
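 // Puts numEvents events with bodies of the form prefix-i-j, one event per transaction, and returns the list of bodies written.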
 private static List<String> putEvents(
     Channel channel, String prefix, int batchSize, int numEvents) throws Exception {
   List<String> result = Lists.newArrayList();
   for (int i = 0; i < numEvents; i += batchSize) {
     for (int j = 0; j < batchSize; j++) {
       Transaction transaction = channel.getTransaction();
       transaction.begin();
       try {
         String s = prefix + "-" + i + "-" + j;
         Event event = EventBuilder.withBody(s.getBytes(Charsets.UTF_8));
         result.add(s);
         channel.put(event);
         transaction.commit();
       } catch (Exception ex) {
         transaction.rollback();
         throw ex;
       } finally {
         transaction.close();
       }
     }
   }
   return result;
 }
Example #15
  private Status parseEvents() throws EventDeliveryException {
    Status status = Status.READY;
    Channel channel = getChannel();
    Transaction tx = null;
    Map<String, List<DBObject>> eventMap = new HashMap<String, List<DBObject>>();
    try {
      tx = channel.getTransaction();
      tx.begin();

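      // Take up to batchSize events and accumulate them in eventMap so they can be written with a single saveEvents() call.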
      for (int i = 0; i < batchSize; i++) {
        Event event = channel.take();
        if (event == null) {
          status = Status.BACKOFF;
          break;
        } else {
          processEvent(eventMap, event);
        }
      }
      if (!eventMap.isEmpty()) {
        saveEvents(eventMap);
      }

      tx.commit();
    } catch (Exception e) {
      logger.error("can't process events, drop it!", e);
      if (tx != null) {
        tx.commit(); // commit anyway to drop the bad events; otherwise the sink would retry them in an endless loop
      }

      throw new EventDeliveryException(e);
    } finally {
      if (tx != null) {
        tx.close();
      }
    }
    return status;
  }
Example #16
  @Test
  public void shouldIndexComplexJsonEvent() throws Exception {
    Configurables.configure(fixture, new Context(parameters));
    Channel channel = bindAndStartChannel(fixture);

    Transaction tx = channel.getTransaction();
    tx.begin();
    Event event = EventBuilder.withBody("{\"event\":\"json content\",\"num\":1}".getBytes());
    channel.put(event);
    tx.commit();
    tx.close();

    fixture.process();
    fixture.stop();
    client.admin().indices().refresh(Requests.refreshRequest(timestampedIndexName)).actionGet();

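    // The JSON body should be indexed as structured fields, so both a match-all query and a field query on @message.event find the event.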
    Map<String, Object> expectedBody = new HashMap<String, Object>();
    expectedBody.put("event", "json content");
    expectedBody.put("num", 1);

    assertSearch(1, performSearch(QueryBuilders.matchAllQuery()), expectedBody, event);
    assertSearch(
        1, performSearch(QueryBuilders.fieldQuery("@message.event", "json")), expectedBody, event);
  }
  /** {@inheritDoc} */
  @Override
  public Status process() throws EventDeliveryException {

    Status status = Status.BACKOFF;
    final Channel channel = getChannel();
    final Transaction txn = channel.getTransaction();
    txn.begin();
    try {
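      // Take up to batchSize events and forward them to Scribe in a single Log() call.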
      Event event = channel.take();
      if (event != null) {
        final List<LogEntry> logEntries = new ArrayList<LogEntry>(batchSize);
        logEntries.add(create(event));
        int count = 1;
        while (count < batchSize && (event = channel.take()) != null) {
          count++;
          logEntries.add(create(event));
        }

        client.Log(logEntries);
        sinkCounter.incrementBatchCompleteCount();
        status = Status.READY;
      } else {
        sinkCounter.incrementBatchEmptyCount();
      }
      txn.commit();
    } catch (final TTransportException e) {
      txn.rollback();
      LOGGER.error(
          "Got a TTransportException. Will close current Transport and create new connection/client.");
      try {
        connect();
        LOGGER.info("Reconnect succeeded.");
      } catch (final TTransportException e1) {
        LOGGER.error("Trying to reconnect failed.", e1);
      }
    } catch (final Throwable e) {
      txn.rollback();
      throw new EventDeliveryException(e);
    } finally {
      txn.close();
    }

    return status;
  }
Example #18
  public void doTestTextBatchAppend() throws Exception {
    LOG.debug("Starting...");

    final long rollCount = 10;
    final long batchSize = 2;
    final String fileName = "PageView";

    String newPath = testPath + "/singleTextBucket";
    int totalEvents = 0;
    int i = 1, j = 1;

    // clear the test directory
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path dirPath = new Path(newPath);
    fs.delete(dirPath, true);
    fs.mkdirs(dirPath);

    Context context = new Context();
    context.put("hdfs.path", newPath);
    context.put("hdfs.rollCount", String.valueOf(rollCount));
    context.put("hdfs.batchSize", String.valueOf(batchSize));
    context.put("hdfs.filePrefix", "pageview");

    Channel channel = new MemoryChannel();
    Configurables.configure(channel, context);
    sink.setChannel(channel);
    sink.start();

    Calendar eventDate = Calendar.getInstance();
    Date currentDate = new Date();
    Map<String, String> header = new HashMap<String, String>();
    header.put("topic", "PageView");

    List<String> bodies = Lists.newArrayList();

    // push the test events into the channel
    for (i = 1; i <= (rollCount * 10) / batchSize; i++) {
      Transaction txn = channel.getTransaction();
      txn.begin();
      for (j = 1; j <= batchSize; j++) {
        header.put("timestamp", String.valueOf(currentDate.getTime()));
        Event event = new SimpleEvent();
        eventDate.clear();
        eventDate.set(2014, i, i, i, 0);
        String body = "Test." + i + "." + j;

        event.setHeaders(header);
        event.setBody(body.getBytes());
        bodies.add(body);
        channel.put(event);
        totalEvents++;
      }
      txn.commit();
      txn.close();

      // execute sink to process the events
      sink.process();
    }
    sink.stop();

    FileStatus[] dirStat = fs.listStatus(dirPath);
    Path fList[] = FileUtil.stat2Paths(dirStat);

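    // The sink rolls a new file every rollCount events, so expect totalEvents / rollCount files, plus one for any remainder.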
    long expectedFiles = totalEvents / rollCount;
    if (totalEvents % rollCount > 0) {
      expectedFiles++;
    }

    Assert.assertEquals(
        "num files wrong, found: " + Lists.newArrayList(fList), expectedFiles, fList.length);
    // verify the contents of every file that was written
    verifyOutputTextFiles(fs, conf, dirPath.toUri().getPath(), fileName, bodies);
  }
Example #19
  @Override
  public Status process() throws EventDeliveryException {
    Status result = Status.READY;
    Channel channel = getChannel();
    Transaction transaction = null;
    Event event = null;
    String eventTopic = null;
    String eventKey = null;
    try {
      long processedEvent = 0;
      transaction = channel.getTransaction();
      transaction.begin(); // start the transaction
      messageList.clear();
      for (; processedEvent < batchSize; processedEvent++) {
        event = channel.take(); // take one event from the channel
        if (event == null) {
          break;
        }
        // an Event consists of headers and a body
        Map<String, String> headers = event.getHeaders();
        byte[] eventBody = event.getBody();
        if ((eventTopic = headers.get(TOPIC_HDR)) == null) { // fall back to the default topic when the header does not set one
          eventTopic = topic;
        }
        eventKey = headers.get(KEY_HDR);

        if (log.isDebugEnabled()) {
          log.debug("{Event}" + eventTopic + ":" + eventKey + ":" + new String(eventBody, CHARSET));
          log.debug("event #{}", processedEvent);
        }

        KeyedMessage<String, byte[]> data =
            new KeyedMessage<String, byte[]>(eventTopic, eventKey, eventBody);
        messageList.add(data);
      }
      if (processedEvent > 0) {
        producer.send(messageList);
      }
      transaction.commit(); // commit once after processing up to batchSize events

    } catch (Exception e) {
      String errorMsg = "Failed to publish events!";
      log.error(errorMsg, e);
      result = Status.BACKOFF;
      if (transaction != null) {
        try {
          transaction.rollback();
          log.debug("transaction rollback success !");
        } catch (Exception ex) {
          log.error(errorMsg, ex);
          throw Throwables.propagate(ex);
        }
      }
      throw new EventDeliveryException(errorMsg, e);
    } finally {
      if (transaction != null) {
        transaction.close();
      }
    }
    return result;
  }
  @Override
  public Status process() throws EventDeliveryException {
    logger.debug("processing...");
    Status status = Status.READY;
    Channel channel = getChannel();
    Transaction txn = channel.getTransaction();
    try {
      txn.begin();
      int count = 0;
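      // Take up to batchSize events and stage them on the client before a single execute().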
      for (; count < batchSize; ++count) {
        Event event = channel.take();

        if (event == null) {
          break;
        }

        if (logger.isDebugEnabled()) {
          try {
            logger.debug(
                "----------------------ElasticSearchSink defaultCharset is {}",
                Charset.defaultCharset());
            logger.debug(
                "----------------------ElasticSearchSink events body #{}",
                new String(event.getBody(), "utf-8"));
            logger.debug(
                "----------------------ElasticSearchSink events head #{}", event.getHeaders());
          } catch (UnsupportedEncodingException e) {
            logger.warn("Cannot decode event body as UTF-8 for debug logging", e);
          }
        }

        String realIndexType = BucketPath.escapeString(indexType, event.getHeaders());
        client.addEvent(event, indexNameBuilder, realIndexType, ttlMs);
      }

      if (count <= 0) {
        sinkCounter.incrementBatchEmptyCount();
        counterGroup.incrementAndGet("channel.underflow");
        status = Status.BACKOFF;
      } else {
        if (count < batchSize) {
          sinkCounter.incrementBatchUnderflowCount();
          status = Status.BACKOFF;
        } else {
          sinkCounter.incrementBatchCompleteCount();
        }

        sinkCounter.addToEventDrainAttemptCount(count);
        client.execute();
      }
      txn.commit();
      sinkCounter.addToEventDrainSuccessCount(count);
      counterGroup.incrementAndGet("transaction.success");
    } catch (Throwable ex) {
      try {
        txn.rollback();
        counterGroup.incrementAndGet("transaction.rollback");
      } catch (Exception ex2) {
        logger.error("Exception in rollback. Rollback might not have been successful.", ex2);
      }

      if (ex instanceof Error || ex instanceof RuntimeException) {
        logger.error("Failed to commit transaction. Transaction rolled back.", ex);
        Throwables.propagate(ex);
      } else {
        logger.error("Failed to commit transaction. Transaction rolled back.", ex);
        throw new EventDeliveryException(
            "Failed to commit transaction. Transaction rolled back.", ex);
      }
    } finally {
      txn.close();
    }
    return status;
  }