/**
  * Generates a {@link LogEventDto} for every event in the given pack, encoding
  * both the record header and the decoded Avro log payload as JSON.
  *
  * @param logEventPack the pack that holds the raw log events and their log schema
  * @param header the record header attached to every generated DTO
  * @return the list of generated DTOs, one per non-null log event
  * @throws IOException if a log event cannot be decoded from binary or encoded to JSON
  */
 protected List<LogEventDto> generateLogEvent(LogEventPack logEventPack, RecordHeader header)
     throws IOException {
   LOG.debug(
       "Generate LogEventDto objects from LogEventPack [{}] and header [{}]",
       logEventPack,
       header);
   List<LogEventDto> events = new ArrayList<>(logEventPack.getEvents().size());
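    // One converter decodes the binary Avro event payload using the pack's log
    // schema; a second converter encodes the RecordHeader to JSON.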
   GenericAvroConverter<GenericRecord> eventConverter =
       getConverter(logEventPack.getLogSchema().getSchema());
   GenericAvroConverter<GenericRecord> headerConverter =
       getConverter(header.getSchema().toString());
   try {
     for (LogEvent logEvent : logEventPack.getEvents()) {
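        // Skip null events and events without log data before any decoding work.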
       LOG.debug("Convert log events [{}] to dto objects.", logEvent);
       if (logEvent == null | logEvent.getLogData() == null) {
         continue;
       }
        LOG.trace(
            "Decoding log data [{}] with Avro record converter [{}]",
            logEvent.getLogData(),
            eventConverter);
       GenericRecord decodedLog = eventConverter.decodeBinary(logEvent.getLogData());
       LOG.trace("Avro header record converter [{}]", headerConverter);
       String encodedJsonLogHeader = headerConverter.encodeToJson(header);
       String encodedJsonLog = eventConverter.encodeToJson(decodedLog);
       events.add(new LogEventDto(encodedJsonLogHeader, encodedJsonLog));
     }
   } catch (IOException e) {
     LOG.error("Unexpected IOException while decoding LogEvents", e);
     throw e;
   }
   return events;
 }
 /**
  * Builds a {@link RecordHeader} for the given pack from the configured
  * header structure fields.
  *
  * @param logEventPack the pack whose metadata populates the header fields
  * @return the populated header, or {@code null} if no header structure is configured
  */
 private RecordHeader generateHeader(LogEventPack logEventPack) {
   RecordHeader logHeader = null;
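    // 'header' is the list of LogHeaderStructureDto fields configured for this
    // appender; only those fields are populated on the generated RecordHeader.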
   if (header != null) {
     logHeader = new RecordHeader();
     for (LogHeaderStructureDto field : header) {
       switch (field) {
         case KEYHASH:
           logHeader.setEndpointKeyHash(logEventPack.getEndpointKey());
           break;
         case TIMESTAMP:
           logHeader.setTimestamp(System.currentTimeMillis());
           break;
         case TOKEN:
           logHeader.setApplicationToken(applicationToken);
           break;
         case VERSION:
           logHeader.setHeaderVersion(LOG_HEADER_VERSION);
           break;
         case LSVERSION:
           logHeader.setLogSchemaVersion(logEventPack.getLogSchema().getVersion());
           break;
         default:
           if (LOG.isWarnEnabled()) {
             LOG.warn("Current header field [{}] doesn't support", field);
           }
           break;
       }
     }
   }
   return logHeader;
 }
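
  /*
   * A sketch of how these two helpers fit together. The calling method is not
   * part of this excerpt, so the wiring below is illustrative rather than the
   * actual driver code:
   *
   *   RecordHeader recordHeader = generateHeader(logEventPack);
   *   List<LogEventDto> events = generateLogEvent(logEventPack, recordHeader);
   */
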
  @Before
  public void beforeTest() throws IOException {
    endpointKeyHash = UUID.randomUUID().toString();
    appToken = String.valueOf(RANDOM.nextInt(Integer.MAX_VALUE));

    appenderDto = new LogAppenderDto();
    appenderDto.setId("Test_id");
    appenderDto.setApplicationToken(appToken);
    appenderDto.setName("Test Name");
    appenderDto.setTenantId(String.valueOf(RANDOM.nextInt()));
    appenderDto.setHeaderStructure(Arrays.asList(LogHeaderStructureDto.values()));

    header = new RecordHeader();
    header.setApplicationToken(appToken);
    header.setEndpointKeyHash(endpointKeyHash);
    header.setHeaderVersion(1);
    header.setTimestamp(System.currentTimeMillis());

    logEventPack = new LogEventPack();
    logEventPack.setDateCreated(System.currentTimeMillis());
    logEventPack.setEndpointKey(endpointKeyHash);

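     // 127.0.0.1:9142 presumably targets an embedded Cassandra test instance
     // (9142 is the default CQL port used by cassandra-unit).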
    CassandraServer server = new CassandraServer("127.0.0.1", 9142);
    configuration = new CassandraConfig();
    configuration.setCassandraBatchType(CassandraBatchType.UNLOGGED);
    configuration.setKeySpace(KEY_SPACE_NAME);
    configuration.setTableNamePattern("logs_$app_token_$config_hash");
    configuration.setCassandraExecuteRequestType(CassandraExecuteRequestType.ASYNC);
    configuration.setCassandraServers(Arrays.asList(server));
    configuration.setCallbackThreadPoolSize(3);
    configuration.setExecutorThreadPoolSize(3);

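     // Column mapping: endpointKeyHash from the record header becomes the
     // partition key, the JSON event body is stored as text, and a generated
     // UUID column 'binid' acts as the clustering key (the two boolean flags
     // presumably mark partition-key and clustering-key columns).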
     List<ColumnMappingElement> columnMapping = new ArrayList<>();
    columnMapping.add(
        new ColumnMappingElement(
            ColumnMappingElementType.HEADER_FIELD,
            "endpointKeyHash",
            "endpointKeyHash",
            ColumnType.TEXT,
            true,
            false));
    columnMapping.add(
        new ColumnMappingElement(
            ColumnMappingElementType.EVENT_JSON, "", "event_json", ColumnType.TEXT, false, false));
    columnMapping.add(
        new ColumnMappingElement(
            ColumnMappingElementType.UUID, "", "binid", ColumnType.UUID, false, true));

    configuration.setColumnMapping(columnMapping);

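     // Within each partition, order rows by 'binid' in descending order.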
     List<ClusteringElement> clusteringMapping = new ArrayList<>();
    clusteringMapping.add(new ClusteringElement("binid", OrderType.DESC));
    configuration.setClusteringMapping(clusteringMapping);

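     // The appender consumes its configuration as an Avro-serialized byte
     // array, so the CassandraConfig record is converted before being attached
     // to the appender DTO.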
    AvroByteArrayConverter<CassandraConfig> converter =
        new AvroByteArrayConverter<>(CassandraConfig.class);
    byte[] rawConfiguration = converter.toByteArray(configuration);
    appenderDto.setRawConfiguration(rawConfiguration);

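     // Create and initialize the appender under test with the prepared
     // configuration and application token.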
    logAppender = new CassandraLogAppender();
    logAppender.init(appenderDto);
    logAppender.setApplicationToken(appToken);
  }