@Override
  public void configure(Context context) {
    if (morphlineContext == null) {
      FaultTolerance faultTolerance =
          new FaultTolerance(
              context.getBoolean(FaultTolerance.IS_PRODUCTION_MODE, false),
              context.getBoolean(FaultTolerance.IS_IGNORING_RECOVERABLE_EXCEPTIONS, false),
              context.getString(FaultTolerance.RECOVERABLE_EXCEPTION_CLASSES));

      morphlineContext =
          new MorphlineContext.Builder()
              .setExceptionHandler(faultTolerance)
              .setMetricRegistry(new MetricRegistry())
              .build();
    }

    String morphlineFile = context.getString(MORPHLINE_FILE_PARAM);
    String morphlineId = context.getString(MORPHLINE_ID_PARAM);
    if (morphlineFile == null || morphlineFile.trim().length() == 0) {
      throw new MorphlineCompilationException("Missing parameter: " + MORPHLINE_FILE_PARAM, null);
    }
    Config override =
        ConfigFactory.parseMap(context.getSubProperties(MORPHLINE_VARIABLE_PARAM + "."));
    morphline =
        new Compiler()
            .compile(new File(morphlineFile), morphlineId, morphlineContext, finalChild, override);
    morphlineFileAndId = morphlineFile + "@" + morphlineId;
  }
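Every configure method in these examples pulls its settings from org.apache.flume.Context, which wraps a String-to-String map with typed getters. Below is a minimal, self-contained sketch of that API; the property names and values are illustrative only, not taken from any particular component.

import java.util.Map;
import org.apache.flume.Context;

public class ContextDemo {
  public static void main(String[] args) {
    Context context = new Context();
    // Hypothetical properties, mirroring the morphline example above.
    context.put("morphlineFile", "/etc/flume/conf/morphlines.conf");
    context.put("morphlineId", "morphline1");
    context.put("morphlineVariable.collection", "logs");

    // Typed getters, with and without defaults.
    String file = context.getString("morphlineFile");
    boolean production = context.getBoolean("isProductionMode", false); // not set -> false

    // getSubProperties strips the prefix; this is how the Config override above is built.
    Map<String, String> variables = context.getSubProperties("morphlineVariable.");

    System.out.println(file + " " + production + " " + variables); // variables = {collection=logs}
  }
}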
 @Override
 public void configure(Context context) {
   hours = context.getString("hours");
   Preconditions.checkArgument(
       StringUtils.isNotEmpty(hours), "Must configure with a valid hours value");
   outputPattern = context.getString("outputPattern");
   Preconditions.checkArgument(
       StringUtils.isNotEmpty(outputPattern), "Must configure with a valid outputPattern");
 }
  /**
   * Verify that the required configuration is set
   *
   * @param context the Flume context holding the source configuration
   */
  private void ensureConfigCompleteness(Context context) {

    if (StringUtils.isEmpty(context.getString(RabbitMQConstants.CONFIG_EXCHANGENAME))
        && StringUtils.isEmpty(context.getString(RabbitMQConstants.CONFIG_QUEUENAME))) {

      throw new IllegalArgumentException(
          "You must configure at least one of queue name or exchange name parameters");
    }
  }
  /**
   * The initialization method for the Source. The context contains all the Flume configuration
   * info, and can be used to retrieve any configuration values necessary to set up the Source.
   */
  @Override
  public void configure(Context context) {
    consumerKey = context.getString(TwitterSourceConstants.CONSUMER_KEY_KEY);
    consumerSecret = context.getString(TwitterSourceConstants.CONSUMER_SECRET_KEY);
    accessToken = context.getString(TwitterSourceConstants.ACCESS_TOKEN_KEY);
    accessTokenSecret = context.getString(TwitterSourceConstants.ACCESS_TOKEN_SECRET_KEY);

    String keywordString = context.getString(TwitterSourceConstants.KEYWORDS_KEY, "");
    keywords = keywordString.split(",");
    for (int i = 0; i < keywords.length; i++) {
      keywords[i] = keywords[i].trim();
    }
  }
  /** Configures the keys required for accessing Twitter. Also gets the productKeys */
  @Override
  public void configure(Context context) {
    consumerKey = context.getString("consumerKey");
    consumerSecret = context.getString("consumerSecret");
    accessToken = context.getString("accessToken");
    accessTokenSecret = context.getString("accessTokenSecret");

    String productKeys = context.getString("Product", "");
    searchFor = productKeys.split(",");
    for (int i = 0; i < searchFor.length; i++) {
      searchFor[i] = searchFor[i].trim();
    }
  }
 @Override
 public void configure(Context context) throws FlumeException {
   preserveExisting = context.getBoolean(PRESERVE, PRESERVE_DEFAULT);
   key = context.getString(KEY, KEY_DEFAULT);
   file = context.getString(FILE);
   period = context.getInteger(PERIOD, Integer.valueOf(PERIOD_DEFAULT));
   if (file != null) {
     value = readHeader(file);
   } else {
     logger.error("CSVHeaderInterceptor - file not specified");
     throw new FlumeException("CSVHeaderInterceptor - file not specified");
   }
 }
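The readHeader helper used above is referenced but not shown in this example. A plausible sketch follows, under the assumption that it simply returns the first line of the configured CSV file; this is not the original implementation.

 // Sketch only: assumes the header is the first line of the configured file.
 private String readHeader(String file) throws FlumeException {
   try (java.io.BufferedReader reader =
       new java.io.BufferedReader(new java.io.FileReader(file))) {
     String line = reader.readLine();
     if (line == null) {
       throw new FlumeException("CSVHeaderInterceptor - empty file: " + file);
     }
     return line.trim();
   } catch (java.io.IOException e) {
     throw new FlumeException("CSVHeaderInterceptor - unable to read " + file, e);
   }
 }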
  @Test
  public void checkStatusFileCorrectlyUpdated() throws Exception {

    File file = File.createTempFile("statusFileName", ".txt");

    when(context.getString("status.file.path", "/var/lib/flume")).thenReturn(file.getParent());
    when(context.getString("status.file.name")).thenReturn(file.getName());

    SQLSourceHelper sqlSourceHelper = new SQLSourceHelper(context, "Source Name");
    sqlSourceHelper.setCurrentIndex(10);

    sqlSourceHelper.updateStatusFile();

    SQLSourceHelper sqlSourceHelper2 = new SQLSourceHelper(context, "Source Name");
    assertEquals(10L, sqlSourceHelper2.getCurrentIndex());
  }
 @Override
 public void doConfigure(Context context) {
   final String regex = context.getString(CONFIG_REGULAR_EXPRESSION, REGEX_DEFAULT);
   final boolean regexIgnoreCase = context.getBoolean(IGNORE_CASE_CONFIG, IGNORE_CASE_DEFAULT);
   inputPattern =
       Pattern.compile(regex, Pattern.DOTALL | (regexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
 }
  @Override
  public void configure(Context context) {
    String resolutionsStr = context.getString("resolutions", "month,day,hour,minute,second");
    String[] resolutionsArray = resolutionsStr.split(",");

    for (String resolution : resolutionsArray) {
      if (resolution.trim().equals("month")) {
        resolutions[4] = true;
      } else if (resolution.trim().equals("day")) {
        resolutions[3] = true;
      } else if (resolution.trim().equals("hour")) {
        resolutions[2] = true;
      } else if (resolution.trim().equals("minute")) {
        resolutions[1] = true;
      } else if (resolution.trim().equals("second")) {
        resolutions[0] = true;
      } else {
        LOGGER.warn("[" + this.getName() + "] Unknown resolution " + resolution);
      } // if else
    } // for

    LOGGER.debug(
        "[" + this.getName() + "] Reading configuration (resolutions=" + resolutionsStr + ")");
    super.configure(context);
  } // configure
  @Override
  public void configure(Context context) {
    port = Integer.parseInt(context.getString("port"));
    bindAddress = context.getString("bind");
    try {
      maxThreads = context.getInteger(THREADS, 0);
    } catch (NumberFormatException e) {
      logger.warn(
          "AVRO source's \"threads\" property must specify an integer value: {}",
          context.getString(THREADS));
    }

    if (sourceCounter == null) {
      sourceCounter = new SourceCounter(getName());
    }
  }
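The try/catch above exists because Context.getInteger parses the raw string value and throws NumberFormatException when it is not a valid integer. A quick, self-contained illustration; the property name here is arbitrary.

import org.apache.flume.Context;

public class ThreadsPropertyDemo {
  public static void main(String[] args) {
    Context context = new Context();
    context.put("threads", "not-a-number");
    try {
      Integer threads = context.getInteger("threads", 0); // Integer.parseInt happens here
      System.out.println("threads = " + threads);
    } catch (NumberFormatException e) {
      System.out.println("invalid 'threads' value: " + e.getMessage());
    }
  }
}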
 @Override
 public void configure(Context context) {
   /*
    * Default is to listen on UDP port 162 on all IPv4 interfaces.
    * Since 162 is a privileged port, snmptrapd must typically be run as root.
    * Or change to non-privileged port > 1024.
    */
   bindAddress = context.getString("bind", DEFAULT_BIND);
   bindPort = context.getInteger("port", DEFAULT_PORT);
 }
  @Override
  public void configure(Context context) {
    logger.info("Configuring thrift source.");
    port = context.getInteger(CONFIG_PORT);
    Preconditions.checkNotNull(port, "Port must be specified for Thrift Source.");
    bindAddress = context.getString(CONFIG_BIND);
    Preconditions.checkNotNull(
        bindAddress, "Bind address must be specified for Thrift Source.");

    try {
      maxThreads = context.getInteger(CONFIG_THREADS, 0);
    } catch (NumberFormatException e) {
      logger.warn(
          "Thrift source's \"threads\" property must specify an integer value: "
              + context.getString(CONFIG_THREADS));
    }

    if (sourceCounter == null) {
      sourceCounter = new SourceCounter(getName());
    }
  }
  /**
   * The initialization method for the Source. The context contains all the Flume configuration
   * info, and can be used to retrieve any configuration values necessary to set up the Source.
   */
  @Override
  public void configure(Context context) {
    consumerKey = context.getString(TwitterSourceConstants.CONSUMER_KEY_KEY);
    consumerSecret = context.getString(TwitterSourceConstants.CONSUMER_SECRET_KEY);
    accessToken = context.getString(TwitterSourceConstants.ACCESS_TOKEN_KEY);
    accessTokenSecret = context.getString(TwitterSourceConstants.ACCESS_TOKEN_SECRET_KEY);

    String keywordString = context.getString(TwitterSourceConstants.KEYWORDS_KEY, "");
    keywords = keywordString.split(",");
    for (int i = 0; i < keywords.length; i++) {
      keywords[i] = keywords[i].trim();
    }

    ConfigurationBuilder cb = new ConfigurationBuilder();
    cb.setOAuthConsumerKey(consumerKey);
    cb.setOAuthConsumerSecret(consumerSecret);
    cb.setOAuthAccessToken(accessToken);
    cb.setOAuthAccessTokenSecret(accessTokenSecret);
    cb.setJSONStoreEnabled(true);
    cb.setIncludeEntitiesEnabled(true);

    twitterStream = new TwitterStreamFactory(cb.build()).getInstance();
  }
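configure only assembles the TwitterStream; the source's start() method (not shown here) would typically attach a listener and open a filtered stream. The following is a hedged sketch of what that might look like with the twitter4j API, reusing the keywords parsed above and assuming the class extends AbstractSource; the event-building details are assumptions, not the original code.

    // Sketch of a possible start() body; not part of the original example.
    twitterStream.addListener(new twitter4j.StatusAdapter() {
      @Override
      public void onStatus(twitter4j.Status status) {
        // Build a Flume event from the status text and hand it to the channel processor.
        getChannelProcessor().processEvent(
            org.apache.flume.event.EventBuilder.withBody(
                status.getText(), java.nio.charset.StandardCharsets.UTF_8));
      }
    });
    twitterStream.filter(new twitter4j.FilterQuery().track(keywords));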
  @Before
  public void setup() {

    when(context.getString("status.file.name")).thenReturn("statusFileName.txt");
    when(context.getString("connection.url")).thenReturn("jdbc:mysql://host:3306/database");
    when(context.getString("table")).thenReturn("table");
    when(context.getString("incremental.column.name")).thenReturn("incrementalColumName");
    when(context.getString("user")).thenReturn("user");
    when(context.getString("password")).thenReturn("password");
    when(context.getString("status.file.path", "/var/lib/flume")).thenReturn("/tmp/flume");
    when(context.getString("columns.to.select", "*")).thenReturn("*");
    when(context.getInteger("run.query.delay", 10000)).thenReturn(10000);
    when(context.getInteger("batch.size", 100)).thenReturn(100);
    when(context.getInteger("max.rows", 10000)).thenReturn(10000);
    when(context.getLong("incremental.value", 0L)).thenReturn(0L);
  }
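These SQLSourceHelper tests assume that context is a Mockito mock of org.apache.flume.Context declared elsewhere in the test class. A minimal sketch of that missing scaffolding is shown below; the class name and runner choice are assumptions.

import org.apache.flume.Context;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class SQLSourceHelperTest {
  @Mock private Context context; // stubbed in setup() via when(...).thenReturn(...)
}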
  /** {@inheritDoc} */
  @Override
  public void configure(final Context context) {
    hostName = context.getString(HOSTNAME_CONFIG_PROP_NAME);
    port = context.getInteger(PORT_CONFIG_PROP_NAME);
    batchSize = context.getInteger(BATCH_SIZE_PROP_NAME, DEFAULT_BATCH_SIZE);

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }

    LOGGER.info(
        "Configuring ZipkinSpanCollectorSink. hostname: {}, port: {}, batchsize: {}",
        hostName,
        port,
        batchSize);
  }
 @Override
 public void configure(Context context) {
   batchSize = context.getInteger(Constants.BATCH_SIZE, Constants.DEFAULT_BATCH_SIZE);
   messageList = new ArrayList<KeyedMessage<String, byte[]>>(batchSize);
   log.debug("Using batch size: {}", batchSize);
   topic = context.getString(Constants.TOPIC, Constants.DEFAULT_TOPIC);
   if (topic.equals(Constants.DEFAULT_TOPIC)) {
     log.warn(
         "The property 'topic' is not set. Using the default topic name ["
             + Constants.DEFAULT_TOPIC
             + "]");
   } else {
     log.info(
         "Using the configured topic: [" + topic + "]; this may be overridden by event headers");
   }
   kafkaProps = KafkaUtil.getKafkaConfig(context);
   if (log.isDebugEnabled()) {
     log.debug("Kafka producer properties : " + kafkaProps);
   }
 }
  /**
   * The initialization method for the Source. The context contains all the Flume configuration
   * info, and can be used to retrieve any configuration values necessary to set up the Source.
   */
  public void configure(Context context) {

    try {

      Properties properties = PropertiesHelper.loadProperties();

      consumerKey = properties.getProperty("twitter.consumerkey");
      consumerSecret = properties.getProperty("twitter.consumersecret");
      accessToken = properties.getProperty("twitter.accesstoken");
      accessTokenSecret = properties.getProperty("twitter.accesstokensecret");
    } catch (IOException ex) {
      logger.error(
          "Unable to load properties file. Do you have bigdata.properties on your classpath?", ex);
    }

    String keywordString = context.getString("london", "");

    keywords = keywordString.split(",");
    for (int i = 0; i < keywords.length; i++) {
      keywords[i] = keywords[i].trim();
    }
  }
  @Override
  public void configure(Context context) {
    setName(NAME_PREFIX + counter.getAndIncrement());

    host = context.getString(HOST, DEFAULT_HOST);
    port = context.getInteger(PORT, DEFAULT_PORT);
    username = context.getString(USERNAME);
    password = context.getString(PASSWORD);
    model = CollectionModel.valueOf(context.getString(MODEL, CollectionModel.single.name()));
    dbName = context.getString(DB_NAME, DEFAULT_DB);
    collectionName = context.getString(COLLECTION, DEFAULT_COLLECTION);
    batchSize = context.getInteger(BATCH_SIZE, DEFAULT_BATCH);
    autoWrap = context.getBoolean(AUTO_WRAP, DEFAULT_AUTO_WRAP);
    wrapField = context.getString(WRAP_FIELD, DEFAULT_WRAP_FIELD);

    logger.info(
        "MongoSink {} context { host:{}, port:{}, username:{}, password:{}, model:{}, dbName:{}, collectionName:{}, batch: {} }",
        new Object[] {
          getName(), host, port, username, password, model, dbName, collectionName, batchSize
        });
  }
  public void configure(Context context) throws ConfigurationException {
    super.configure(context);
    try {
      String channelList = context.getString(BasicConfigurationConstants.CONFIG_CHANNELS);
      if (channelList != null) {
        this.channels = new HashSet<String>(Arrays.asList(channelList.split("\\s+")));
      }
      if (channels.isEmpty()) {
        errors.add(
            new FlumeConfigurationError(
                componentName,
                ComponentType.CHANNEL.getComponentType(),
                FlumeConfigurationErrorType.PROPERTY_VALUE_NULL,
                ErrorOrWarning.ERROR));
        throw new ConfigurationException("No channels set for " + this.getComponentName());
      }
      Map<String, String> selectorParams =
          context.getSubProperties(
              BasicConfigurationConstants.CONFIG_SOURCE_CHANNELSELECTOR_PREFIX);
      String selType;
      if (selectorParams != null && !selectorParams.isEmpty()) {
        selType = selectorParams.get(BasicConfigurationConstants.CONFIG_TYPE);
        System.out.println("Loading selector: " + selType);
      } else {
        selType = ChannelSelectorConfigurationType.REPLICATING.toString();
      }

      if (selType == null || selType.isEmpty()) {
        selType = ChannelSelectorConfigurationType.REPLICATING.toString();
      }
      ChannelSelectorType selectorType = this.getKnownChannelSelector(selType);
      Context selectorContext = new Context();
      selectorContext.putAll(selectorParams);
      String config = null;
      if (selectorType == null) {
        config = selectorContext.getString(BasicConfigurationConstants.CONFIG_CONFIG);
        if (config == null || config.isEmpty()) {
          config = "OTHER";
        }
      } else {
        config = selectorType.toString().toUpperCase();
      }

      this.selectorConf =
          (ChannelSelectorConfiguration)
              ComponentConfigurationFactory.create(
                  ComponentType.CHANNELSELECTOR.getComponentType(),
                  config,
                  ComponentType.CHANNELSELECTOR);
      selectorConf.setChannels(channels);
      selectorConf.configure(selectorContext);
    } catch (Exception e) {
      errors.add(
          new FlumeConfigurationError(
              componentName,
              ComponentType.CHANNELSELECTOR.getComponentType(),
              FlumeConfigurationErrorType.CONFIG_ERROR,
              ErrorOrWarning.ERROR));
      throw new ConfigurationException("Failed to configure component!", e);
    }
  }
  @Override
  public void configure(Context context) {
    if (!isLocal) {
      if (StringUtils.isNotBlank(context.getString(HOSTNAMES))) {
        serverAddresses = StringUtils.deleteWhitespace(context.getString(HOSTNAMES)).split(",");
      }
      Preconditions.checkState(
          serverAddresses != null && serverAddresses.length > 0, "Missing Param:" + HOSTNAMES);
    }

    if (StringUtils.isNotBlank(context.getString(INDEX_NAME))) {
      this.indexName = context.getString(INDEX_NAME);
    }

    if (StringUtils.isNotBlank(context.getString(INDEX_TYPE))) {
      this.indexType = context.getString(INDEX_TYPE);
    }

    if (StringUtils.isNotBlank(context.getString(CLUSTER_NAME))) {
      this.clusterName = context.getString(CLUSTER_NAME);
    }

    if (StringUtils.isNotBlank(context.getString(BATCH_SIZE))) {
      this.batchSize = Integer.parseInt(context.getString(BATCH_SIZE));
    }

    if (StringUtils.isNotBlank(context.getString(TTL))) {
      this.ttlMs = parseTTL(context.getString(TTL));
      Preconditions.checkState(ttlMs > 0, TTL + " must be greater than 0 or not set.");
    }

    if (StringUtils.isNotBlank(context.getString(CLIENT_TYPE))) {
      clientType = context.getString(CLIENT_TYPE);
    }

    elasticSearchClientContext = new Context();
    elasticSearchClientContext.putAll(context.getSubProperties(CLIENT_PREFIX));

    String serializerClazz = DEFAULT_SERIALIZER_CLASS;
    if (StringUtils.isNotBlank(context.getString(SERIALIZER))) {
      serializerClazz = context.getString(SERIALIZER);
    }

    Context serializerContext = new Context();
    serializerContext.putAll(context.getSubProperties(SERIALIZER_PREFIX));

    try {
      @SuppressWarnings("unchecked")
      Class<? extends Configurable> clazz =
          (Class<? extends Configurable>) Class.forName(serializerClazz);
      Configurable serializer = clazz.newInstance();

      if (serializer instanceof ElasticSearchIndexRequestBuilderFactory) {
        indexRequestFactory = (ElasticSearchIndexRequestBuilderFactory) serializer;
        indexRequestFactory.configure(serializerContext);
      } else if (serializer instanceof ElasticSearchEventSerializer) {
        eventSerializer = (ElasticSearchEventSerializer) serializer;
        eventSerializer.configure(serializerContext);
      } else {
        throw new IllegalArgumentException(
            serializerClazz + " is not an ElasticSearchEventSerializer");
      }
    } catch (Exception e) {
      logger.error("Could not instantiate event serializer.", e);
      Throwables.propagate(e);
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }

    String indexNameBuilderClass = DEFAULT_INDEX_NAME_BUILDER_CLASS;
    if (StringUtils.isNotBlank(context.getString(INDEX_NAME_BUILDER))) {
      indexNameBuilderClass = context.getString(INDEX_NAME_BUILDER);
    }

    Context indexnameBuilderContext = new Context();
    indexnameBuilderContext.putAll(context.getSubProperties(INDEX_NAME_BUILDER_PREFIX));

    try {
      @SuppressWarnings("unchecked")
      Class<? extends IndexNameBuilder> clazz =
          (Class<? extends IndexNameBuilder>) Class.forName(indexNameBuilderClass);
      indexNameBuilder = clazz.newInstance();
      indexnameBuilderContext.put(INDEX_NAME, indexName);
      indexNameBuilder.configure(indexnameBuilderContext);
    } catch (Exception e) {
      logger.error("Could not instantiate index name builder.", e);
      Throwables.propagate(e);
    }

    Preconditions.checkState(StringUtils.isNotBlank(indexName), "Missing Param:" + INDEX_NAME);
    Preconditions.checkState(StringUtils.isNotBlank(indexType), "Missing Param:" + INDEX_TYPE);
    Preconditions.checkState(StringUtils.isNotBlank(clusterName), "Missing Param:" + CLUSTER_NAME);
    Preconditions.checkState(batchSize >= 1, BATCH_SIZE + " must be greater than 0");
  }
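The serializer and index-name-builder lookups above follow the same reflective plug-in pattern: read a class name from the Context, instantiate it, and configure it with its own sub-properties. Below is a stripped-down, generic sketch of that pattern; the class and parameter names are illustrative, not Flume API.

import org.apache.flume.Context;
import org.apache.flume.conf.Configurable;

public final class ConfigurablePluginLoader {
  /** Loads a Configurable by class name and configures it from a prefixed sub-context. */
  public static Configurable load(Context context, String classNameProperty,
      String defaultClassName, String subPropertyPrefix) throws Exception {
    String className = context.getString(classNameProperty, defaultClassName);
    Configurable plugin =
        (Configurable) Class.forName(className).getDeclaredConstructor().newInstance();
    Context pluginContext = new Context();
    pluginContext.putAll(context.getSubProperties(subPropertyPrefix));
    plugin.configure(pluginContext);
    return plugin;
  }
}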
 public void configure(Context context) {
   // May throw IllegalArgumentException for unsupported charsets.
   charset = Charset.forName(context.getString(CHARSET_KEY, CHARSET_DEFAULT));
   pattern = context.getString(PATTERN_KEY, PATTERN_DEFAULT);
 }
  @Override
  public synchronized void configure(Context context) {
    spoolDirectory = context.getString(SPOOL_DIRECTORY);
    Preconditions.checkState(
        spoolDirectory != null, "Configuration must specify a spooling directory");

    completedSuffix = context.getString(SPOOLED_FILE_SUFFIX, DEFAULT_SPOOLED_FILE_SUFFIX);
    deletePolicy = context.getString(DELETE_POLICY, DEFAULT_DELETE_POLICY);
    fileHeader = context.getBoolean(FILENAME_HEADER, DEFAULT_FILE_HEADER);
    fileHeaderKey = context.getString(FILENAME_HEADER_KEY, DEFAULT_FILENAME_HEADER_KEY);
    basenameHeader = context.getBoolean(BASENAME_HEADER, DEFAULT_BASENAME_HEADER);
    basenameHeaderKey = context.getString(BASENAME_HEADER_KEY, DEFAULT_BASENAME_HEADER_KEY);
    batchSize = context.getInteger(BATCH_SIZE, DEFAULT_BATCH_SIZE);
    inputCharset = context.getString(INPUT_CHARSET, DEFAULT_INPUT_CHARSET);
    decodeErrorPolicy =
        DecodeErrorPolicy.valueOf(
            context
                .getString(DECODE_ERROR_POLICY, DEFAULT_DECODE_ERROR_POLICY)
                .toUpperCase(Locale.ENGLISH));

    ignorePattern = context.getString(IGNORE_PAT, DEFAULT_IGNORE_PAT);
    trackerDirPath = context.getString(TRACKER_DIR, DEFAULT_TRACKER_DIR);

    deserializerType = context.getString(DESERIALIZER, "ZipDeserializer");
    deserializerContext = new Context(context.getSubProperties(DESERIALIZER + "."));

    consumeOrder =
        ConsumeOrder.valueOf(
            context
                .getString(CONSUME_ORDER, DEFAULT_CONSUME_ORDER.toString())
                .toUpperCase(Locale.ENGLISH));

    // "Hack" to support backwards compatibility with previous generation of
    // spooling directory source, which did not support deserializers
    Integer bufferMaxLineLength = context.getInteger(BUFFER_MAX_LINE_LENGTH);
    if (bufferMaxLineLength != null
        && deserializerType != null
        && deserializerType.equalsIgnoreCase(DEFAULT_DESERIALIZER)) {
      deserializerContext.put(LineDeserializer.MAXLINE_KEY, bufferMaxLineLength.toString());
    }

    maxBackoff = context.getInteger(MAX_BACKOFF, DEFAULT_MAX_BACKOFF);
    if (sourceCounter == null) {
      sourceCounter = new SourceCounter(getName());
    }
  }
 @Test(expected = ConfigurationException.class)
 public void passwordNotSet() {
   when(context.getString("password")).thenReturn(null);
   new SQLSourceHelper(context, "Source Name");
 }
 @Test(expected = ConfigurationException.class)
 public void checkStatusFileNameNotSet() {
   when(context.getString("status.file.name")).thenReturn(null);
   new SQLSourceHelper(context, "Source Name");
 }
 @Test
 public void getCustomQuery() {
   when(context.getString("custom.query")).thenReturn("SELECT column FROM table");
   SQLSourceHelper sqlSourceHelper = new SQLSourceHelper(context, "Source Name");
   assertEquals("SELECT column FROM table", sqlSourceHelper.getQuery());
 }