// Esempio n. 1 (Example no. 1)
  @SuppressWarnings({"rawtypes", "unchecked"})
  @Override
  public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    // Per-bolt event counter, flushed by Storm every 10 seconds.
    eventCounter =
        context.registerMetric(this.getClass().getSimpleName(), new MultiCountMetric(), 10);

    // Start from a no-op filter chain, then replace it with the configured one.
    parseFilters = ParseFilters.emptyParseFilter;

    String parseFiltersConfigFile =
        ConfUtils.getString(conf, "parsefilters.config.file", "parsefilters.json");
    if (parseFiltersConfigFile != null) {
      try {
        parseFilters = new ParseFilters(conf, parseFiltersConfigFile);
      } catch (IOException e) {
        LOG.error("Exception caught while loading the ParseFilters");
        throw new RuntimeException("Exception caught while loading the ParseFilters", e);
      }
    }

    urlFilters = URLFilters.emptyURLFilters;
    emitOutlinks = ConfUtils.getBoolean(conf, "parser.emitOutlinks", true);

    // URL filters are only needed when outlinks are emitted downstream.
    if (emitOutlinks) {
      String urlFiltersConfigFile =
          ConfUtils.getString(conf, "urlfilters.config.file", "urlfilters.json");
      if (urlFiltersConfigFile != null) {
        try {
          urlFilters = new URLFilters(conf, urlFiltersConfigFile);
        } catch (IOException e) {
          LOG.error("Exception caught while loading the URLFilters");
          throw new RuntimeException("Exception caught while loading the URLFilters", e);
        }
      }
    }

    trackAnchors = ConfUtils.getBoolean(conf, "track.anchors", true);
    robots_noFollow_strict = ConfUtils.getBoolean(conf, RobotsTags.ROBOTS_NO_FOLLOW_STRICT, true);
    metadataTransfer = MetadataTransfer.getInstance(conf);
  }
  @SuppressWarnings({"rawtypes", "unchecked"})
  @Override
  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {

    _collector = collector;
    // Keep a local mutable copy of the topology configuration; from here on
    // 'conf' holds the same entries as 'stormConf' and is used consistently.
    this.conf = new Config();
    this.conf.putAll(stormConf);

    checkConfiguration();

    this.taskIndex = context.getThisTaskIndex();

    // SimpleDateFormat is not thread-safe but is confined to this method.
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ENGLISH);
    long start = System.currentTimeMillis();
    LOG.info("[Fetcher #{}] : starting at {}", taskIndex, sdf.format(start));

    // Register a "MultiCountMetric" to count different events in this bolt
    // Storm will emit the counts every n seconds to a special bolt via a
    // system stream
    // The data can be accessed by registering a "MetricConsumer" in the
    // topology
    this.eventCounter = context.registerMetric("fetcher_counter", new MultiCountMetric(), 10);

    this.averagedMetrics =
        context.registerMetric("fetcher_average", new MultiReducedMetric(new MeanReducer()), 10);

    this.perSecMetrics =
        context.registerMetric(
            "fetcher_average_persec", new MultiReducedMetric(new PerSecondReducer()), 10);

    protocolFactory = new ProtocolFactory(conf);

    String urlconfigfile = ConfUtils.getString(conf, "urlfilters.config.file", "urlfilters.json");

    if (urlconfigfile != null) {
      try {
        urlFilters = new URLFilters(conf, urlconfigfile);
      } catch (IOException e) {
        LOG.error("Exception caught while loading the URLFilters");
        throw new RuntimeException("Exception caught while loading the URLFilters", e);
      }
    }

    metadataTransfer = MetadataTransfer.getInstance(conf);

    allowRedirs =
        ConfUtils.getBoolean(
            conf, com.digitalpebble.storm.crawler.Constants.AllowRedirParamName, true);

    sitemapsAutoDiscovery = ConfUtils.getBoolean(conf, "sitemap.discovery", false);

    queueMode = ConfUtils.getString(conf, "fetcher.queue.mode", QUEUE_MODE_HOST);
    // Fall back to byHost when the configured mode is not one of the known values.
    if (!queueMode.equals(QUEUE_MODE_IP)
        && !queueMode.equals(QUEUE_MODE_DOMAIN)
        && !queueMode.equals(QUEUE_MODE_HOST)) {
      LOG.error("Unknown partition mode : {} - forcing to byHost", queueMode);
      queueMode = QUEUE_MODE_HOST;
    }
    LOG.info("Using queue mode : {}", queueMode);

    // Delays are configured in seconds; store them internally as milliseconds.
    this.crawlDelay = (long) (ConfUtils.getFloat(conf, "fetcher.server.delay", 1.0f) * 1000);

    this.maxCrawlDelay = (long) ConfUtils.getInt(conf, "fetcher.max.crawl.delay", 30) * 1000;
  }