Example #1
  public HttpResponse doProxyConfigure(StaplerRequest req) throws IOException, ServletException {
    Jenkins jenkins = Jenkins.getInstance();
    jenkins.checkPermission(CONFIGURE_UPDATECENTER);

    // Bind the submitted form to a ProxyConfiguration; a missing proxy host name means "no proxy".
    ProxyConfiguration pc = req.bindJSON(ProxyConfiguration.class, req.getSubmittedForm());
    if (pc.name == null) {
      // Clear the proxy and delete the persisted configuration file.
      jenkins.proxy = null;
      ProxyConfiguration.getXmlFile().delete();
    } else {
      // Install the new proxy settings and persist them.
      jenkins.proxy = pc;
      jenkins.proxy.save();
    }
    return new HttpRedirect("advanced");
  }
Example #2
  public HttpDestination(HttpClient client, Origin origin) {
    this.client = client;
    this.origin = origin;

    this.exchanges = newExchangeQueue(client);

    this.requestNotifier = new RequestNotifier(client);
    this.responseNotifier = new ResponseNotifier();

    // Ask the client's ProxyConfiguration whether this origin should be reached through a proxy.
    ProxyConfiguration proxyConfig = client.getProxyConfiguration();
    proxy = proxyConfig.match(origin);
    ClientConnectionFactory connectionFactory = client.getTransport();
    if (proxy != null) {
      // Let the matched proxy wrap the transport's connection factory.
      connectionFactory = proxy.newClientConnectionFactory(connectionFactory);
    } else {
      // No proxy: wrap with SSL when talking directly to an https origin.
      if (HttpScheme.HTTPS.is(getScheme()))
        connectionFactory = newSslClientConnectionFactory(connectionFactory);
    }
    this.connectionFactory = connectionFactory;

    // Precompute the Host header, appending the port unless it is the scheme's default.
    String host = getHost();
    if (!client.isDefaultPort(getScheme(), getPort())) host += ":" + getPort();
    hostField = new HttpField(HttpHeader.HOST, host);
  }
/** Configures the {@link Crawler}. Set it up using the {@link #builderFor(String)} method. */
public final class CrawljaxConfiguration {

  public static class CrawljaxConfigurationBuilder {

    private final ImmutableList.Builder<Plugin> pluginBuilder = ImmutableList.builder();
    private final CrawljaxConfiguration config;
    private final CrawlRulesBuilder crawlRules = CrawlRules.builder();

    private CrawljaxConfigurationBuilder(URL url) {
      Preconditions.checkNotNull(url);
      config = new CrawljaxConfiguration();
      config.url = url;
    }

    /**
     * @param states The maximum number of states the Crawler should crawl. The default is
     *     unlimited.
     */
    public CrawljaxConfigurationBuilder setMaximumStates(int states) {
      checkArgument(states > 1, "Number of maximum states should be larger than 1");
      config.maximumStates = states;
      return this;
    }

    /** Crawl without a maximum state limit. */
    public CrawljaxConfigurationBuilder setUnlimitedStates() {
      config.maximumStates = 0;
      return this;
    }

    /**
     * @param time The maximum time the crawler should run. The default is one hour.
     * @param unit The {@link TimeUnit} of the {@code time} argument.
     */
    public CrawljaxConfigurationBuilder setMaximumRunTime(long time, TimeUnit unit) {
      checkArgument(time >= 0, "Time should be positive, or 0 for infinite.");
      config.maximumRuntime = unit.toMillis(time);
      return this;
    }

    /** Set the maximum runtime to unlimited. */
    public CrawljaxConfigurationBuilder setUnlimitedRuntime() {
      config.maximumRuntime = 0;
      return this;
    }

    /** @param depth The maximum depth the crawler can reach. The default is <code>2</code>. */
    public CrawljaxConfigurationBuilder setMaximumDepth(int depth) {
      Preconditions.checkArgument(
          depth >= 0, "Depth should be 0 for infinite, or larger for a certain depth.");
      config.maximumDepth = depth;
      return this;
    }

    /** Set the crawl depth to unlimited. */
    public CrawljaxConfigurationBuilder setUnlimitedCrawlDepth() {
      config.maximumDepth = 0;
      return this;
    }

    /**
     * Add plugins to Crawljax. Note that without plugins, Crawljax won't give any output. For
     * basic output, at least enable the CrawlOverviewPlugin.
     *
     * <p>You can call this method several times to add multiple plugins.
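     *
     * <p>A minimal sketch (this assumes a {@code CrawlOverviewPlugin} with a no-argument
     * constructor is available on the classpath):
     *
     * <pre>{@code
     * builder.addPlugin(new CrawlOverviewPlugin());
     * }</pre>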
     *
     * @param plugins the plugins you would like to enable.
     */
    public CrawljaxConfigurationBuilder addPlugin(Plugin... plugins) {
      pluginBuilder.add(plugins);
      return this;
    }

    /**
     * @param configuration The proxy configuration. Default is {@link ProxyConfiguration#noProxy()}
     */
    public CrawljaxConfigurationBuilder setProxyConfig(ProxyConfiguration configuration) {
      Preconditions.checkNotNull(configuration);
      config.proxyConfiguration = configuration;
      return this;
    }

    /**
     * @return The {@link CrawlRulesBuilder} to define crawling rules. If not customized, Crawljax
     *     will crawl with the default {@link CrawlRules}.
     */
    public CrawlRulesBuilder crawlRules() {
      return crawlRules;
    }

    /**
     * @param url An external domain that should also be crawled if it is linked to by the seed
     *     URL. Whitelisted URLs are added to the configuration object.
     */
    public void alsoCrawl(String url) {
      config.alsoCrawl(url);
    }

    /** @param url An external {@link URL} that should also be crawled. */
    public void alsoCrawl(URL url) {
      config.alsoCrawl(url);
    }

    /**
     * @param configuration a custom {@link BrowserConfiguration}. The default is a single {@link
     *     BrowserType#firefox} browser.
     */
    public CrawljaxConfigurationBuilder setBrowserConfig(BrowserConfiguration configuration) {
      Preconditions.checkNotNull(configuration);
      config.browserConfig = configuration;
      return this;
    }

    public CrawljaxConfiguration build() {
      config.plugins = new Plugins(pluginBuilder.build());
      config.crawlRules = crawlRules.build();
      return config;
    }
  }

  /**
   * @param url The url you want to setup a configuration for
   * @return The builder to configure the crawler.
   */
  public static CrawljaxConfigurationBuilder builderFor(URL url) {
    Preconditions.checkNotNull(url, "URL was null");
    return new CrawljaxConfigurationBuilder(url);
  }

  /**
   * @param url The url you want to setup a configuration for
   * @return The builder to configure the crawler.
   */
  public static CrawljaxConfigurationBuilder builderFor(String url) {
    try {
      return new CrawljaxConfigurationBuilder(new URL(url));
    } catch (MalformedURLException e) {
      throw new CrawljaxException("Could not read that URL", e);
    }
  }
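
  // A minimal usage sketch (the URL and the limits below are illustrative, not defaults):
  //
  //   CrawljaxConfiguration config =
  //       CrawljaxConfiguration.builderFor("http://example.com")
  //           .setMaximumStates(50)
  //           .setMaximumRunTime(10, TimeUnit.MINUTES)
  //           .setMaximumDepth(3)
  //           .build();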

  private URL url;
  private Deque<URL> whitelist = new ArrayDeque<URL>();

  private BrowserConfiguration browserConfig = new BrowserConfiguration(BrowserType.firefox);
  private Plugins plugins;
  private ProxyConfiguration proxyConfiguration = ProxyConfiguration.noProxy();

  private CrawlRules crawlRules;

  private int maximumStates = 0;
  private long maximumRuntime = TimeUnit.HOURS.toMillis(1);
  private int maximumDepth = 2;

  private CrawljaxConfiguration() {}

  public URL getUrl() {
    return url;
  }

  public BrowserConfiguration getBrowserConfig() {
    return browserConfig;
  }

  public Plugins getPlugins() {
    return plugins;
  }

  public ProxyConfiguration getProxyConfiguration() {
    return proxyConfiguration;
  }

  public CrawlRules getCrawlRules() {
    return crawlRules;
  }

  public void alsoCrawl(String url) {
    try {
      // Prefer whitelisting the base URL of the given address.
      this.alsoCrawl(new URL(UrlUtils.getBaseUrl(url)));
    } catch (MalformedURLException e) {
      throw new CrawljaxException("Could not read that URL", e);
    } catch (StringIndexOutOfBoundsException e) {
      // getBaseUrl could not extract a base URL; fall back to the URL as given.
      try {
        this.alsoCrawl(new URL(url));
      } catch (MalformedURLException e1) {
        throw new CrawljaxException("Could not read that URL", e1);
      }
    }
  }

  public void alsoCrawl(URL url) {
    whitelist.add(url);
  }

  public int getMaximumStates() {
    return maximumStates;
  }

  public long getMaximumRuntime() {
    return maximumRuntime;
  }

  public int getMaximumDepth() {
    return maximumDepth;
  }

  public Deque<URL> getWhitelist() {
    return whitelist;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(
        url,
        browserConfig,
        plugins,
        proxyConfiguration,
        crawlRules,
        maximumStates,
        maximumRuntime,
        maximumDepth);
  }

  @Override
  public boolean equals(Object object) {
    if (object instanceof CrawljaxConfiguration) {
      CrawljaxConfiguration that = (CrawljaxConfiguration) object;
      return Objects.equal(this.url, that.url)
          && Objects.equal(this.browserConfig, that.browserConfig)
          && Objects.equal(this.plugins, that.plugins)
          && Objects.equal(this.proxyConfiguration, that.proxyConfiguration)
          && Objects.equal(this.crawlRules, that.crawlRules)
          && Objects.equal(this.maximumStates, that.maximumStates)
          && Objects.equal(this.maximumRuntime, that.maximumRuntime)
          && Objects.equal(this.maximumDepth, that.maximumDepth);
    }
    return false;
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("url", url)
        .add("browserConfig", browserConfig)
        .add("plugins", plugins)
        .add("proxyConfiguration", proxyConfiguration)
        .add("crawlRules", crawlRules)
        .add("maximumStates", maximumStates)
        .add("maximumRuntime", maximumRuntime)
        .add("maximumDepth", maximumDepth)
        .toString();
  }
}