public static serverObjects respond(
    final RequestHeader header, final serverObjects post, final serverSwitch env) {
  // return variable that accumulates replacements
  final Switchboard sb = (Switchboard) env;

  // clean up all search events
  SearchEventCache.cleanupEvents(true);
  sb.index.clearCaches(); // every time the ranking is changed we need to remove old orderings

  // initial values for AJAX elements (without JavaScript)
  final serverObjects prop = new serverObjects();
  prop.put("rejected", 0);

  Segment segment = sb.index;
  Fulltext fulltext = segment.fulltext();
  String localSolr = "/solr/select?core=collection1&q=*:*&start=0&rows=3";
  String remoteSolr = env.getConfig(SwitchboardConstants.FEDERATED_SERVICE_SOLR_INDEXING_URL, localSolr);
  if (!remoteSolr.endsWith("/")) remoteSolr = remoteSolr + "/";
  prop.put(
      "urlpublictextSolrURL",
      fulltext.connectedLocalSolr()
          ? localSolr
          : remoteSolr + "collection1/select?&q=*:*&start=0&rows=3");
  prop.putNum("urlpublictextSize", fulltext.collectionSize());
  prop.putNum("urlpublictextSegmentCount", fulltext.getDefaultConnector().getSegmentCount());
  prop.put(
      "webgraphSolrURL",
      fulltext.connectedLocalSolr()
          ? localSolr.replace("collection1", "webgraph")
          : remoteSolr + "webgraph/select?&q=*:*&start=0&rows=3");
  prop.putNum("webgraphSize", fulltext.useWebgraph() ? fulltext.webgraphSize() : 0);
  prop.putNum(
      "webgraphSegmentCount",
      fulltext.useWebgraph() ? fulltext.getWebgraphConnector().getSegmentCount() : 0);
  prop.putNum("citationSize", segment.citationCount());
  prop.putNum("citationSegmentCount", segment.citationSegmentCount());
  prop.putNum("rwipublictextSize", segment.RWICount());
  prop.putNum("rwipublictextSegmentCount", segment.RWISegmentCount());

  prop.put("list", "0");
  prop.put("loaderSize", 0);
  prop.put("loaderMax", 0);
  prop.put("list-loader", 0);

  int coreCrawlJobSize = sb.crawlQueues.coreCrawlJobSize();
  int limitCrawlJobSize = sb.crawlQueues.limitCrawlJobSize();
  int remoteTriggeredCrawlJobSize = sb.crawlQueues.remoteTriggeredCrawlJobSize();
  int noloadCrawlJobSize = sb.crawlQueues.noloadCrawlJobSize();
  int allsize = coreCrawlJobSize + limitCrawlJobSize + remoteTriggeredCrawlJobSize + noloadCrawlJobSize;

  prop.put("localCrawlSize", coreCrawlJobSize);
  prop.put("localCrawlState", "");
  prop.put("limitCrawlSize", limitCrawlJobSize);
  prop.put("limitCrawlState", "");
  prop.put("remoteCrawlSize", remoteTriggeredCrawlJobSize);
  prop.put("remoteCrawlState", "");
  prop.put("noloadCrawlSize", noloadCrawlJobSize);
  prop.put("noloadCrawlState", "");
  prop.put("terminate-button", allsize == 0 ? 0 : 1);
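  // Orientation: serverObjects is the template model of the YaCy servlet engine; every
  // put()/putNum()/putHTML() call fills a #[key]# placeholder in the corresponding
  // .html template. A minimal sketch of the pattern (placeholder names here are
  // illustrative only, not taken from this servlet):
  //
  //   final serverObjects model = new serverObjects();
  //   model.putNum("docCount", 42);        // fills #[docCount]# with a formatted number
  //   model.putHTML("note", "<msg>");      // HTML-escapes the value before insertion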
  prop.put("list-remote", 0);
  prop.put("forwardToCrawlStart", "0");
  prop.put("info", "0");

  boolean debug = (post != null && post.containsKey("debug"));
  if (post != null) {
    String c = post.toString();
    if (c.length() < 1000) ConcurrentLog.info("Crawl Start", c);
  }

  if (post != null && post.containsKey("queues_terminate_all")) {
    // terminate crawls individually
    sb.crawlQueues.noticeURL.clear();
    for (final byte[] h : sb.crawler.getActive()) {
      CrawlProfile p = sb.crawler.getActive(h);
      if (p == null) continue; // check for null before dereferencing the profile
      if (CrawlSwitchboard.DEFAULT_PROFILES.contains(p.name())) continue;
      sb.crawler.putPassive(h, p);
      sb.crawler.removeActive(h);
      sb.crawler.removePassive(h);
      try {
        sb.crawlQueues.noticeURL.removeByProfileHandle(p.handle(), 10000);
      } catch (SpaceExceededException e) {
      }
    }

    // clear stacks
    for (StackType stackType : StackType.values()) sb.crawlQueues.noticeURL.clear(stackType);
    try {
      sb.cleanProfiles();
    } catch (final InterruptedException e) {
      /* ignore this */
    }

    // remove pause
    sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
    sb.setConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL + "_isPaused_cause", "");
    sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
    sb.setConfig(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL + "_isPaused_cause", "");
    prop.put("terminate-button", 0);
  }

  if (post != null && post.containsKey("continue")) {
    // continue queue
    final String queue = post.get("continue", "");
    if ("localcrawler".equals(queue)) {
      sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
      sb.setConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL + "_isPaused_cause", "");
    } else if ("remotecrawler".equals(queue)) {
      sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
      sb.setConfig(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL + "_isPaused_cause", "");
    }
  }

  if (post != null && post.containsKey("pause")) {
    // pause queue
    final String queue = post.get("pause", "");
    if ("localcrawler".equals(queue)) {
      sb.pauseCrawlJob(
          SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL,
          "user request in Crawler_p from " + header.refererHost());
    } else if ("remotecrawler".equals(queue)) {
      sb.pauseCrawlJob(
          SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL,
          "user request in Crawler_p from " + header.refererHost());
    }
  }

  String queuemessage = sb.getConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL + "_isPaused_cause", "");
  if (queuemessage.length() == 0) {
    prop.put("info-queue", 0);
  } else {
    prop.put("info-queue", 1);
    prop.putHTML("info-queue_message", "pause reason: " + queuemessage);
  }

  if (post != null && post.containsKey("terminate"))
    try {
      final String handle = post.get("handle", "");
      // termination of a crawl: shift the crawl from active to passive
      final CrawlProfile p = sb.crawler.getActive(handle.getBytes());
      if (p != null) sb.crawler.putPassive(handle.getBytes(), p);
      // delete all entries from the crawl queue that belong to this handle
      sb.crawler.removeActive(handle.getBytes());
      sb.crawler.removePassive(handle.getBytes());
      sb.crawlQueues.noticeURL.removeByProfileHandle(handle, 10000);
    } catch (final SpaceExceededException e) {
      ConcurrentLog.logException(e);
    }

  if (post != null && post.containsKey("crawlingstart")) {
    // init crawl
    if (sb.peers == null) {
      prop.put("info", "3");
    } else {
      // remove crawlingFileContent before we record the call
      String crawlingFileName = post.get("crawlingFile");
      final File crawlingFile;
      if (crawlingFileName == null || crawlingFileName.isEmpty()) {
        crawlingFile = null;
      } else {
        if (crawlingFileName.startsWith("file://")) crawlingFileName = crawlingFileName.substring(7);
        crawlingFile = new File(crawlingFileName);
      }
      if (crawlingFile != null && crawlingFile.exists()) {
        post.remove("crawlingFile$file");
      }
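      // Note: for uploaded form files the servlet engine keeps the file body under the
      // field name with a "$file" suffix, so "crawlingFile$file" holds the raw content
      // of the uploaded start-url list; it was removed above so that the API call
      // recorded further below does not persist the whole file content.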
      // prepare some filters that are adjusted in case that this is wanted
      boolean storeHTCache = "on".equals(post.get("storeHTCache", "off"));
      String newcrawlingMustMatch = post.get("mustmatch", CrawlProfile.MATCH_ALL_STRING);
      String newcrawlingMustNotMatch = post.get("mustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
      // avoid that all urls are filtered out if a bad value was submitted
      if (newcrawlingMustMatch.length() < 2) newcrawlingMustMatch = CrawlProfile.MATCH_ALL_STRING;

      boolean fullDomain = "domain".equals(post.get("range", "wide")); // special property in simple crawl start
      boolean subPath = "subpath".equals(post.get("range", "wide")); // special property in simple crawl start

      final boolean restrictedcrawl =
          fullDomain || subPath || !CrawlProfile.MATCH_ALL_STRING.equals(newcrawlingMustMatch);
      final boolean deleteage = restrictedcrawl && "age".equals(post.get("deleteold", "off"));
      Date deleteageDate = null;
      if (deleteage) {
        deleteageDate =
            timeParser(
                true,
                post.getInt("deleteIfOlderNumber", -1),
                post.get("deleteIfOlderUnit", "year")); // year, month, day, hour
      }
      final boolean deleteold =
          (deleteage && deleteageDate != null) || (restrictedcrawl && post.getBoolean("deleteold"));

      final String sitemapURLStr = post.get("sitemapURL", "");
      String crawlingStart0 = post.get("crawlingURL", "").trim(); // the crawljob start url
      String[] rootURLs0 =
          crawlingStart0.indexOf('\n') > 0 || crawlingStart0.indexOf('\r') > 0
              ? crawlingStart0.split("[\\r\\n]+")
              : crawlingStart0.split(Pattern.quote("|"));
      Set<DigestURL> rootURLs = new HashSet<DigestURL>();
      String crawlName = "";
      if (crawlingFile == null) {
        for (String crawlingStart : rootURLs0) {
          if (crawlingStart == null || crawlingStart.length() == 0) continue;
          // add the prefix http:// if necessary
          int pos = crawlingStart.indexOf("://", 0);
          if (pos == -1) {
            if (crawlingStart.startsWith("ftp")) crawlingStart = "ftp://" + crawlingStart;
            else crawlingStart = "http://" + crawlingStart;
          }
          try {
            DigestURL crawlingStartURL = new DigestURL(crawlingStart);
            rootURLs.add(crawlingStartURL);
            crawlName +=
                ((crawlingStartURL.getHost() == null)
                        ? crawlingStartURL.toNormalform(true)
                        : crawlingStartURL.getHost())
                    + ',';
            if (crawlingStartURL.isFile() || crawlingStartURL.isSMB()) storeHTCache = false;
          } catch (final MalformedURLException e) {
            ConcurrentLog.logException(e);
          }
        }
      } else {
        crawlName = crawlingFile.getName();
      }
      if (crawlName.endsWith(",")) crawlName = crawlName.substring(0, crawlName.length() - 1);
      if (crawlName.length() > 64) {
        crawlName =
            "crawl_for_" + rootURLs.size() + "_start_points_" + Integer.toHexString(crawlName.hashCode());
        int p = crawlName.lastIndexOf(',');
        if (p >= 8) crawlName = crawlName.substring(0, p);
      }
      if (crawlName.length() == 0 && sitemapURLStr.length() > 0)
        crawlName = "sitemap loader for " + sitemapURLStr;

      // if a root url uses the file protocol, the site filter does not work;
      // patch that by switching from full-domain to subpath mode
      if (fullDomain) {
        for (DigestURL u : rootURLs)
          if (u.isFile()) {
            fullDomain = false;
            subPath = true;
            break;
          }
      }

      // delete old robots entries
      for (DigestURL ru : rootURLs) {
        sb.robots.delete(ru);
        try {
          if (ru.getHost() != null) { // might be null for file://
            Cache.delete(RobotsTxt.robotsURL(RobotsTxt.getHostPort(ru)).hash());
          }
        } catch (IOException e) {
        }
      }
      try {
        sb.robots.clear(); // to be safe: clear all
      } catch (IOException e) {
      }
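      // The following section collects the crawl filter chain. All filters are Java
      // regular expressions matched against the normalized URL (or against the IP /
      // country code); CrawlProfile.MATCH_ALL_STRING accepts every URL and
      // CrawlProfile.MATCH_NEVER_STRING matches none. For example, a mustmatch of
      //   .*\.example\.org/docs/.*
      // would restrict the crawl to the /docs/ subtree of *.example.org (illustrative
      // value, not computed by this servlet).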
      // set the crawl filter
      String ipMustMatch = post.get("ipMustmatch", CrawlProfile.MATCH_ALL_STRING);
      final String ipMustNotMatch = post.get("ipMustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
      if (ipMustMatch.length() < 2) ipMustMatch = CrawlProfile.MATCH_ALL_STRING;
      final String countryMustMatch =
          post.getBoolean("countryMustMatchSwitch") ? post.get("countryMustMatchList", "") : "";
      sb.setConfig("crawlingIPMustMatch", ipMustMatch);
      sb.setConfig("crawlingIPMustNotMatch", ipMustNotMatch);
      if (countryMustMatch.length() > 0) sb.setConfig("crawlingCountryMustMatch", countryMustMatch);

      String crawlerNoDepthLimitMatch = post.get("crawlingDepthExtension", CrawlProfile.MATCH_NEVER_STRING);
      final String indexUrlMustMatch = post.get("indexmustmatch", CrawlProfile.MATCH_ALL_STRING);
      final String indexUrlMustNotMatch = post.get("indexmustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
      final String indexContentMustMatch = post.get("indexcontentmustmatch", CrawlProfile.MATCH_ALL_STRING);
      final String indexContentMustNotMatch =
          post.get("indexcontentmustnotmatch", CrawlProfile.MATCH_NEVER_STRING);

      final boolean crawlOrder = post.get("crawlOrder", "off").equals("on");
      env.setConfig("crawlOrder", crawlOrder);
      // without a depth limitation the crawl order does not work
      if (crawlOrder) crawlerNoDepthLimitMatch = CrawlProfile.MATCH_NEVER_STRING;

      int newcrawlingdepth = post.getInt("crawlingDepth", 8);
      env.setConfig("crawlingDepth", Integer.toString(newcrawlingdepth));
      if (crawlOrder && newcrawlingdepth > 8) newcrawlingdepth = 8;

      // catch also all linked media documents without loading them
      boolean directDocByURL = "on".equals(post.get("directDocByURL", "off"));
      env.setConfig("crawlingDirectDocByURL", directDocByURL);

      final String collection = post.get("collection", "user");
      env.setConfig("collection", collection);

      // recrawl
      final String recrawl = post.get("recrawl", "nodoubles"); // nodoubles, reload, scheduler
      Date crawlingIfOlder = null;
      if ("reload".equals(recrawl)) {
        crawlingIfOlder =
            timeParser(
                true,
                post.getInt("reloadIfOlderNumber", -1),
                post.get("reloadIfOlderUnit", "year")); // year, month, day, hour
      }
      env.setConfig(
          "crawlingIfOlder", crawlingIfOlder == null ? Long.MAX_VALUE : crawlingIfOlder.getTime());

      // store this call as api call
      sb.tables.recordAPICall(
          post,
          "Crawler_p.html",
          WorkTables.TABLE_API_TYPE_CRAWLER,
          "crawl start for "
              + ((rootURLs.size() == 0)
                  ? post.get("crawlingFile", "")
                  : rootURLs.iterator().next().toNormalform(true)));

      final boolean crawlingDomMaxCheck = "on".equals(post.get("crawlingDomMaxCheck", "off"));
      final int crawlingDomMaxPages = crawlingDomMaxCheck ? post.getInt("crawlingDomMaxPages", -1) : -1;
      env.setConfig("crawlingDomMaxPages", Integer.toString(crawlingDomMaxPages));

      boolean followFrames = "on".equals(post.get("followFrames", "false"));
      env.setConfig("followFrames", followFrames);
      boolean obeyHtmlRobotsNoindex = "on".equals(post.get("obeyHtmlRobotsNoindex", "false"));
      env.setConfig("obeyHtmlRobotsNoindex", obeyHtmlRobotsNoindex);
      boolean obeyHtmlRobotsNofollow = "on".equals(post.get("obeyHtmlRobotsNofollow", "false"));
      env.setConfig("obeyHtmlRobotsNofollow", obeyHtmlRobotsNofollow);
      final boolean indexText = "on".equals(post.get("indexText", "false"));
      env.setConfig("indexText", indexText);
      final boolean indexMedia = "on".equals(post.get("indexMedia", "false"));
      env.setConfig("indexMedia", indexMedia);
      env.setConfig("storeHTCache", storeHTCache);
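      // Worked example for the recrawl option above: with recrawl=reload,
      // reloadIfOlderNumber=7 and reloadIfOlderUnit=day, timeParser(...) yields a Date
      // 7 days in the past, so documents loaded before that date are fetched again;
      // with recrawl=nodoubles, crawlingIfOlder stays null and the stored
      // Long.MAX_VALUE effectively means "never reload an already-known URL".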
      String defaultAgentName =
          sb.isIntranetMode()
              ? ClientIdentification.yacyIntranetCrawlerAgentName
              : ClientIdentification.yacyInternetCrawlerAgentName;
      String agentName = post.get("agentName", defaultAgentName);
      ClientIdentification.Agent agent = ClientIdentification.getAgent(agentName);
      if (agent == null) agent = ClientIdentification.getAgent(defaultAgentName);

      CacheStrategy cachePolicy = CacheStrategy.parse(post.get("cachePolicy", "iffresh"));
      if (cachePolicy == null) cachePolicy = CacheStrategy.IFFRESH;

      String crawlingMode = post.get("crawlingMode", "url");
      if ("file".equals(crawlingMode) && post.containsKey("crawlingFile")) {
        newcrawlingMustNotMatch = CrawlProfile.MATCH_NEVER_STRING;
        directDocByURL = false;
      }
      if ("sitemap".equals(crawlingMode)) {
        newcrawlingMustMatch = CrawlProfile.MATCH_ALL_STRING;
        newcrawlingMustNotMatch = CrawlProfile.MATCH_NEVER_STRING;
        newcrawlingdepth = 0;
        directDocByURL = false;
      }
      if ("sitelist".equals(crawlingMode)) {
        newcrawlingMustNotMatch = CrawlProfile.MATCH_NEVER_STRING;
        Set<DigestURL> newRootURLs = new HashSet<DigestURL>();
        for (DigestURL sitelistURL : rootURLs) {
          // download the document
          Document scraper;
          try {
            scraper = sb.loader.loadDocument(sitelistURL, CacheStrategy.IFFRESH, BlacklistType.CRAWLER, agent);
            // get links and generate filter
            for (DigestURL u : scraper.getHyperlinks().keySet()) {
              newRootURLs.add(u);
            }
          } catch (final IOException e) {
            ConcurrentLog.logException(e);
          }
        }
        rootURLs = newRootURLs;
        crawlingMode = "url";
        // prevent that a restriction is put on the original urls
        if ((fullDomain || subPath) && newcrawlingdepth > 0)
          newcrawlingMustMatch = CrawlProfile.MATCH_ALL_STRING;
      }

      // delete all error urls for that domain
      // and all urls for that host from the crawl queue
      Set<String> hosthashes = new HashSet<String>();
      boolean anysmbftporpdf = false;
      for (DigestURL u : rootURLs) {
        sb.index.fulltext().remove(u.hash());
        hosthashes.add(u.hosthash());
        if ("smb".equals(u.getProtocol())
            || "ftp".equals(u.getProtocol())
            || "pdf".equals(MultiProtocolURL.getFileExtension(u.getFileName())))
          anysmbftporpdf = true;
      }
      sb.crawlQueues.removeHosts(hosthashes);
      sb.index.fulltext().commit(true);

      boolean crawlingQ =
          anysmbftporpdf || "on".equals(post.get("crawlingQ", "off")) || "sitemap".equals(crawlingMode);
      env.setConfig("crawlingQ", crawlingQ);

      // compute mustmatch filter according to rootURLs
      if ((fullDomain || subPath) && newcrawlingdepth > 0) {
        String siteFilter = ".*";
        if (fullDomain) {
          siteFilter = CrawlProfile.siteFilter(rootURLs);
          if (deleteold) {
            sb.index.fulltext().deleteStaleDomainHashes(hosthashes, deleteageDate);
          }
        } else if (subPath) {
          siteFilter = CrawlProfile.subpathFilter(rootURLs);
          if (deleteold) {
            for (DigestURL u : rootURLs) {
              String basepath = u.toNormalform(true);
              if (!basepath.endsWith("/")) {
                int p = basepath.lastIndexOf("/");
                if (p > 0) basepath = basepath.substring(0, p + 1);
              }
              int count = sb.index.fulltext().remove(basepath, deleteageDate);
              if (count > 0)
                ConcurrentLog.info("Crawler_p", "deleted " + count + " documents for host " + u.getHost());
            }
          }
        }
        if (CrawlProfile.MATCH_ALL_STRING.equals(newcrawlingMustMatch)) {
          newcrawlingMustMatch = siteFilter;
        } else if (!CrawlProfile.MATCH_ALL_STRING.equals(siteFilter)) {
          // combine both
          newcrawlingMustMatch = "(" + newcrawlingMustMatch + ")|(" + siteFilter + ")";
        }
      }
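      // Example of the combination above: a user mustmatch of ".*\.html" together with
      // a computed site filter of "https?://(www\.)?example\.org.*" becomes
      //   (.*\.html)|(https?://(www\.)?example\.org.*)
      // i.e. a URL passes if it matches either expression (illustrative patterns; the
      // exact strings are produced by CrawlProfile.siteFilter/subpathFilter).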
      // check if the crawl filter works correctly
      try {
        Pattern mmp = Pattern.compile(newcrawlingMustMatch);
        for (DigestURL u : rootURLs) {
          assert mmp.matcher(u.toNormalform(true)).matches()
              : "pattern " + mmp.toString() + " does not match url " + u.toNormalform(true);
        }
      } catch (final PatternSyntaxException e) {
        prop.put("info", "4"); // crawlfilter does not match url
        prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
        prop.putHTML("info_error", e.getMessage());
      }

      boolean hasCrawlstartDataOK = !crawlName.isEmpty();
      if (hasCrawlstartDataOK) {
        // check that a crawl url was given in sitecrawl mode
        if ("url".equals(crawlingMode) && rootURLs.size() == 0) hasCrawlstartDataOK = false;
      }

      String snapshotsMaxDepthString = post.get("snapshotsMaxDepth", "-1");
      int snapshotsMaxDepth = Integer.parseInt(snapshotsMaxDepthString);
      boolean snapshotsLoadImage = post.getBoolean("snapshotsLoadImage");
      boolean snapshotsReplaceOld = post.getBoolean("snapshotsReplaceOld");
      String snapshotsMustnotmatch = post.get("snapshotsMustnotmatch", "");

      // get vocabulary scraper info:
      // key = vocabulary_name, value = properties with key = type (i.e. 'class')
      // and value = keyword in context
      JSONObject vocabulary_scraper = new JSONObject();
      for (String key : post.keySet()) {
        if (key.startsWith("vocabulary_")) {
          if (key.endsWith("_class")) {
            String vocabulary = key.substring(11, key.length() - 6);
            String value = post.get(key);
            if (value != null && value.length() > 0) {
              JSONObject props;
              try {
                props = vocabulary_scraper.getJSONObject(vocabulary);
              } catch (JSONException e) {
                props = new JSONObject();
                vocabulary_scraper.put(vocabulary, props);
              }
              props.put("class", value);
            }
          }
        }
      }

      int timezoneOffset = post.getInt("timezoneOffset", 0);
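      // Shape of the vocabulary_scraper object assembled above, assuming a form field
      // "vocabulary_topics_class" with value "tag-cloud" (hypothetical names):
      //   { "topics": { "class": "tag-cloud" } }
      // i.e. one JSON object per vocabulary, mapping the scraper type ("class") to the
      // CSS class whose element content feeds that vocabulary.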
      // in case that we crawl from a file, load that file and re-compute the mustmatch pattern
      List<AnchorURL> hyperlinks_from_file = null;
      if ("file".equals(crawlingMode) && post.containsKey("crawlingFile") && crawlingFile != null) {
        final String crawlingFileContent = post.get("crawlingFile$file", "");
        try {
          // check if the crawl filter works correctly
          final ContentScraper scraper =
              new ContentScraper(
                  new DigestURL(crawlingFile), 10000000, new VocabularyScraper(), timezoneOffset);
          final Writer writer = new TransformerWriter(null, null, scraper, null, false);
          if (crawlingFile.exists()) {
            FileUtils.copy(new FileInputStream(crawlingFile), writer);
          } else {
            FileUtils.copy(crawlingFileContent, writer);
          }
          writer.close();

          // get links and generate filter
          hyperlinks_from_file = scraper.getAnchors();
          if (newcrawlingdepth > 0) {
            if (fullDomain) {
              newcrawlingMustMatch = CrawlProfile.siteFilter(hyperlinks_from_file);
            } else if (subPath) {
              newcrawlingMustMatch = CrawlProfile.subpathFilter(hyperlinks_from_file);
            }
          }
        } catch (final Exception e) {
          // error case
          prop.put("info", "7"); // error with file
          prop.putHTML("info_crawlingStart", crawlingFileName);
          prop.putHTML("info_error", e.getMessage());
          ConcurrentLog.logException(e);
        }
        sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
      }

      // prepare a new crawling profile
      final CrawlProfile profile;
      byte[] handle;
      if (hasCrawlstartDataOK) {
        profile =
            new CrawlProfile(
                crawlName,
                newcrawlingMustMatch,
                newcrawlingMustNotMatch,
                ipMustMatch,
                ipMustNotMatch,
                countryMustMatch,
                crawlerNoDepthLimitMatch,
                indexUrlMustMatch,
                indexUrlMustNotMatch,
                indexContentMustMatch,
                indexContentMustNotMatch,
                newcrawlingdepth,
                directDocByURL,
                crawlingIfOlder,
                crawlingDomMaxPages,
                crawlingQ,
                followFrames,
                obeyHtmlRobotsNoindex,
                obeyHtmlRobotsNofollow,
                indexText,
                indexMedia,
                storeHTCache,
                crawlOrder,
                snapshotsMaxDepth,
                snapshotsLoadImage,
                snapshotsReplaceOld,
                snapshotsMustnotmatch,
                cachePolicy,
                collection,
                agentName,
                new VocabularyScraper(vocabulary_scraper),
                timezoneOffset);
        handle = ASCII.getBytes(profile.handle());

        // before we fire up a new crawl, we make sure that another crawl
        // with the same name is not running
        sb.crawler.removeActive(handle);
        sb.crawler.removePassive(handle);
        try {
          sb.crawlQueues.noticeURL.removeByProfileHandle(profile.handle(), 10000);
        } catch (final SpaceExceededException e1) {
        }
      } else {
        profile = null;
        handle = null;
      }

      // start the crawl
      if ("url".equals(crawlingMode)) {
        if (rootURLs.size() == 0) {
          prop.put("info", "5"); // crawling failed
          prop.putHTML("info_crawlingURL", "(no url given)");
          prop.putHTML("info_reasonString", "you must submit at least one crawl url");
        } else {
          // stack requests
          sb.crawler.putActive(handle, profile);
          final Set<DigestURL> successurls = new HashSet<DigestURL>();
          final Map<DigestURL, String> failurls = new HashMap<DigestURL, String>();
          sb.stackURLs(rootURLs, profile, successurls, failurls);
          if (failurls.size() == 0) {
            // liftoff!
            prop.put("info", "8");
            prop.putHTML("info_crawlingURL", post.get("crawlingURL"));

            // generate a YaCyNews if the global flag was set
            if (!sb.isRobinsonMode() && crawlOrder) {
              final Map<String, String> m = new HashMap<String, String>(profile); // must be cloned
              m.remove("specificDepth");
              m.remove("indexText");
              m.remove("indexMedia");
              m.remove("remoteIndexing");
              m.remove("xsstopw");
              m.remove("xpstopw");
              m.remove("xdstopw");
              m.remove("storeTXCache");
              m.remove("storeHTCache");
              m.remove("generalFilter");
              m.remove("specificFilter");
              m.put("intention", post.get("intention", "").replace(',', '/'));
              sb.peers.newsPool.publishMyNews(sb.peers.mySeed(), NewsPool.CATEGORY_CRAWL_START, m);
            }
          } else {
            StringBuilder fr = new StringBuilder();
            for (Map.Entry<DigestURL, String> failure : failurls.entrySet()) {
              sb.crawlQueues.errorURL.push(
                  failure.getKey(), 0, null, FailCategory.FINAL_LOAD_CONTEXT, failure.getValue(), -1);
              fr.append(failure.getValue()).append('/');
            }
            prop.put("info", "5"); // crawling failed
            prop.putHTML("info_crawlingURL", post.get("crawlingURL"));
            prop.putHTML("info_reasonString", fr.toString());
          }
          if (successurls.size() > 0) sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
        }
      } else if ("sitemap".equals(crawlingMode)) {
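        // sitemapURLStr may be absolute ("http://host/sitemap.xml") or - although such
        // values should not occur - relative; in the relative case it is resolved below
        // against the first root URL.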
        try {
          final DigestURL sitemapURL =
              sitemapURLStr.indexOf("//") > 0
                  ? new DigestURL(sitemapURLStr)
                  : new DigestURL(rootURLs.iterator().next(), sitemapURLStr);
          sb.crawler.putActive(handle, profile);
          final SitemapImporter importer = new SitemapImporter(sb, sitemapURL, profile);
          importer.start();
          sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
        } catch (final Exception e) {
          // error case
          prop.put("info", "6"); // error with url
          prop.putHTML("info_crawlingStart", sitemapURLStr);
          prop.putHTML("info_error", e.getMessage());
          ConcurrentLog.logException(e);
        }
      } else if ("file".equals(crawlingMode)) {
        if (post.containsKey("crawlingFile") && crawlingFile != null && hyperlinks_from_file != null) {
          try {
            if (newcrawlingdepth > 0) {
              if (fullDomain) {
                newcrawlingMustMatch = CrawlProfile.siteFilter(hyperlinks_from_file);
              } else if (subPath) {
                newcrawlingMustMatch = CrawlProfile.subpathFilter(hyperlinks_from_file);
              }
            }
            sb.crawler.putActive(handle, profile);
            sb.crawlStacker.enqueueEntriesAsynchronous(
                sb.peers.mySeed().hash.getBytes(),
                profile.handle(),
                hyperlinks_from_file,
                profile.timezoneOffset());
          } catch (final PatternSyntaxException e) {
            prop.put("info", "4"); // crawlfilter does not match url
            prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
            prop.putHTML("info_error", e.getMessage());
          } catch (final Exception e) {
            // error case
            prop.put("info", "7"); // error with file
            prop.putHTML("info_crawlingStart", crawlingFileName);
            prop.putHTML("info_error", e.getMessage());
            ConcurrentLog.logException(e);
          }
          sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
        }
      }
    }
  }

  /*
   * <input id="customPPM" name="customPPM" type="number" min="10" max="30000"
   *        style="width:46px" value="#[customPPMdefault]#" />PPM
   * <input id="latencyFactor" name="latencyFactor" type="number" min="0.1" max="3.0"
   *        step="0.1" style="width:32px" value="#[latencyFactorDefault]#" />LF
   * <input id="MaxSameHostInQueue" name="MaxSameHostInQueue" type="number" min="1" max="30"
   *        style="width:32px" value="#[MaxSameHostInQueueDefault]#" />MH
   * <input type="submit" name="crawlingPerformance" value="set" />
   * (<a href="/Crawler_p.html?crawlingPerformance=minimum">min</a>/
   *  <a href="/Crawler_p.html?crawlingPerformance=maximum">max</a>)
   * </td>
   */
  if (post != null && post.containsKey("crawlingPerformance")) {
    final String crawlingPerformance = post.get("crawlingPerformance", "custom");
    final long LCbusySleep1 = sb.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 1000L);
    int wantedPPM = (LCbusySleep1 == 0) ? 30000 : (int) (60000L / LCbusySleep1);
    try {
      wantedPPM = post.getInt("customPPM", wantedPPM);
    } catch (final NumberFormatException e) {
    }
    if ("minimum".equals(crawlingPerformance.toLowerCase())) wantedPPM = 10;
    if ("maximum".equals(crawlingPerformance.toLowerCase())) wantedPPM = 30000;

    int wPPM = wantedPPM;
    if (wPPM <= 0) wPPM = 1;
    if (wPPM >= 30000) wPPM = 30000;
    final int newBusySleep = 60000 / wPPM; // for wantedPPM = 10: 6000; for wantedPPM = 1000: 60
    final float loadprereq =
        wantedPPM <= 10 ? 1.0f : wantedPPM <= 100 ? 2.0f : wantedPPM >= 1000 ? 8.0f : 3.0f;
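    // Arithmetic behind the PPM setting: the busy-sleep is the pause between crawl
    // cycles, so newBusySleep = 60000 / wPPM milliseconds yields wPPM pages per minute,
    // e.g. wPPM = 10 -> 6000 ms sleep, wPPM = 1000 -> 60 ms sleep; loadprereq is used
    // below as the system-load threshold under which the crawl thread may still run,
    // allowing faster crawls to tolerate a higher load.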
    BusyThread thread = sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
    if (thread != null) {
      sb.setConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, thread.setBusySleep(newBusySleep));
      sb.setConfig(
          SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_LOADPREREQ, thread.setLoadPreReqisite(loadprereq));
      thread.setIdleSleep(2000);
    }
    float latencyFactor = post.getFloat("latencyFactor", 0.5f);
    int MaxSameHostInQueue = post.getInt("MaxSameHostInQueue", 20);
    env.setConfig(SwitchboardConstants.CRAWLER_LATENCY_FACTOR, latencyFactor);
    env.setConfig(SwitchboardConstants.CRAWLER_MAX_SAME_HOST_IN_QUEUE, MaxSameHostInQueue);
  }

  // performance settings
  final long LCbusySleep = env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 1000L);
  final int LCppm = (int) (60000L / Math.max(1, LCbusySleep));
  prop.put("customPPMdefault", Integer.toString(LCppm));
  prop.put("latencyFactorDefault", env.getConfigFloat(SwitchboardConstants.CRAWLER_LATENCY_FACTOR, 0.5f));
  prop.put(
      "MaxSameHostInQueueDefault",
      env.getConfigInt(SwitchboardConstants.CRAWLER_MAX_SAME_HOST_IN_QUEUE, 20));

  // generate crawl profile table
  int count = 0;
  boolean dark = true;
  final int domlistlength = (post == null) ? 160 : post.getInt("domlistlength", 160);
  CrawlProfile profile;

  // put active crawls into list
  String hosts = "";
  for (final byte[] h : sb.crawler.getActive()) {
    profile = sb.crawler.getActive(h);
    if (profile == null || CrawlSwitchboard.DEFAULT_PROFILES.contains(profile.name())) continue;
    profile.putProfileEntry("crawlProfilesShow_list_", prop, true, dark, count, domlistlength);
    prop.put("crawlProfilesShow_list_" + count + "_debug", debug ? 1 : 0);
    if (debug) {
      RowHandleSet urlhashes = sb.crawler.getURLHashes(h);
      prop.put(
          "crawlProfilesShow_list_" + count + "_debug_count",
          urlhashes == null ? "unknown" : Integer.toString(urlhashes.size()));
    }
    hosts = hosts + "," + profile.name();
    dark = !dark;
    count++;
  }
  prop.put("crawlProfilesShow_debug", debug ? 1 : 0);
  prop.put("crawlProfilesShow_list", count);
  prop.put("crawlProfilesShow_count", count);
  prop.put("crawlProfilesShow", count == 0 ? 0 : 1);
  prop.put("crawlProfilesShow_linkstructure", 0);

  if (post != null) {
    // handle config button to display the graphic
    if (post.get("hidewebstructuregraph") != null)
      sb.setConfig(SwitchboardConstants.DECORATION_GRAFICS_LINKSTRUCTURE, false);
    if (post.get("showwebstructuregraph") != null)
      sb.setConfig(SwitchboardConstants.DECORATION_GRAFICS_LINKSTRUCTURE, true);
  }

  if (count > 0 && sb.getConfigBool(SwitchboardConstants.DECORATION_GRAFICS_LINKSTRUCTURE, true)) {
    // collect the host names for 'wide' crawls which can be visualized
    boolean showLinkstructure = hosts.length() > 0 && !hosts.contains("file:");
    if (showLinkstructure) {
      StringBuilder q = new StringBuilder();
      hosts = hosts.substring(1);
      q.append(CollectionSchema.host_s.getSolrFieldName())
          .append(':')
          .append(hosts)
          .append(" OR ")
          .append(CollectionSchema.host_s.getSolrFieldName())
          .append(':')
          .append("www.")
          .append(hosts);
      try {
        prop.put(
            "crawlProfilesShow_linkstructure",
            count == 1 && sb.index.fulltext().getDefaultConnector().getCountByQuery(q.toString()) > 0
                ? 1
                : 2);
        prop.put("crawlProfilesShow_linkstructure_hosts", hosts);
      } catch (IOException e) {
      }
    }
  }

  // return rewrite properties
  return prop;
}
public static serverObjects respond(
    final RequestHeader header, final serverObjects post, final serverSwitch env) {
  // return variable that accumulates replacements
  final serverObjects prop = new serverObjects();
  final Switchboard sb = (Switchboard) env;

  // set if this should be visible
  if (yacyBuildProperties.isPkgManager()) {
    prop.put("candeploy", "2");
    return prop;
  } else if (OS.canExecUnix || OS.isWindows) {
    // we can deploy a new system with (i.e.)
    // cd DATA/RELEASE;tar xfz $1;cp -Rf yacy/* ../../;rm -Rf yacy
    prop.put("candeploy", "1");
  } else {
    prop.put("candeploy", "0");
  }

  prop.put("candeploy_configCommit", "0");
  prop.put("candeploy_autoUpdate", "0");
  prop.put("candeploy_downloadsAvailable", "0");

  if (post != null) {
    // check if an update is supposed to be installed and a release is defined
    if (post.containsKey("update") && !post.get("releaseinstall", "").isEmpty()) {
      prop.put("forwardToSteering", "1");
      prop.putHTML("forwardToSteering_release", post.get("releaseinstall", ""));
      prop.put("deploys", "1");
      prop.put("candeploy", "2"); // display nothing else
      return prop;
    }

    if (post.containsKey("downloadRelease")) {
      // download a release
      final String release = post.get("releasedownload", "");
      if (!release.isEmpty()) {
        try {
          yacyRelease versionToDownload = new yacyRelease(new DigestURI(release));
          // replace this version with the version that contains the public key
          final yacyRelease.DevAndMainVersions allReleases = yacyRelease.allReleases(false, false);
          final Set<yacyRelease> mostReleases =
              versionToDownload.isMainRelease() ? allReleases.main : allReleases.dev;
          for (final yacyRelease rel : mostReleases) {
            if (rel.equals(versionToDownload)) {
              versionToDownload = rel;
              break;
            }
          }
          versionToDownload.downloadRelease();
        } catch (final IOException e) {
          Log.logException(e);
        }
      }
    }

    if (post.containsKey("checkRelease")) {
      yacyRelease.allReleases(true, false);
    }

    if (post.containsKey("deleteRelease")) {
      final String release = post.get("releaseinstall", "");
      if (!release.isEmpty()) {
        try {
          FileUtils.deletedelete(new File(sb.releasePath, release));
          FileUtils.deletedelete(new File(sb.releasePath, release + ".sig"));
        } catch (final NullPointerException e) {
          sb.getLog()
              .logSevere("AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage());
        }
      }
    }

    if (post.containsKey("autoUpdate")) {
      final yacyRelease updateVersion = yacyRelease.rulebasedUpdateInfo(true);
      if (updateVersion == null) {
        prop.put("candeploy_autoUpdate", "2"); // no more recent release found
      } else {
        // there is a more recent version: load it and restart with it
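        // Steps that follow: download the release archive, refuse to deploy inside a
        // development working copy, sanity-check the download (existence, size,
        // signature), then unpack via deployRelease() and terminate YaCy so that the
        // restart script picks up the new version.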
        sb.getLog().logInfo("AUTO-UPDATE: downloading more recent release " + updateVersion.getUrl());
        final File downloaded = updateVersion.downloadRelease();
        prop.putHTML("candeploy_autoUpdate_downloadedRelease", updateVersion.getName());
        final boolean devenvironment = new File(sb.getAppPath(), ".svn").exists();
        if (devenvironment) {
          sb.getLog().logInfo("AUTO-UPDATE: omitting update because this is a development environment");
          prop.put("candeploy_autoUpdate", "3");
        } else if ((downloaded == null) || (!downloaded.exists()) || (downloaded.length() == 0)) {
          sb.getLog()
              .logInfo(
                  "AUTO-UPDATE: omitting update because download failed (file cannot be found, is too small or signature was bad)");
          prop.put("candeploy_autoUpdate", "4");
        } else {
          yacyRelease.deployRelease(downloaded);
          sb.terminate(10, "manual release update to " + downloaded.getName());
          sb.getLog().logInfo("AUTO-UPDATE: deploy and restart initiated");
          prop.put("candeploy_autoUpdate", "1");
        }
      }
    }

    if (post.containsKey("configSubmit")) {
      prop.put("candeploy_configCommit", "1");
      sb.setConfig(
          "update.process", ("manual".equals(post.get("updateMode", "manual"))) ? "manual" : "auto");
      sb.setConfig("update.cycle", Math.max(12, post.getLong("cycle", 168)));
      sb.setConfig("update.blacklist", post.get("blacklist", ""));
      sb.setConfig("update.concept", ("any".equals(post.get("releaseType", "any"))) ? "any" : "main");
      sb.setConfig("update.onlySignedFiles", (post.getBoolean("onlySignedFiles", false)) ? "1" : "0");
    }
  }

  // version information
  final String versionstring =
      yacyBuildProperties.getVersion() + "/" + yacyBuildProperties.getSVNRevision();
  prop.putHTML("candeploy_versionpp", versionstring);

  final boolean devenvironment = new File(sb.getAppPath(), ".svn").exists();
  float thisVersion = 0f;
  try {
    // cut off the SVN revision from the version string
    thisVersion = Float.parseFloat(yacyBuildProperties.getVersion());
    thisVersion = (float) (Math.round(thisVersion * 1000.0) / 1000.0);
  } catch (final NumberFormatException e) {
  }

  // list downloaded releases; the listing can be null if the RELEASE directory
  // has been deleted manually
  final File[] downloadedFiles = sb.releasePath.listFiles();
  final int downloadedFilesNum = (downloadedFiles == null) ? 0 : downloadedFiles.length;
  // prevent that a developer version is over-deployed
  prop.put("candeploy_deployenabled", (downloadedFilesNum == 0) ? "0" : ((devenvironment) ? "1" : "2"));

  final NavigableSet<yacyRelease> downloadedReleases = new TreeSet<yacyRelease>();
  if (downloadedFiles != null) { // guard against a null listing, see above
    for (final File downloaded : downloadedFiles) {
      try {
        final yacyRelease release = new yacyRelease(downloaded);
        downloadedReleases.add(release);
      } catch (final RuntimeException e) {
        // not a valid release; can also be a restart- or deploy-file
        final File invalid = downloaded;
        if (!(invalid.getName().endsWith(".bat")
            || invalid.getName().endsWith(".sh")
            || invalid.getName().endsWith(".sig"))) {
          // Windows & Linux don't like scripts deleted while they execute!
          invalid.deleteOnExit();
        }
      }
    }
  }

  // latest downloaded release
  final yacyVersion dflt = (downloadedReleases.isEmpty()) ? null : downloadedReleases.last();
  // check if there are any downloaded releases; if so, enable the update buttons
  prop.put("candeploy_downloadsAvailable", (downloadedReleases.isEmpty()) ? "0" : "1");
  prop.put(
      "candeploy_deployenabled_buttonsActive",
      (downloadedReleases.isEmpty() || devenvironment) ? "0" : "1");

  int relcount = 0;
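  // Template list convention: each release is exported as an indexed group of keys,
  // e.g. candeploy_downloadedreleases_0_name, ..._file, ..._selected, and the loop
  // count is written to candeploy_downloadedreleases so the template knows how many
  // rows to render.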
"main" : "dev") + " " + release.getReleaseNr() + "/" + release.getSvn()); prop.put( "candeploy_downloadedreleases_" + relcount + "_signature", (release.getSignatureFile().exists() ? "1" : "0")); prop.putHTML("candeploy_downloadedreleases_" + relcount + "_file", release.getName()); prop.put( "candeploy_downloadedreleases_" + relcount + "_selected", (release == dflt) ? "1" : "0"); relcount++; } prop.put("candeploy_downloadedreleases", relcount); // list remotely available releases final yacyRelease.DevAndMainVersions releasess = yacyRelease.allReleases(false, false); relcount = 0; final ArrayList<yacyRelease> rlist = new ArrayList<yacyRelease>(); final Set<yacyRelease> remoteDevReleases = releasess.dev; remoteDevReleases.removeAll(downloadedReleases); for (final yacyRelease release : remoteDevReleases) { rlist.add(release); } final Set<yacyRelease> remoteMainReleases = releasess.main; remoteMainReleases.removeAll(downloadedReleases); for (final yacyRelease release : remoteMainReleases) { rlist.add(release); } yacyRelease release; for (int i = rlist.size() - 1; i >= 0; i--) { release = rlist.get(i); prop.put( "candeploy_availreleases_" + relcount + "_name", ((release.isMainRelease()) ? "main" : "dev") + " " + release.getReleaseNr() + "/" + release.getSvn()); prop.put("candeploy_availreleases_" + relcount + "_url", release.getUrl().toString()); prop.put( "candeploy_availreleases_" + relcount + "_signatures", (release.getPublicKey() != null ? "1" : "0")); prop.put("candeploy_availreleases_" + relcount + "_selected", (relcount == 0) ? "1" : "0"); relcount++; } prop.put("candeploy_availreleases", relcount); // properties for automated system update prop.put( "candeploy_manualUpdateChecked", ("manual".equals(sb.getConfig("update.process", "manual"))) ? "1" : "0"); prop.put( "candeploy_autoUpdateChecked", ("auto".equals(sb.getConfig("update.process", "manual"))) ? "1" : "0"); prop.put("candeploy_cycle", sb.getConfigLong("update.cycle", 168)); prop.putHTML("candeploy_blacklist", sb.getConfig("update.blacklist", "")); prop.put( "candeploy_releaseTypeMainChecked", ("any".equals(sb.getConfig("update.concept", "any"))) ? "0" : "1"); prop.put( "candeploy_releaseTypeAnyChecked", ("any".equals(sb.getConfig("update.concept", "any"))) ? "1" : "0"); prop.put("candeploy_lastlookup", (sb.getConfigLong("update.time.lookup", 0) == 0) ? "0" : "1"); prop.put( "candeploy_lastlookup_time", new Date(sb.getConfigLong("update.time.lookup", 0)).toString()); prop.put( "candeploy_lastdownload", (sb.getConfigLong("update.time.download", 0) == 0) ? "0" : "1"); prop.put( "candeploy_lastdownload_time", new Date(sb.getConfigLong("update.time.download", 0)).toString()); prop.put("candeploy_lastdeploy", (sb.getConfigLong("update.time.deploy", 0) == 0) ? "0" : "1"); prop.put( "candeploy_lastdeploy_time", new Date(sb.getConfigLong("update.time.deploy", 0)).toString()); prop.put( "candeploy_onlySignedFiles", ("1".equals(sb.getConfig("update.onlySignedFiles", "1"))) ? 
"1" : "0"); /* if ((adminaccess) && (yacyVersion.latestRelease >= (thisVersion+0.01))) { // only new Versions(not new SVN) if ((yacyVersion.latestMainRelease != null) || (yacyVersion.latestDevRelease != null)) { prop.put("hintVersionDownload", 1); } else if ((post != null) && (post.containsKey("aquirerelease"))) { yacyVersion.aquireLatestReleaseInfo(); prop.put("hintVersionDownload", 1); } else { prop.put("hintVersionAvailable", 1); } } prop.put("hintVersionAvailable", 1); // for testing prop.putASIS("hintVersionDownload_versionResMain", (yacyVersion.latestMainRelease == null) ? "-" : yacyVersion.latestMainRelease.toAnchor()); prop.putASIS("hintVersionDownload_versionResDev", (yacyVersion.latestDevRelease == null) ? "-" : yacyVersion.latestDevRelease.toAnchor()); prop.put("hintVersionAvailable_latestVersion", Float.toString(yacyVersion.latestRelease)); */ return prop; }