Example 1
 /**
  * Clears the RAM and BLOB parts, deleting everything in the cell.
  *
  * @throws IOException
  */
 @Override
 public synchronized void clear() throws IOException {
   this.countCache.clear();
   this.removeDelayedURLs.clear();
   this.ram.clear();
   this.array.clear();
   if (Switchboard.getSwitchboard() != null
       && Switchboard.getSwitchboard().peers != null
       && Switchboard.getSwitchboard().peers.mySeed() != null)
     Switchboard.getSwitchboard().peers.mySeed().resetCounters();
 }
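A hedged sketch of the same guard in isolation: the chained null checks on Switchboard.getSwitchboard(), its peers, and mySeed() are the defensive pattern for optional global state. The helper name below is illustrative, not part of the original class, and it uses only calls visible in the method above.

  // Illustrative helper (hypothetical name): reset the local seed's counters
  // only when the whole singleton chain is available, mirroring clear() above.
  static void resetSeedCountersIfPresent() {
    final Switchboard sb = Switchboard.getSwitchboard();
    if (sb != null && sb.peers != null && sb.peers.mySeed() != null) {
      sb.peers.mySeed().resetCounters();
    }
  }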
Example 2
  public static serverObjects respond(
      @SuppressWarnings("unused") final RequestHeader header,
      final serverObjects post,
      final serverSwitch env) {
    // return variable that accumulates replacements
    final Switchboard sb = (Switchboard) env;
    final serverObjects prop = new serverObjects();
    final Segment segment = sb.index;
    final SolrConnector connector = segment.fulltext().getDefaultConnector();

    // avoid UNRESOLVED PATTERN
    prop.put("url", "");
    prop.put("citations", 0);
    prop.put("sentences", 0);

    DigestURL uri = null;
    String url = "";
    String hash = "";
    int ch = 10;
    boolean filter = false; // show cited sentences only
    if (post != null) {
      if (post.containsKey("url")) {
        url = post.get("url");
        if (!url.startsWith("http://")
            && !url.startsWith("https://")
            && !url.startsWith("ftp://")
            && !url.startsWith("smb://")
            && !url.startsWith("file://")) {
          url = "http://" + url;
        }
      }
      if (post.containsKey("hash")) {
        hash = post.get("hash");
      }
      if (post.containsKey("ch")) {
        ch = post.getInt("ch", ch);
      }
      filter = post.getBoolean("filter");
    }
    prop.put("filter", filter);
    if (url.length() > 0) {
      try {
        uri = new DigestURL(url, null);
        hash = ASCII.String(uri.hash());
      } catch (final MalformedURLException e) {
        // a malformed url leaves uri == null; the hash fallback below is tried next
      }
    }
    if (uri == null && hash.length() > 0) {
      try {
        uri = sb.getURL(ASCII.getBytes(hash));
        if (uri == null) {
          connector.commit(true); // commit and retry; the url may be freshly indexed and not yet searchable
          uri = sb.getURL(ASCII.getBytes(hash));
        }
      } catch (IOException e) {
        ConcurrentLog.logException(e);
      }
    }
    if (uri == null) return prop; // no proper url addressed
    url = uri.toNormalform(true);
    prop.put("url", url);

    // get the document from the index
    SolrDocument doc;
    try {
      doc =
          segment
              .fulltext()
              .getDefaultConnector()
              .getDocumentById(
                  hash,
                  CollectionSchema.title.getSolrFieldName(),
                  CollectionSchema.text_t.getSolrFieldName());
    } catch (final IOException e1) {
      return prop;
    }
    if (doc == null) return prop; // the document may not be in the index
    @SuppressWarnings("unchecked")
    ArrayList<String> title =
        (ArrayList<String>) doc.getFieldValue(CollectionSchema.title.getSolrFieldName());
    String text = (String) doc.getFieldValue(CollectionSchema.text_t.getSolrFieldName());

    ArrayList<String> sentences = new ArrayList<String>();
    if (title != null) for (String s : title) if (s.length() > 0) sentences.add(s);
    if (text != null && !text.isEmpty()) {
      SentenceReader sr = new SentenceReader(text);
      StringBuilder line;
      while (sr.hasNext()) {
        line = sr.next();
        if (line.length() > 0) sentences.add(line.toString());
      }
    }

    // for each sentence, count how often it occurs in other documents
    OrderedScoreMap<String> scores =
        new OrderedScoreMap<String>(null); // accumulates scores for citing urls
    LinkedHashMap<String, Set<DigestURL>> sentenceOcc = new LinkedHashMap<String, Set<DigestURL>>();
    for (String sentence : sentences) {
      if (sentence == null || sentence.length() < 40) {
        // do not count the very short sentences
        sentenceOcc.put(sentence, null);
        continue;
      }
      try {
        sentence = sentence.replace('"', '\'');
        SolrDocumentList doclist =
            connector.getDocumentListByQuery(
                "text_t:\"" + sentence + "\"",
                CollectionSchema.url_chars_i.getSolrFieldName() + " asc",
                0,
                100,
                CollectionSchema.sku.getSolrFieldName());
        int count = (int) doclist.getNumFound();
        if (count > 0) {
          Set<DigestURL> list = new TreeSet<DigestURL>();
          for (SolrDocument d : doclist) {
            String u = (String) d.getFieldValue(CollectionSchema.sku.getSolrFieldName());
            if (u == null || u.equals(url)) continue;
            scores.inc(u);
            try {
              list.add(new DigestURL(u, null));
            } catch (final MalformedURLException e) {
            }
          }
          sentenceOcc.put(sentence, list);
        }
      } catch (final Throwable ee) {
        // a failed query for a single sentence is not fatal; skip it
      }
    }
    sentences.clear(); // we do not need this again

    // iterate the sentences
    int i = 0;
    int sentenceNr = 0;
    for (Map.Entry<String, Set<DigestURL>> se : sentenceOcc.entrySet()) {
      Set<DigestURL> app = se.getValue();
      if (filter) { // prepare the list, including only sentences with citations
        if (app != null && app.size() > 0) {
          StringBuilder dd = new StringBuilder(se.getKey());
          prop.put("sentences_" + i + "_dt", sentenceNr);
          dd.append("<br/>appears in:");
          for (DigestURL u : app) {
            if (u != null) {
              dd.append(" <a href=\"")
                  .append(u.toNormalform(false))
                  .append("\">")
                  .append(u.getHost())
                  .append("</a>");
            }
          }
          prop.put("sentences_" + i + "_dd", dd.toString());
          i++;
        }
      } else { // prepare the list, including all sentences
        StringBuilder dd = new StringBuilder(se.getKey());
        prop.put("sentences_" + i + "_dt", sentenceNr);
        if (app != null && app.size() > 0) {
          dd.append("<br/>appears in:");
          for (DigestURL u : app) {
            if (u != null) {
              dd.append(" <a href=\"")
                  .append(u.toNormalform(false))
                  .append("\">")
                  .append(u.getHost())
                  .append("</a>");
            }
          }
        }
        prop.put("sentences_" + i + "_dd", dd.toString());
        i++;
      }
      sentenceNr++;
    }
    prop.put("sentences", i);

    // iterate the citations in order of number of citations
    i = 0;
    for (String u : scores.keyList(false)) {
      try {
        DigestURL uu = new DigestURL(u, null);
        prop.put("citations_" + i + "_dt", "<a href=\"" + u + "\">" + u + "</a>");
        StringBuilder dd = new StringBuilder();
        dd.append("makes ")
            .append(Integer.toString(scores.get(u)))
            .append(" citations: of ")
            .append(url);
        for (Map.Entry<String, Set<DigestURL>> se : sentenceOcc.entrySet()) {
          Set<DigestURL> occurls = se.getValue();
          if (occurls != null && occurls.contains(uu))
            dd.append("<br/><a href=\"/solr/select?q=text_t:%22")
                .append(se.getKey().replace('"', '\''))
                .append("%22&rows=100&grep=&wt=grephtml\">")
                .append(se.getKey())
                .append("</a>");
        }
        prop.put("citations_" + i + "_dd", dd.toString());
        i++;
      } catch (final MalformedURLException e) {
      }
    }
    prop.put("citations", i);

    // find similar documents from different hosts
    i = 0;
    for (String u : scores.keyList(false)) {
      if (scores.get(u) < ch) continue;
      try {
        DigestURL uu = new DigestURL(u, null);
        if (uu.getOrganization().equals(uri.getOrganization())) continue;
        prop.put("similar_links_" + i + "_url", u);
        i++;
      } catch (final MalformedURLException e) {
      }
    }
    prop.put("similar_links", i);
    prop.put("similar", i > 0 ? 1 : 0);

    // return rewrite properties
    return prop;
  }
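For orientation, the prop.put(...) keys written by this servlet ("url", "sentences", "citations", "similar_links", ...) fill the #[...]# placeholders of the corresponding HTML template, the same convention visible in the commented form snippet further below. A minimal, hypothetical direct invocation, assuming a running Switchboard and using only methods that appear above:

  // Hypothetical harness: invoke the servlet method directly.
  final serverObjects post = new serverObjects();
  post.put("url", "example.org/page.html"); // "http://" is prefixed automatically
  post.put("ch", "10");                     // citation threshold for similar_links
  final serverObjects prop = respond(null, post, Switchboard.getSwitchboard());
  // prop now carries the "sentences", "citations" and "similar_links" counts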
  public static serverObjects respond(
      final RequestHeader header, final serverObjects post, final serverSwitch env) {

    // return variable that accumulates replacements
    final Switchboard sb = (Switchboard) env;

    // clean up all search events
    SearchEventCache.cleanupEvents(true);
    sb.index.clearCaches(); // every time the ranking is changed we need to remove old orderings

    // initial values for AJAX elements (without JavaScript)
    final serverObjects prop = new serverObjects();
    prop.put("rejected", 0);

    Segment segment = sb.index;
    Fulltext fulltext = segment.fulltext();
    String localSolr = "/solr/select?core=collection1&q=*:*&start=0&rows=3";
    String remoteSolr =
        env.getConfig(SwitchboardConstants.FEDERATED_SERVICE_SOLR_INDEXING_URL, localSolr);
    if (!remoteSolr.endsWith("/")) remoteSolr = remoteSolr + "/";
    prop.put(
        "urlpublictextSolrURL",
        fulltext.connectedLocalSolr()
            ? localSolr
            : remoteSolr + "collection1/select?&q=*:*&start=0&rows=3");
    prop.putNum("urlpublictextSize", fulltext.collectionSize());
    prop.putNum("urlpublictextSegmentCount", fulltext.getDefaultConnector().getSegmentCount());
    prop.put(
        "webgraphSolrURL",
        fulltext.connectedLocalSolr()
            ? localSolr.replace("collection1", "webgraph")
            : remoteSolr + "webgraph/select?&q=*:*&start=0&rows=3");
    prop.putNum("webgraphSize", fulltext.useWebgraph() ? fulltext.webgraphSize() : 0);
    prop.putNum(
        "webgraphSegmentCount",
        fulltext.useWebgraph() ? fulltext.getWebgraphConnector().getSegmentCount() : 0);
    prop.putNum("citationSize", segment.citationCount());
    prop.putNum("citationSegmentCount", segment.citationSegmentCount());
    prop.putNum("rwipublictextSize", segment.RWICount());
    prop.putNum("rwipublictextSegmentCount", segment.RWISegmentCount());

    prop.put("list", "0");
    prop.put("loaderSize", 0);
    prop.put("loaderMax", 0);
    prop.put("list-loader", 0);

    int coreCrawlJobSize = sb.crawlQueues.coreCrawlJobSize();
    int limitCrawlJobSize = sb.crawlQueues.limitCrawlJobSize();
    int remoteTriggeredCrawlJobSize = sb.crawlQueues.remoteTriggeredCrawlJobSize();
    int noloadCrawlJobSize = sb.crawlQueues.noloadCrawlJobSize();
    int allsize =
        coreCrawlJobSize + limitCrawlJobSize + remoteTriggeredCrawlJobSize + noloadCrawlJobSize;

    prop.put("localCrawlSize", coreCrawlJobSize);
    prop.put("localCrawlState", "");
    prop.put("limitCrawlSize", limitCrawlJobSize);
    prop.put("limitCrawlState", "");
    prop.put("remoteCrawlSize", remoteTriggeredCrawlJobSize);
    prop.put("remoteCrawlState", "");
    prop.put("noloadCrawlSize", noloadCrawlJobSize);
    prop.put("noloadCrawlState", "");
    prop.put("terminate-button", allsize == 0 ? 0 : 1);
    prop.put("list-remote", 0);
    prop.put("forwardToCrawlStart", "0");

    prop.put("info", "0");
    boolean debug = (post != null && post.containsKey("debug"));

    if (post != null) {
      String c = post.toString();
      if (c.length() < 1000) ConcurrentLog.info("Crawl Start", c);
    }

    if (post != null && post.containsKey("queues_terminate_all")) {
      // terminate crawls individually
      sb.crawlQueues.noticeURL.clear();
      for (final byte[] h : sb.crawler.getActive()) {
        CrawlProfile p = sb.crawler.getActive(h);
        if (CrawlSwitchboard.DEFAULT_PROFILES.contains(p.name())) continue;
        if (p != null) sb.crawler.putPassive(h, p);
        sb.crawler.removeActive(h);
        sb.crawler.removePassive(h);
        try {
          sb.crawlQueues.noticeURL.removeByProfileHandle(p.handle(), 10000);
        } catch (SpaceExceededException e) {
        }
      }

      // clear stacks
      for (StackType stackType : StackType.values()) sb.crawlQueues.noticeURL.clear(stackType);
      try {
        sb.cleanProfiles();
      } catch (final InterruptedException e) {
        /* ignore this */
      }

      // remove pause
      sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
      sb.setConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL + "_isPaused_cause", "");
      sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
      sb.setConfig(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL + "_isPaused_cause", "");
      prop.put("terminate-button", 0);
    }

    if (post != null && post.containsKey("continue")) {
      // continue queue
      final String queue = post.get("continue", "");
      if ("localcrawler".equals(queue)) {
        sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
        sb.setConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL + "_isPaused_cause", "");
      } else if ("remotecrawler".equals(queue)) {
        sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
        sb.setConfig(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL + "_isPaused_cause", "");
      }
    }

    if (post != null && post.containsKey("pause")) {
      // pause queue
      final String queue = post.get("pause", "");
      if ("localcrawler".equals(queue)) {
        sb.pauseCrawlJob(
            SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL,
            "user request in Crawler_p from " + header.refererHost());
      } else if ("remotecrawler".equals(queue)) {
        sb.pauseCrawlJob(
            SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL,
            "user request in Crawler_p from " + header.refererHost());
      }
    }
    String queuemessage =
        sb.getConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL + "_isPaused_cause", "");
    if (queuemessage.length() == 0) {
      prop.put("info-queue", 0);
    } else {
      prop.put("info-queue", 1);
      prop.putHTML("info-queue_message", "pause reason: " + queuemessage);
    }

    if (post != null && post.containsKey("terminate"))
      try {
        final String handle = post.get("handle", "");
        // termination of a crawl: shift the crawl from active to passive
        final CrawlProfile p = sb.crawler.getActive(handle.getBytes());
        if (p != null) sb.crawler.putPassive(handle.getBytes(), p);
        // delete all crawl queue entries that belong to this profile
        sb.crawler.removeActive(handle.getBytes());
        sb.crawler.removePassive(handle.getBytes());
        sb.crawlQueues.noticeURL.removeByProfileHandle(handle, 10000);
      } catch (final SpaceExceededException e) {
        ConcurrentLog.logException(e);
      }

    if (post != null && post.containsKey("crawlingstart")) {
      // init crawl
      if (sb.peers == null) {
        prop.put("info", "3");
      } else {

        // remove crawlingFileContent before we record the call
        String crawlingFileName = post.get("crawlingFile");
        final File crawlingFile;
        if (crawlingFileName == null || crawlingFileName.isEmpty()) {
          crawlingFile = null;
        } else {
          if (crawlingFileName.startsWith("file://"))
            crawlingFileName = crawlingFileName.substring(7);
          crawlingFile = new File(crawlingFileName);
        }
        if (crawlingFile != null && crawlingFile.exists()) {
          post.remove("crawlingFile$file");
        }

        // prepare some filters that are adjusted if requested
        boolean storeHTCache = "on".equals(post.get("storeHTCache", "off"));
        String newcrawlingMustMatch = post.get("mustmatch", CrawlProfile.MATCH_ALL_STRING);
        String newcrawlingMustNotMatch = post.get("mustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
        if (newcrawlingMustMatch.length() < 2)
          newcrawlingMustMatch =
              CrawlProfile
                  .MATCH_ALL_STRING; // avoid that all urls are filtered out if bad value was
        // submitted
        boolean fullDomain =
            "domain".equals(post.get("range", "wide")); // special property in simple crawl start
        boolean subPath =
            "subpath".equals(post.get("range", "wide")); // special property in simple crawl start

        final boolean restrictedcrawl =
            fullDomain || subPath || !CrawlProfile.MATCH_ALL_STRING.equals(newcrawlingMustMatch);
        final boolean deleteage = restrictedcrawl && "age".equals(post.get("deleteold", "off"));
        Date deleteageDate = null;
        if (deleteage) {
          deleteageDate =
              timeParser(
                  true,
                  post.getInt("deleteIfOlderNumber", -1),
                  post.get("deleteIfOlderUnit", "year")); // year, month, day, hour
        }
        final boolean deleteold =
            (deleteage && deleteageDate != null)
                || (restrictedcrawl && post.getBoolean("deleteold"));

        final String sitemapURLStr = post.get("sitemapURL", "");
        String crawlingStart0 = post.get("crawlingURL", "").trim(); // the crawljob start url
        String[] rootURLs0 =
            crawlingStart0.indexOf('\n') > 0 || crawlingStart0.indexOf('\r') > 0
                ? crawlingStart0.split("[\\r\\n]+")
                : crawlingStart0.split(Pattern.quote("|"));
        Set<DigestURL> rootURLs = new HashSet<DigestURL>();
        String crawlName = "";
        if (crawlingFile == null)
          for (String crawlingStart : rootURLs0) {
            if (crawlingStart == null || crawlingStart.length() == 0) continue;
            // add the prefix http:// if necessary
            int pos = crawlingStart.indexOf("://", 0);
            if (pos == -1) {
              if (crawlingStart.startsWith("ftp")) crawlingStart = "ftp://" + crawlingStart;
              else crawlingStart = "http://" + crawlingStart;
            }
            try {
              DigestURL crawlingStartURL = new DigestURL(crawlingStart);
              rootURLs.add(crawlingStartURL);
              crawlName +=
                  ((crawlingStartURL.getHost() == null)
                          ? crawlingStartURL.toNormalform(true)
                          : crawlingStartURL.getHost())
                      + ',';
              // crawlingStartURL was just constructed, so it cannot be null here
              if (crawlingStartURL.isFile() || crawlingStartURL.isSMB()) storeHTCache = false;

            } catch (final MalformedURLException e) {
              ConcurrentLog.logException(e);
            }
          }
        else {
          crawlName = crawlingFile.getName();
        }
        if (crawlName.endsWith(",")) crawlName = crawlName.substring(0, crawlName.length() - 1);
        if (crawlName.length() > 64) {
          crawlName =
              "crawl_for_"
                  + rootURLs.size()
                  + "_start_points_"
                  + Integer.toHexString(crawlName.hashCode());
          int p = crawlName.lastIndexOf(',');
          if (p >= 8) crawlName = crawlName.substring(0, p);
        }
        if (crawlName.length() == 0 && sitemapURLStr.length() > 0)
          crawlName = "sitemap loader for " + sitemapURLStr;
        // if a root url has the file protocol, the site filter does not work; patch that:
        if (fullDomain) {
          for (DigestURL u : rootURLs)
            if (u.isFile()) {
              fullDomain = false;
              subPath = true;
              break;
            }
        }

        // delete old robots entries
        for (DigestURL ru : rootURLs) {
          sb.robots.delete(ru);
          try {
            if (ru.getHost() != null) { // might be null for file://
              Cache.delete(RobotsTxt.robotsURL(RobotsTxt.getHostPort(ru)).hash());
            }
          } catch (IOException e) {
          }
        }
        try {
          sb.robots.clear(); // to be safe: clear all robots entries
        } catch (IOException e) {
          // ignore; the robots cache is refreshed on demand
        }

        // set the crawl filter
        String ipMustMatch = post.get("ipMustmatch", CrawlProfile.MATCH_ALL_STRING);
        final String ipMustNotMatch = post.get("ipMustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
        if (ipMustMatch.length() < 2) ipMustMatch = CrawlProfile.MATCH_ALL_STRING;
        final String countryMustMatch =
            post.getBoolean("countryMustMatchSwitch") ? post.get("countryMustMatchList", "") : "";
        sb.setConfig("crawlingIPMustMatch", ipMustMatch);
        sb.setConfig("crawlingIPMustNotMatch", ipMustNotMatch);
        if (countryMustMatch.length() > 0)
          sb.setConfig("crawlingCountryMustMatch", countryMustMatch);

        String crawlerNoDepthLimitMatch =
            post.get("crawlingDepthExtension", CrawlProfile.MATCH_NEVER_STRING);
        final String indexUrlMustMatch = post.get("indexmustmatch", CrawlProfile.MATCH_ALL_STRING);
        final String indexUrlMustNotMatch =
            post.get("indexmustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
        final String indexContentMustMatch =
            post.get("indexcontentmustmatch", CrawlProfile.MATCH_ALL_STRING);
        final String indexContentMustNotMatch =
            post.get("indexcontentmustnotmatch", CrawlProfile.MATCH_NEVER_STRING);

        final boolean crawlOrder = post.get("crawlOrder", "off").equals("on");
        env.setConfig("crawlOrder", crawlOrder);

        if (crawlOrder)
          crawlerNoDepthLimitMatch =
              CrawlProfile.MATCH_NEVER_STRING; // without limitation the crawl order does not work

        int newcrawlingdepth = post.getInt("crawlingDepth", 8);
        env.setConfig("crawlingDepth", Integer.toString(newcrawlingdepth));
        if ((crawlOrder) && (newcrawlingdepth > 8)) newcrawlingdepth = 8;

        boolean directDocByURL =
            "on".equals(post.get("directDocByURL", "off")); // also catch linked media documents without loading them
        env.setConfig("crawlingDirectDocByURL", directDocByURL);

        final String collection = post.get("collection", "user");
        env.setConfig("collection", collection);

        // recrawl
        final String recrawl = post.get("recrawl", "nodoubles"); // nodoubles, reload, scheduler
        Date crawlingIfOlder = null;
        if ("reload".equals(recrawl)) {
          crawlingIfOlder =
              timeParser(
                  true,
                  post.getInt("reloadIfOlderNumber", -1),
                  post.get("reloadIfOlderUnit", "year")); // year, month, day, hour
        }
        env.setConfig(
            "crawlingIfOlder",
            crawlingIfOlder == null ? Long.MAX_VALUE : crawlingIfOlder.getTime());

        // store this call as api call
        sb.tables.recordAPICall(
            post,
            "Crawler_p.html",
            WorkTables.TABLE_API_TYPE_CRAWLER,
            "crawl start for "
                + ((rootURLs.size() == 0)
                    ? post.get("crawlingFile", "")
                    : rootURLs.iterator().next().toNormalform(true)));

        final boolean crawlingDomMaxCheck = "on".equals(post.get("crawlingDomMaxCheck", "off"));
        final int crawlingDomMaxPages =
            (crawlingDomMaxCheck) ? post.getInt("crawlingDomMaxPages", -1) : -1;
        env.setConfig("crawlingDomMaxPages", Integer.toString(crawlingDomMaxPages));

        boolean followFrames = "on".equals(post.get("followFrames", "false"));
        env.setConfig("followFrames", followFrames);

        boolean obeyHtmlRobotsNoindex = "on".equals(post.get("obeyHtmlRobotsNoindex", "false"));
        env.setConfig("obeyHtmlRobotsNoindex", obeyHtmlRobotsNoindex);

        boolean obeyHtmlRobotsNofollow = "on".equals(post.get("obeyHtmlRobotsNofollow", "false"));
        env.setConfig("obeyHtmlRobotsNofollow", obeyHtmlRobotsNofollow);

        final boolean indexText = "on".equals(post.get("indexText", "false"));
        env.setConfig("indexText", indexText);

        final boolean indexMedia = "on".equals(post.get("indexMedia", "false"));
        env.setConfig("indexMedia", indexMedia);

        env.setConfig("storeHTCache", storeHTCache);

        String defaultAgentName =
            sb.isIntranetMode()
                ? ClientIdentification.yacyIntranetCrawlerAgentName
                : ClientIdentification.yacyInternetCrawlerAgentName;
        String agentName = post.get("agentName", defaultAgentName);
        ClientIdentification.Agent agent = ClientIdentification.getAgent(agentName);
        if (agent == null) agent = ClientIdentification.getAgent(defaultAgentName);

        CacheStrategy cachePolicy = CacheStrategy.parse(post.get("cachePolicy", "iffresh"));
        if (cachePolicy == null) cachePolicy = CacheStrategy.IFFRESH;

        String crawlingMode = post.get("crawlingMode", "url");

        if ("file".equals(crawlingMode) && post.containsKey("crawlingFile")) {
          newcrawlingMustNotMatch = CrawlProfile.MATCH_NEVER_STRING;
          directDocByURL = false;
        }

        if ("sitemap".equals(crawlingMode)) {
          newcrawlingMustMatch = CrawlProfile.MATCH_ALL_STRING;
          newcrawlingMustNotMatch = CrawlProfile.MATCH_NEVER_STRING;
          newcrawlingdepth = 0;
          directDocByURL = false;
        }

        if ("sitelist".equals(crawlingMode)) {
          newcrawlingMustNotMatch = CrawlProfile.MATCH_NEVER_STRING;
          Set<DigestURL> newRootURLs = new HashSet<DigestURL>();
          for (DigestURL sitelistURL : rootURLs) {
            // download document
            Document scraper;
            try {
              scraper =
                  sb.loader.loadDocument(
                      sitelistURL, CacheStrategy.IFFRESH, BlacklistType.CRAWLER, agent);
              // get links and generate filter
              for (DigestURL u : scraper.getHyperlinks().keySet()) {
                newRootURLs.add(u);
              }
            } catch (final IOException e) {
              ConcurrentLog.logException(e);
            }
          }
          rootURLs = newRootURLs;
          crawlingMode = "url";
          if ((fullDomain || subPath) && newcrawlingdepth > 0)
            newcrawlingMustMatch =
                CrawlProfile
                    .MATCH_ALL_STRING; // to prevent that there is a restriction on the original
          // urls
        }

        // delete all error urls for that domain
        // and all urls for that host from the crawl queue
        Set<String> hosthashes = new HashSet<String>();
        boolean anysmbftporpdf = false;
        for (DigestURL u : rootURLs) {
          sb.index.fulltext().remove(u.hash());
          hosthashes.add(u.hosthash());
          if ("smb.ftp".indexOf(u.getProtocol()) >= 0
              || "pdf".equals(MultiProtocolURL.getFileExtension(u.getFileName())))
            anysmbftporpdf = true;
        }
        sb.crawlQueues.removeHosts(hosthashes);
        sb.index.fulltext().commit(true);

        boolean crawlingQ =
            anysmbftporpdf
                || "on".equals(post.get("crawlingQ", "off"))
                || "sitemap".equals(crawlingMode);
        env.setConfig("crawlingQ", crawlingQ);

        // compute mustmatch filter according to rootURLs
        if ((fullDomain || subPath) && newcrawlingdepth > 0) {
          String siteFilter = ".*";
          if (fullDomain) {
            siteFilter = CrawlProfile.siteFilter(rootURLs);
            if (deleteold) {
              sb.index.fulltext().deleteStaleDomainHashes(hosthashes, deleteageDate);
            }
          } else if (subPath) {
            siteFilter = CrawlProfile.subpathFilter(rootURLs);
            if (deleteold) {
              for (DigestURL u : rootURLs) {
                String basepath = u.toNormalform(true);
                if (!basepath.endsWith("/")) {
                  int p = basepath.lastIndexOf("/");
                  if (p > 0) basepath = basepath.substring(0, p + 1);
                }
                int count = sb.index.fulltext().remove(basepath, deleteageDate);
                if (count > 0)
                  ConcurrentLog.info(
                      "Crawler_p", "deleted " + count + " documents for host " + u.getHost());
              }
            }
          }
          if (CrawlProfile.MATCH_ALL_STRING.equals(newcrawlingMustMatch)) {
            newcrawlingMustMatch = siteFilter;
          } else if (!CrawlProfile.MATCH_ALL_STRING.equals(siteFilter)) {
            // combine both
            newcrawlingMustMatch = "(" + newcrawlingMustMatch + ")|(" + siteFilter + ")";
          }
        }

        // check if the crawl filter works correctly
        try {
          Pattern mmp = Pattern.compile(newcrawlingMustMatch);
          for (DigestURL u : rootURLs) {
            assert mmp.matcher(u.toNormalform(true)).matches()
                : "pattern " + mmp.toString() + " does not match url " + u.toNormalform(true);
          }
        } catch (final PatternSyntaxException e) {
          prop.put("info", "4"); // crawlfilter does not match url
          prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
          prop.putHTML("info_error", e.getMessage());
        }

        boolean hasCrawlstartDataOK = !crawlName.isEmpty();
        if (hasCrawlstartDataOK) {
          // check crawlurl was given in sitecrawl
          if ("url".equals(crawlingMode) && rootURLs.size() == 0) hasCrawlstartDataOK = false;
        }

        String snapshotsMaxDepthString = post.get("snapshotsMaxDepth", "-1");
        int snapshotsMaxDepth = Integer.parseInt(snapshotsMaxDepthString);
        boolean snapshotsLoadImage = post.getBoolean("snapshotsLoadImage");
        boolean snapshotsReplaceOld = post.getBoolean("snapshotsReplaceOld");
        String snapshotsMustnotmatch = post.get("snapshotsMustnotmatch", "");

        // get vocabulary scraper info
        JSONObject vocabulary_scraper =
            new JSONObject(); // key = vocabulary name, value = properties with key = type
                              // (i.e. 'class') and value = keyword in context
        for (String key : post.keySet()) {
          if (key.startsWith("vocabulary_")) {
            if (key.endsWith("_class")) {
              String vocabulary = key.substring(11, key.length() - 6);
              String value = post.get(key);
              if (value != null && value.length() > 0) {
                JSONObject props;
                try {
                  props = vocabulary_scraper.getJSONObject(vocabulary);
                } catch (JSONException e) {
                  props = new JSONObject();
                  vocabulary_scraper.put(vocabulary, props);
                }
                props.put("class", value);
              }
            }
          }
        }

        int timezoneOffset = post.getInt("timezoneOffset", 0);

        // in case that we crawl from a file, load that file and re-compute mustmatch pattern
        List<AnchorURL> hyperlinks_from_file = null;
        if ("file".equals(crawlingMode)
            && post.containsKey("crawlingFile")
            && crawlingFile != null) {
          final String crawlingFileContent = post.get("crawlingFile$file", "");
          try {
            // check if the crawl filter works correctly
            final ContentScraper scraper =
                new ContentScraper(
                    new DigestURL(crawlingFile), 10000000, new VocabularyScraper(), timezoneOffset);
            final Writer writer = new TransformerWriter(null, null, scraper, null, false);
            if (crawlingFile != null && crawlingFile.exists()) {
              FileUtils.copy(new FileInputStream(crawlingFile), writer);
            } else {
              FileUtils.copy(crawlingFileContent, writer);
            }
            writer.close();

            // get links and generate filter
            hyperlinks_from_file = scraper.getAnchors();
            if (newcrawlingdepth > 0) {
              if (fullDomain) {
                newcrawlingMustMatch = CrawlProfile.siteFilter(hyperlinks_from_file);
              } else if (subPath) {
                newcrawlingMustMatch = CrawlProfile.subpathFilter(hyperlinks_from_file);
              }
            }
          } catch (final Exception e) {
            // something went wrong
            prop.put("info", "7"); // Error with file
            prop.putHTML("info_crawlingStart", crawlingFileName);
            prop.putHTML("info_error", e.getMessage());
            ConcurrentLog.logException(e);
          }
          sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
        }

        // prepare a new crawling profile
        final CrawlProfile profile;
        byte[] handle;
        if (hasCrawlstartDataOK) {
          profile =
              new CrawlProfile(
                  crawlName,
                  newcrawlingMustMatch,
                  newcrawlingMustNotMatch,
                  ipMustMatch,
                  ipMustNotMatch,
                  countryMustMatch,
                  crawlerNoDepthLimitMatch,
                  indexUrlMustMatch,
                  indexUrlMustNotMatch,
                  indexContentMustMatch,
                  indexContentMustNotMatch,
                  newcrawlingdepth,
                  directDocByURL,
                  crawlingIfOlder,
                  crawlingDomMaxPages,
                  crawlingQ,
                  followFrames,
                  obeyHtmlRobotsNoindex,
                  obeyHtmlRobotsNofollow,
                  indexText,
                  indexMedia,
                  storeHTCache,
                  crawlOrder,
                  snapshotsMaxDepth,
                  snapshotsLoadImage,
                  snapshotsReplaceOld,
                  snapshotsMustnotmatch,
                  cachePolicy,
                  collection,
                  agentName,
                  new VocabularyScraper(vocabulary_scraper),
                  timezoneOffset);
          handle = ASCII.getBytes(profile.handle());

          // before we fire up a new crawl, we make sure that another crawl with the same name is
          // not running
          sb.crawler.removeActive(handle);
          sb.crawler.removePassive(handle);
          try {
            sb.crawlQueues.noticeURL.removeByProfileHandle(profile.handle(), 10000);
          } catch (final SpaceExceededException e1) {
            /* ignore: best-effort cleanup before the new crawl starts */
          }
        } else {
          profile = null;
          handle = null;
        }

        // start the crawl
        if ("url".equals(crawlingMode)) {
          if (rootURLs.size() == 0) {
            prop.put("info", "5"); // Crawling failed
            prop.putHTML("info_crawlingURL", "(no url given)");
            prop.putHTML("info_reasonString", "you must submit at least one crawl url");
          } else {

            // stack requests
            sb.crawler.putActive(handle, profile);
            final Set<DigestURL> successurls = new HashSet<DigestURL>();
            final Map<DigestURL, String> failurls = new HashMap<DigestURL, String>();
            sb.stackURLs(rootURLs, profile, successurls, failurls);

            if (failurls.size() == 0) {
              // liftoff!
              prop.put("info", "8");
              prop.putHTML("info_crawlingURL", post.get("crawlingURL"));

              // generate a YaCyNews if the global flag was set
              if (!sb.isRobinsonMode() && crawlOrder) {
                final Map<String, String> m =
                    new HashMap<String, String>(profile); // must be cloned
                m.remove("specificDepth");
                m.remove("indexText");
                m.remove("indexMedia");
                m.remove("remoteIndexing");
                m.remove("xsstopw");
                m.remove("xpstopw");
                m.remove("xdstopw");
                m.remove("storeTXCache");
                m.remove("storeHTCache");
                m.remove("generalFilter");
                m.remove("specificFilter");
                m.put("intention", post.get("intention", "").replace(',', '/'));
                sb.peers.newsPool.publishMyNews(
                    sb.peers.mySeed(), NewsPool.CATEGORY_CRAWL_START, m);
              }
            } else {
              StringBuilder fr = new StringBuilder();
              for (Map.Entry<DigestURL, String> failure : failurls.entrySet()) {
                sb.crawlQueues.errorURL.push(
                    failure.getKey(),
                    0,
                    null,
                    FailCategory.FINAL_LOAD_CONTEXT,
                    failure.getValue(),
                    -1);
                fr.append(failure.getValue()).append('/');
              }

              prop.put("info", "5"); // Crawling failed
              prop.putHTML("info_crawlingURL", (post.get("crawlingURL")));
              prop.putHTML("info_reasonString", fr.toString());
            }
            if (successurls.size() > 0)
              sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
          }
        } else if ("sitemap".equals(crawlingMode)) {
          try {
            final DigestURL sitemapURL =
                sitemapURLStr.indexOf("//") > 0
                    ? new DigestURL(sitemapURLStr)
                    : new DigestURL(
                        rootURLs.iterator().next(),
                        sitemapURLStr); // fix for relative paths which should not exist but are used anyway
            sb.crawler.putActive(handle, profile);
            final SitemapImporter importer = new SitemapImporter(sb, sitemapURL, profile);
            importer.start();
            sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
          } catch (final Exception e) {
            // something went wrong
            prop.put("info", "6"); // Error with url
            prop.putHTML("info_crawlingStart", sitemapURLStr);
            prop.putHTML("info_error", e.getMessage());
            ConcurrentLog.logException(e);
          }
        } else if ("file".equals(crawlingMode)) {
          if (post.containsKey("crawlingFile")
              && crawlingFile != null
              && hyperlinks_from_file != null) {
            try {
              if (newcrawlingdepth > 0) {
                if (fullDomain) {
                  newcrawlingMustMatch = CrawlProfile.siteFilter(hyperlinks_from_file);
                } else if (subPath) {
                  newcrawlingMustMatch = CrawlProfile.subpathFilter(hyperlinks_from_file);
                }
              }
              sb.crawler.putActive(handle, profile);
              sb.crawlStacker.enqueueEntriesAsynchronous(
                  sb.peers.mySeed().hash.getBytes(),
                  profile.handle(),
                  hyperlinks_from_file,
                  profile.timezoneOffset());
            } catch (final PatternSyntaxException e) {
              prop.put("info", "4"); // crawlfilter does not match url
              prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);
              prop.putHTML("info_error", e.getMessage());
            } catch (final Exception e) {
              // something went wrong
              prop.put("info", "7"); // Error with file
              prop.putHTML("info_crawlingStart", crawlingFileName);
              prop.putHTML("info_error", e.getMessage());
              ConcurrentLog.logException(e);
            }
            sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
          }
        }
      }
    }

    /*
    *  <input id="customPPM" name="customPPM" type="number" min="10" max="30000" style="width:46px" value="#[customPPMdefault]#" />PPM
       <input id="latencyFactor" name="latencyFactor" type="number" min="0.1" max="3.0" step="0.1" style="width:32px" value="#[latencyFactorDefault]#" />LF
       <input id="MaxSameHostInQueue" name="MaxSameHostInQueue" type="number" min="1" max="30" style="width:32px" value="#[MaxSameHostInQueueDefault]#" />MH
       <input type="submit" name="crawlingPerformance" value="set" />
       (<a href="/Crawler_p.html?crawlingPerformance=minimum">min</a>/<a href="/Crawler_p.html?crawlingPerformance=maximum">max</a>)
       </td>
    */
    if (post != null && post.containsKey("crawlingPerformance")) {
      final String crawlingPerformance = post.get("crawlingPerformance", "custom");
      final long LCbusySleep1 =
          sb.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 1000L);
      int wantedPPM = (LCbusySleep1 == 0) ? 30000 : (int) (60000L / LCbusySleep1);
      try {
        wantedPPM = post.getInt("customPPM", wantedPPM);
      } catch (final NumberFormatException e) {
      }
      if ("minimum".equals(crawlingPerformance.toLowerCase())) wantedPPM = 10;
      if ("maximum".equals(crawlingPerformance.toLowerCase())) wantedPPM = 30000;

      int wPPM = wantedPPM;
      if (wPPM <= 0) {
        wPPM = 1;
      }
      if (wPPM >= 30000) {
        wPPM = 30000;
      }
      final int newBusySleep = 60000 / wPPM; // for wantedPPM = 10: 6000; for wantedPPM = 1000: 60
      final float loadprereq =
          wantedPPM <= 10 ? 1.0f : wantedPPM <= 100 ? 2.0f : wantedPPM >= 1000 ? 8.0f : 3.0f;

      final BusyThread thread = sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
      if (thread != null) {
        sb.setConfig(
            SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, thread.setBusySleep(newBusySleep));
        sb.setConfig(
            SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_LOADPREREQ,
            thread.setLoadPreReqisite(loadprereq));
        thread.setIdleSleep(2000);
      }

      float latencyFactor = post.getFloat("latencyFactor", 0.5f);
      int MaxSameHostInQueue = post.getInt("MaxSameHostInQueue", 20);
      env.setConfig(SwitchboardConstants.CRAWLER_LATENCY_FACTOR, latencyFactor);
      env.setConfig(SwitchboardConstants.CRAWLER_MAX_SAME_HOST_IN_QUEUE, MaxSameHostInQueue);
    }

    // performance settings
    final long LCbusySleep =
        env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 1000L);
    final int LCppm = (int) (60000L / Math.max(1, LCbusySleep));
    prop.put("customPPMdefault", Integer.toString(LCppm));
    prop.put(
        "latencyFactorDefault",
        env.getConfigFloat(SwitchboardConstants.CRAWLER_LATENCY_FACTOR, 0.5f));
    prop.put(
        "MaxSameHostInQueueDefault",
        env.getConfigInt(SwitchboardConstants.CRAWLER_MAX_SAME_HOST_IN_QUEUE, 20));

    // generate crawl profile table
    int count = 0;
    boolean dark = true;
    final int domlistlength = (post == null) ? 160 : post.getInt("domlistlength", 160);
    CrawlProfile profile;
    // put active crawls into list
    String hosts = "";
    for (final byte[] h : sb.crawler.getActive()) {
      profile = sb.crawler.getActive(h);
      if (profile == null || CrawlSwitchboard.DEFAULT_PROFILES.contains(profile.name())) continue;
      profile.putProfileEntry("crawlProfilesShow_list_", prop, true, dark, count, domlistlength);
      prop.put("crawlProfilesShow_list_" + count + "_debug", debug ? 1 : 0);
      if (debug) {
        RowHandleSet urlhashes = sb.crawler.getURLHashes(h);
        prop.put(
            "crawlProfilesShow_list_" + count + "_debug_count",
            urlhashes == null ? "unknown" : Integer.toString(urlhashes.size()));
      }
      hosts = hosts + "," + profile.name();
      dark = !dark;
      count++;
    }
    prop.put("crawlProfilesShow_debug", debug ? 1 : 0);
    prop.put("crawlProfilesShow_list", count);
    prop.put("crawlProfilesShow_count", count);
    prop.put("crawlProfilesShow", count == 0 ? 0 : 1);

    prop.put("crawlProfilesShow_linkstructure", 0);

    if (post != null) { // handle config button to display graphic
      if (post.get("hidewebstructuregraph") != null)
        sb.setConfig(SwitchboardConstants.DECORATION_GRAFICS_LINKSTRUCTURE, false);
      if (post.get("showwebstructuregraph") != null)
        sb.setConfig(SwitchboardConstants.DECORATION_GRAFICS_LINKSTRUCTURE, true);
    }
    if (count > 0
        && sb.getConfigBool(SwitchboardConstants.DECORATION_GRAFICS_LINKSTRUCTURE, true)) {
      // collect the host names for 'wide' crawls which can be visualized
      boolean showLinkstructure = hosts.length() > 0 && !hosts.contains("file:");
      if (showLinkstructure) {
        StringBuilder q = new StringBuilder();
        hosts = hosts.substring(1);
        q.append(CollectionSchema.host_s.getSolrFieldName())
            .append(':')
            .append(hosts)
            .append(" OR ")
            .append(CollectionSchema.host_s.getSolrFieldName())
            .append(':')
            .append("www.")
            .append(hosts);
        try {
          prop.put(
              "crawlProfilesShow_linkstructure",
              count == 1
                      && sb.index.fulltext().getDefaultConnector().getCountByQuery(q.toString()) > 0
                  ? 1
                  : 2);
          prop.put("crawlProfilesShow_linkstructure_hosts", hosts);
        } catch (IOException e) {
        }
      }
    }

    // return rewrite properties
    return prop;
  }
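The crawlingPerformance block above maps a requested PPM (pages per minute) onto the crawler's busy-sleep in milliseconds via newBusySleep = 60000 / wPPM, with the PPM clamped to [1, 30000]. A standalone sketch of that conversion (the method name is illustrative):

  // Illustrative sketch of the PPM -> busy-sleep conversion used above.
  static int busySleepForPPM(final int wantedPPM) {
    final int wPPM = Math.max(1, Math.min(30000, wantedPPM)); // clamp to [1, 30000]
    return 60000 / wPPM; // 10 PPM -> 6000 ms; 1000 PPM -> 60 ms; 30000 PPM -> 2 ms
  }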
/**
 * Instances of this class can be passed as argument to the serverCore. The generic server
 * dispatches HTTP commands and calls the GET, HEAD or POST method in this class; these methods
 * parse the command line and decide whether to call a proxy servlet or a file server servlet.
 */
public final class HTTPDemon {

  private static final int ERRORCASE_MESSAGE = 4;
  private static final int ERRORCASE_FILE = 5;

  // static objects
  private static volatile Switchboard switchboard = Switchboard.getSwitchboard();

  static final void sendRespondError(
      final HashMap<String, Object> conProp,
      final OutputStream respond,
      final int errorcase,
      final int httpStatusCode,
      final String httpStatusText,
      final String detailedErrorMsg,
      final Throwable stackTrace)
      throws IOException {
    sendRespondError(
        conProp,
        respond,
        errorcase,
        httpStatusCode,
        httpStatusText,
        detailedErrorMsg,
        null,
        null,
        stackTrace,
        null);
  }

  static final void sendRespondError(
      final HashMap<String, Object> conProp,
      final OutputStream respond,
      final int httpStatusCode,
      final String httpStatusText,
      final File detailedErrorMsgFile,
      final serverObjects detailedErrorMsgValues,
      final Throwable stackTrace)
      throws IOException {
    sendRespondError(
        conProp,
        respond,
        ERRORCASE_FILE,
        httpStatusCode,
        httpStatusText,
        null,
        detailedErrorMsgFile,
        detailedErrorMsgValues,
        stackTrace,
        null);
  }

  private static final void sendRespondError(
      final HashMap<String, Object> conProp,
      final OutputStream respond,
      final int errorcase,
      final int httpStatusCode,
      String httpStatusText,
      final String detailedErrorMsgText,
      final Object detailedErrorMsgFile,
      final serverObjects detailedErrorMsgValues,
      final Throwable stackTrace,
      ResponseHeader header)
      throws IOException {

    FileInputStream fis = null;
    ByteArrayOutputStream o = null;
    try {
      // setting the proper http status message
      String httpVersion = (String) conProp.get(HeaderFramework.CONNECTION_PROP_HTTP_VER);
      if (httpVersion == null) httpVersion = "HTTP/1.1";
      if ((httpStatusText == null) || (httpStatusText.length() == 0)) {
        // http1_1 includes http1_0 messages
        if (HeaderFramework.http1_1.containsKey(Integer.toString(httpStatusCode)))
          httpStatusText = HeaderFramework.http1_1.get(Integer.toString(httpStatusCode));
        else httpStatusText = "Unknown";
      }

      // generating the desired request url
      String host = (String) conProp.get(HeaderFramework.CONNECTION_PROP_HOST);
      String path = (String) conProp.get(HeaderFramework.CONNECTION_PROP_PATH);
      if (path == null) path = "/";
      final String args = (String) conProp.get(HeaderFramework.CONNECTION_PROP_ARGS);
      final String method = (String) conProp.get(HeaderFramework.CONNECTION_PROP_METHOD);

      final int port = Domains.stripToPort(host);
      host = Domains.stripToHostName(host);

      String urlString;
      try {
        urlString =
            (new DigestURL(
                    (method.equals(HeaderFramework.METHOD_CONNECT) ? "https" : "http"),
                    host,
                    port,
                    (args == null) ? path : path + "?" + args))
                .toString();
      } catch (final MalformedURLException e) {
        urlString = "invalid URL";
      }

      // set rewrite values
      final serverObjects tp = new serverObjects();

      String clientIP = (String) conProp.get(HeaderFramework.CONNECTION_PROP_CLIENTIP);
      if (clientIP == null) clientIP = Domains.LOCALHOST;

      tp.put("peerName", (switchboard.peers == null) ? "" : switchboard.peers.myName());
      tp.put("errorMessageType", Integer.toString(errorcase));
      tp.put("httpStatus", Integer.toString(httpStatusCode) + " " + httpStatusText);
      tp.put("requestMethod", (String) conProp.get(HeaderFramework.CONNECTION_PROP_METHOD));
      tp.put("requestURL", urlString);

      switch (errorcase) {
        case ERRORCASE_FILE:
          tp.put(
              "errorMessageType_file",
              (detailedErrorMsgFile == null) ? "" : detailedErrorMsgFile.toString());
          if ((detailedErrorMsgValues != null) && !detailedErrorMsgValues.isEmpty()) {
            // rewrite the value names and add the proper name prefix:
            for (final Entry<String, String> entry : detailedErrorMsgValues.entrySet()) {
              tp.put("errorMessageType_" + entry.getKey(), entry.getValue());
            }
          }
          break;
        case ERRORCASE_MESSAGE:
        default:
          tp.put(
              "errorMessageType_detailedErrorMsg",
              (detailedErrorMsgText == null)
                  ? ""
                  : detailedErrorMsgText.replaceAll("\n", "<br />"));
          break;
      }

      // building the stacktrace
      if (stackTrace != null) {
        tp.put("printStackTrace", "1");
        final ByteBuffer errorMsg = new ByteBuffer(100);
        final PrintStream printStream = new PrintStream(errorMsg);
        stackTrace.printStackTrace(printStream);
        tp.put("printStackTrace_exception", stackTrace.toString());
        tp.put("printStackTrace_stacktrace", UTF8.String(errorMsg.getBytes()));
        printStream.close();
      } else {
        tp.put("printStackTrace", "0");
      }

      // Generated Tue, 23 Aug 2005 11:19:14 GMT by brain.wg (squid/2.5.STABLE3)
      // adding some system information
      final String systemDate = HeaderFramework.formatRFC1123(new Date());
      tp.put("date", systemDate);

      // rewrite the file
      final File htRootPath =
          new File(
              switchboard.getAppPath(),
              switchboard.getConfig(
                  SwitchboardConstants.HTROOT_PATH, SwitchboardConstants.HTROOT_PATH_DEFAULT));

      TemplateEngine.writeTemplate(
          "/proxymsg/error.html",
          fis = new FileInputStream(new File(htRootPath, "/proxymsg/error.html")),
          o = new ByteArrayOutputStream(512),
          tp);
      final byte[] result = o.toByteArray();
      o.close();
      o = null;

      if (header == null) header = new ResponseHeader(httpStatusCode);
      header.put(
          HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_STATUS, Integer.toString(httpStatusCode));
      header.put(HeaderFramework.DATE, systemDate);
      header.put(HeaderFramework.CONTENT_TYPE, "text/html");
      header.put(HeaderFramework.CONTENT_LENGTH, Integer.toString(result.length));
      header.put(HeaderFramework.PRAGMA, "no-cache, no-store");
      sendRespondHeader(conProp, respond, httpVersion, httpStatusCode, httpStatusText, header);

      if (!method.equals(HeaderFramework.METHOD_HEAD)) {
        // write the array to the client
        FileUtils.copy(result, respond);
      }
      respond.flush();
    } finally {
      if (fis != null)
        try {
          fis.close();
        } catch (final Exception e) {
          ConcurrentLog.logException(e);
        }
      if (o != null)
        try {
          o.close();
        } catch (final Exception e) {
          ConcurrentLog.logException(e);
        }
    }
  }

  static final void sendRespondHeader(
      final HashMap<String, Object> conProp,
      final OutputStream respond,
      final String httpVersion,
      final int httpStatusCode,
      final ResponseHeader header)
      throws IOException {
    sendRespondHeader(conProp, respond, httpVersion, httpStatusCode, null, header);
  }

  static final void sendRespondHeader(
      final HashMap<String, Object> conProp,
      final OutputStream respond,
      String httpVersion,
      final int httpStatusCode,
      String httpStatusText,
      ResponseHeader responseHeader)
      throws IOException {

    if (respond == null) throw new NullPointerException("The outputstream must not be null.");
    if (conProp == null)
      throw new NullPointerException("The connection property structure must not be null.");
    if (httpVersion == null)
      httpVersion = (String) conProp.get(HeaderFramework.CONNECTION_PROP_HTTP_VER);
    if (httpVersion == null) httpVersion = HeaderFramework.HTTP_VERSION_1_1;
    if (responseHeader == null) responseHeader = new ResponseHeader(httpStatusCode);

    try {
      if ((httpStatusText == null) || (httpStatusText.length() == 0)) {
        if (HeaderFramework.http1_1.containsKey(Integer.toString(httpStatusCode)))
          // http1_1 includes http1_0 messages
          httpStatusText = HeaderFramework.http1_1.get(Integer.toString(httpStatusCode));
        else httpStatusText = "Unknown";
      }

      final StringBuilder header = new StringBuilder(560);

      // "HTTP/0.9" does not have a status line or header in the response
      if (!httpVersion.toUpperCase().equals(HeaderFramework.HTTP_VERSION_0_9)) {
        // write status line
        header
            .append(httpVersion)
            .append(" ")
            .append(Integer.toString(httpStatusCode))
            .append(" ")
            .append(httpStatusText)
            .append("\r\n");

        // prepare header
        if (!responseHeader.containsKey(HeaderFramework.DATE))
          responseHeader.put(HeaderFramework.DATE, HeaderFramework.formatRFC1123(new Date()));
        if (!responseHeader.containsKey(HeaderFramework.CONTENT_TYPE))
          responseHeader.put(HeaderFramework.CONTENT_TYPE, "text/html; charset=UTF-8"); // fix this
        if (!responseHeader.containsKey(RequestHeader.CONNECTION)
            && conProp.containsKey(HeaderFramework.CONNECTION_PROP_PERSISTENT))
          responseHeader.put(
              RequestHeader.CONNECTION,
              (String) conProp.get(HeaderFramework.CONNECTION_PROP_PERSISTENT));
        if (!responseHeader.containsKey(RequestHeader.PROXY_CONNECTION)
            && conProp.containsKey(HeaderFramework.CONNECTION_PROP_PERSISTENT))
          responseHeader.put(
              RequestHeader.PROXY_CONNECTION,
              (String) conProp.get(HeaderFramework.CONNECTION_PROP_PERSISTENT));

        if (conProp.containsKey(HeaderFramework.CONNECTION_PROP_PERSISTENT)
            && conProp.get(HeaderFramework.CONNECTION_PROP_PERSISTENT).equals("keep-alive")
            && !responseHeader.containsKey(HeaderFramework.TRANSFER_ENCODING)
            && !responseHeader.containsKey(HeaderFramework.CONTENT_LENGTH))
          responseHeader.put(HeaderFramework.CONTENT_LENGTH, "0");

        // read custom headers
        final Iterator<ResponseHeader.Entry> it =
            responseHeader.getAdditionalHeaderProperties().iterator();
        ResponseHeader.Entry e;
        while (it.hasNext()) {
          // Append user properties to the main String
          // TODO: Should we check for user properties? What if they intersect properties that
          // are already in the header?
          e = it.next();
          header.append(e.getKey()).append(": ").append(e.getValue()).append("\r\n");
        }

        // write header
        final Iterator<String> i = responseHeader.keySet().iterator();
        String key;
        char tag;
        int count;
        while (i.hasNext()) {
          key = i.next();
          tag = key.charAt(0);
          if ((tag != '*') && (tag != '#')) {
            // '#' in key is reserved for proxy attributes as artificial header values
            count = responseHeader.keyCount(key);
            for (int j = 0; j < count; j++) {
              header
                  .append(key)
                  .append(": ")
                  .append(responseHeader.getSingle(key, j))
                  .append("\r\n");
            }
          }
        }

        // end header
        header.append("\r\n");

        // sending headers to the client
        respond.write(UTF8.getBytes(header.toString()));

        // flush stream
        respond.flush();
      }

      conProp.put(HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_HEADER, responseHeader);
      conProp.put(
          HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_STATUS, Integer.toString(httpStatusCode));
    } catch (final Exception e) {
      // any interruption may be caused by a network error or because the user has closed
      // the window during transmission; we simply pass it on as an IOException
      throw new IOException(e.getMessage(), e);
    }
  }
  private static final void sendRespondError(
      final HashMap<String, Object> conProp,
      final OutputStream respond,
      final int errorcase,
      final int httpStatusCode,
      String httpStatusText,
      final String detailedErrorMsgText,
      final Object detailedErrorMsgFile,
      final serverObjects detailedErrorMsgValues,
      final Throwable stackTrace,
      ResponseHeader header)
      throws IOException {

    FileInputStream fis = null;
    ByteArrayOutputStream o = null;
    try {
      // setting the proper http status message
      String httpVersion = (String) conProp.get(HeaderFramework.CONNECTION_PROP_HTTP_VER);
      if (httpVersion == null) httpVersion = HeaderFramework.HTTP_VERSION_1_1;
      if ((httpStatusText == null) || (httpStatusText.length() == 0)) {
        // http1_1 includes http1_0 messages
        if (HeaderFramework.http1_1.containsKey(Integer.toString(httpStatusCode)))
          httpStatusText = HeaderFramework.http1_1.get(Integer.toString(httpStatusCode));
        else httpStatusText = "Unknown";
      }

      // generating the desired request url
      String host = (String) conProp.get(HeaderFramework.CONNECTION_PROP_HOST);
      String path = (String) conProp.get(HeaderFramework.CONNECTION_PROP_PATH);
      if (path == null) path = "/";
      final String args = (String) conProp.get(HeaderFramework.CONNECTION_PROP_ARGS);
      final String method = (String) conProp.get(HeaderFramework.CONNECTION_PROP_METHOD);

      final int port = Domains.stripToPort(host);
      host = Domains.stripToHostName(host);

      String urlString;
      try {
        urlString =
            (new DigestURL(
                    (method.equals(HeaderFramework.METHOD_CONNECT) ? "https" : "http"),
                    host,
                    port,
                    (args == null) ? path : path + "?" + args))
                .toString();
      } catch (final MalformedURLException e) {
        urlString = "invalid URL";
      }

      // set rewrite values
      final serverObjects tp = new serverObjects();

      String clientIP = (String) conProp.get(HeaderFramework.CONNECTION_PROP_CLIENTIP);
      if (clientIP == null) clientIP = Domains.LOCALHOST;

      tp.put("peerName", (switchboard.peers == null) ? "" : switchboard.peers.myName());
      tp.put("errorMessageType", Integer.toString(errorcase));
      tp.put("httpStatus", Integer.toString(httpStatusCode) + " " + httpStatusText);
      tp.put("requestMethod", (String) conProp.get(HeaderFramework.CONNECTION_PROP_METHOD));
      tp.put("requestURL", urlString);

      switch (errorcase) {
        case ERRORCASE_FILE:
          tp.put(
              "errorMessageType_file",
              (detailedErrorMsgFile == null) ? "" : detailedErrorMsgFile.toString());
          if ((detailedErrorMsgValues != null) && !detailedErrorMsgValues.isEmpty()) {
            // rewrite the value names and add the proper name prefix:
            for (final Entry<String, String> entry : detailedErrorMsgValues.entrySet()) {
              tp.put("errorMessageType_" + entry.getKey(), entry.getValue());
            }
          }
          break;
        case ERRORCASE_MESSAGE:
        default:
          tp.put(
              "errorMessageType_detailedErrorMsg",
              (detailedErrorMsgText == null)
                  ? ""
                  : detailedErrorMsgText.replaceAll("\n", "<br />"));
          break;
      }

      // building the stacktrace
      if (stackTrace != null) {
        tp.put("printStackTrace", "1");
        final ByteBuffer errorMsg = new ByteBuffer(100);
        final PrintStream printStream = new PrintStream(errorMsg);
        stackTrace.printStackTrace(printStream);
        tp.put("printStackTrace_exception", stackTrace.toString());
        tp.put("printStackTrace_stacktrace", UTF8.String(errorMsg.getBytes()));
        printStream.close();
      } else {
        tp.put("printStackTrace", "0");
      }

      // Generated Tue, 23 Aug 2005 11:19:14 GMT by brain.wg (squid/2.5.STABLE3)
      // adding some system information
      final String systemDate = HeaderFramework.formatRFC1123(new Date());
      tp.put("date", systemDate);

      // rewrite the file
      final File htRootPath =
          new File(
              switchboard.getAppPath(),
              switchboard.getConfig(
                  SwitchboardConstants.HTROOT_PATH, SwitchboardConstants.HTROOT_PATH_DEFAULT));

      TemplateEngine.writeTemplate(
          "/proxymsg/error.html",
          fis = new FileInputStream(new File(htRootPath, "/proxymsg/error.html")),
          o = new ByteArrayOutputStream(512),
          tp);
      final byte[] result = o.toByteArray();
      o.close();
      o = null;

      if (header == null) header = new ResponseHeader(httpStatusCode);
      header.put(
          HeaderFramework.CONNECTION_PROP_PROXY_RESPOND_STATUS, Integer.toString(httpStatusCode));
      header.put(HeaderFramework.DATE, systemDate);
      header.put(HeaderFramework.CONTENT_TYPE, "text/html");
      header.put(HeaderFramework.CONTENT_LENGTH, Integer.toString(result.length));
      header.put(HeaderFramework.PRAGMA, "no-cache, no-store");
      sendRespondHeader(conProp, respond, httpVersion, httpStatusCode, httpStatusText, header);

      if (!method.equals(HeaderFramework.METHOD_HEAD)) {
        // write the array to the client
        FileUtils.copy(result, respond);
      }
      respond.flush();
    } finally {
      if (fis != null)
        try {
          fis.close();
        } catch (final Exception e) {
          ConcurrentLog.logException(e);
        }
      if (o != null)
        try {
          o.close();
        } catch (final Exception e) {
          ConcurrentLog.logException(e);
        }
    }
  }
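
For orientation, a minimal sketch of how sendRespondError might be invoked from a request handler. The conProp keys, ERRORCASE_MESSAGE and the parameter order are taken from the code above; handleNotFound, the host value and the METHOD_GET constant (assumed to exist alongside the METHOD_CONNECT and METHOD_HEAD constants used above) are assumptions of this sketch, not verified YaCy code.

  // hypothetical caller, for illustration only: report a 404 through sendRespondError
  private static void handleNotFound(
      final HashMap<String, Object> conProp, final OutputStream respond) throws IOException {
    // minimal connection properties; these keys are the ones read back by sendRespondError
    conProp.put(HeaderFramework.CONNECTION_PROP_HTTP_VER, HeaderFramework.HTTP_VERSION_1_1);
    conProp.put(HeaderFramework.CONNECTION_PROP_METHOD, HeaderFramework.METHOD_GET); // assumed constant
    conProp.put(HeaderFramework.CONNECTION_PROP_HOST, "localhost:8090"); // assumed host:port
    conProp.put(HeaderFramework.CONNECTION_PROP_PATH, "/missing.html");
    sendRespondError(
        conProp,
        respond,
        ERRORCASE_MESSAGE, // render detailedErrorMsgText instead of a template file
        404,
        null, // status text is resolved from HeaderFramework.http1_1
        "the requested resource was not found",
        null, // no template file
        null, // no template values
        null, // no stack trace
        null); // no preset response header
  }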
Example n. 6
  public static serverObjects respond(
      final RequestHeader header, final serverObjects post, final serverSwitch env) {

    prop.clear();
    sb = (Switchboard) env;
    user = sb.userDB.getUser(header);
    isAdmin =
        (sb.verifyAuthentication(header)
            || user != null && user.hasRight(UserDB.AccessRight.BOOKMARK_RIGHT));

    // set user name
    final String username;
    if (user != null) username = user.getUserName();
    else if (isAdmin) username = "******";
    else username = "******";
    prop.putHTML("display_user", username);

    // set peer address
    prop.put("display_address", sb.peers.mySeed().getPublicAddress());
    prop.put("display_peer", sb.peers.mySeed().getName());

    final int itemsPerPage; // items per page
    final int page; // page
    final int display; // default for JSON, XML or XHTML
    // String sortorder = "asc";
    // String sortname = "date";
    final String qtype;
    final String query;

    // check for GET parameters
    if (post != null) {
      itemsPerPage = (post.containsKey("rp")) ? post.getInt("rp", MAXRESULTS) : MAXRESULTS;
      page = (post.containsKey("page")) ? post.getInt("page", 1) : 1;
      query = (post.containsKey("query")) ? post.get("query", "") : "";
      qtype = (post.containsKey("qtype")) ? post.get("qtype", "") : "";
      // if (post.containsKey("sortorder")) sortorder = post.get("sortorder");
      if (post.containsKey("display")) {
        final String d = post.get("display");
        if ("flexigrid".equals(d) || "1".equals(d)) {
          display = DisplayType.FLEXIGRID.getValue();
        } else if ("xbel".equals(d) || "2".equals(d)) {
          display = DisplayType.XBEL.getValue();
        } else if ("rss".equals(d) || "3".equals(d)) {
          display = DisplayType.RSS.getValue();
        } else {
          display = DisplayType.XML.getValue();
        }
      } else {
        display = DisplayType.XML.getValue();
      }
      prop.put("display", display);
    } else {
      query = "";
      qtype = "";
      page = 1;
      itemsPerPage = MAXRESULTS;
      display = DisplayType.XML.getValue();
    }

    int count = 0;
    int total = 0;
    int start = 0;

    final Iterator<String> it;

    if (display == DisplayType.XBEL.getValue()) {
      String root = "/";
      if ("tags".equals(qtype) && !"".equals(query)) {
        prop.putHTML("display_folder", "1");
        prop.putHTML("display_folder_foldername", query);
        prop.putHTML("display_folder_folderhash", BookmarkHelper.tagHash(query));
        it = sb.bookmarksDB.getBookmarksIterator(query, isAdmin);
        count = print_XBEL(it, count);
        prop.put("display_xbel", count);
      } else {
        if (query.length() > 0 && "folders".equals(qtype)) {
          root = (query.charAt(0) == '/') ? query : "/" + query;
        }
        prop.putHTML("display_folder", "0");
        R = root.replaceAll("[^/]", "").length() - 1;
        count =
            recurseFolders(
                BookmarkHelper.getFolderList(root, sb.bookmarksDB.getTagIterator(isAdmin)),
                root,
                0,
                true,
                "");
        prop.put("display_xbel", count);
      }
    } else {
      // covers all non XBEL formats

      // set bookmark iterator according to query
      if ("tags".equals(qtype) && !"".equals(query) && !"/".equals(query)) {
        it = sb.bookmarksDB.getBookmarksIterator(query, isAdmin);
      } else {
        it = sb.bookmarksDB.getBookmarksIterator(isAdmin);
      }

      if (itemsPerPage < MAXRESULTS) {
        // skip the first entries (display next page)
        if (page > 1) {
          start = (page - 1) * itemsPerPage; // skip exactly the entries of the previous pages
        }
        count = 0;
        while (count < start && it.hasNext()) {
          it.next();
          count++;
        }
        total += count;
      }
      count = 0;
      BookmarksDB.Bookmark bookmark = null;
      while (count < itemsPerPage && it.hasNext()) {
        bookmark = sb.bookmarksDB.getBookmark(it.next());
        if (bookmark != null) {
          prop.put("display_bookmarks_" + count + "_id", count);
          prop.put("display_bookmarks_" + count + "_link", bookmark.getUrl());
          prop.put(
              "display_bookmarks_" + count + "_date",
              ISO8601Formatter.FORMATTER.format(new Date(bookmark.getTimeStamp())));
          prop.put(
              "display_bookmarks_" + count + "_rfc822date",
              HeaderFramework.formatRFC1123(new Date(bookmark.getTimeStamp())));
          prop.put("display_bookmarks_" + count + "_public", (bookmark.getPublic() ? "0" : "1"));
          prop.put("display_bookmarks_" + count + "_hash", bookmark.getUrlHash());
          prop.put("display_bookmarks_" + count + "_comma", ",");

          // offer HTML encoded
          prop.putHTML("display_bookmarks_" + count + "_title-html", bookmark.getTitle());
          prop.putHTML("display_bookmarks_" + count + "_desc-html", bookmark.getDescription());
          prop.putHTML(
              "display_bookmarks_" + count + "_tags-html",
              bookmark.getTagsString().replaceAll(",", ", "));
          prop.putHTML(
              "display_bookmarks_" + count + "_folders-html", (bookmark.getFoldersString()));

          // XML encoded
          prop.putXML("display_bookmarks_" + count + "_title-xml", bookmark.getTitle());
          prop.putXML("display_bookmarks_" + count + "_desc-xml", bookmark.getDescription());
          prop.putXML("display_bookmarks_" + count + "_tags-xml", bookmark.getTagsString());
          prop.putXML("display_bookmarks_" + count + "_folders-xml", (bookmark.getFoldersString()));

          // and plain text (potentially unsecure)
          prop.put("display_bookmarks_" + count + "_title", bookmark.getTitle());
          prop.put("display_bookmarks_" + count + "_desc", bookmark.getDescription());
          prop.put("display_bookmarks_" + count + "_tags", bookmark.getTagsString());
          prop.put("display_bookmarks_" + count + "_folders", (bookmark.getFoldersString()));

          count++;
        }
      }
      // eliminate the trailing comma of the last emitted entry for JSON output
      if (count > 0) prop.put("display_bookmarks_" + (count - 1) + "_comma", "");
      prop.put("display_bookmarks", count);

      while (it.hasNext()) {
        it.next();
        count++;
      }
      total += count;
      prop.put("display_page", page);
      prop.put("display_total", total);
    }

    // return rewrite properties
    return prop;
  }
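
The non-XBEL branch above implements paging by walking the bookmark iterator: skip the entries of the previous pages, then emit at most itemsPerPage entries. A minimal, self-contained sketch of that skip-then-collect pattern in plain Java (PagingSketch and pageOf are names invented for this illustration):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class PagingSketch {
  // skip (page - 1) * itemsPerPage entries, then collect up to itemsPerPage entries
  static List<String> pageOf(final Iterator<String> it, final int page, final int itemsPerPage) {
    final int start = (page - 1) * itemsPerPage; // zero-based skip count
    int skipped = 0;
    while (skipped < start && it.hasNext()) {
      it.next();
      skipped++;
    }
    final List<String> out = new ArrayList<String>();
    while (out.size() < itemsPerPage && it.hasNext()) {
      out.add(it.next());
    }
    return out;
  }

  public static void main(final String[] args) {
    final List<String> hashes = Arrays.asList("a", "b", "c", "d", "e");
    System.out.println(pageOf(hashes.iterator(), 2, 2)); // prints [c, d]
  }
}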
Example n. 7
  public static serverObjects respond(
      final RequestHeader header, final serverObjects post, final serverSwitch env) {
    // return variable that accumulates replacements
    final serverObjects prop = new serverObjects();
    final Switchboard sb = (Switchboard) env;

    // set if this should be visible
    if (yacyBuildProperties.isPkgManager()) {
      prop.put("candeploy", "2");
      return prop;
    } else if (OS.canExecUnix || OS.isWindows) {
      // we can deploy a new release with, e.g.:
      // cd DATA/RELEASE; tar xfz $1; cp -Rf yacy/* ../../; rm -Rf yacy
      prop.put("candeploy", "1");
    } else {
      prop.put("candeploy", "0");
    }

    prop.put("candeploy_configCommit", "0");
    prop.put("candeploy_autoUpdate", "0");
    prop.put("candeploy_downloadsAvailable", "0");

    if (post != null) {
      // check if update is supposed to be installed and a release is defined
      if (post.containsKey("update") && !post.get("releaseinstall", "").isEmpty()) {
        prop.put("forwardToSteering", "1");
        prop.putHTML("forwardToSteering_release", post.get("releaseinstall", ""));
        prop.put("deploys", "1");
        prop.put("candeploy", "2"); // display nothing else
        return prop;
      }

      if (post.containsKey("downloadRelease")) {
        // download a release
        final String release = post.get("releasedownload", "");
        if (!release.isEmpty()) {
          try {
            yacyRelease versionToDownload = new yacyRelease(new DigestURI(release));

            // replace this version with version which contains public key
            final yacyRelease.DevAndMainVersions allReleases =
                yacyRelease.allReleases(false, false);
            final Set<yacyRelease> mostReleases =
                versionToDownload.isMainRelease() ? allReleases.main : allReleases.dev;
            for (final yacyRelease rel : mostReleases) {
              if (rel.equals(versionToDownload)) {
                versionToDownload = rel;
                break;
              }
            }
            versionToDownload.downloadRelease();
          } catch (final IOException e) {
            // the download failed; log it and continue rendering the page
            Log.logException(e);
          }
        }
      }

      if (post.containsKey("checkRelease")) {
        yacyRelease.allReleases(true, false);
      }

      if (post.containsKey("deleteRelease")) {
        final String release = post.get("releaseinstall", "");
        if (!release.isEmpty()) {
          try {
            FileUtils.deletedelete(new File(sb.releasePath, release));
            FileUtils.deletedelete(new File(sb.releasePath, release + ".sig"));
          } catch (final NullPointerException e) {
            sb.getLog()
                .logSevere(
                    "AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage());
          }
        }
      }

      if (post.containsKey("autoUpdate")) {
        final yacyRelease updateVersion = yacyRelease.rulebasedUpdateInfo(true);
        if (updateVersion == null) {
          prop.put("candeploy_autoUpdate", "2"); // no more recent release found
        } else {
          // there is a version that is more recent. Load it and re-start with it
          sb.getLog()
              .logInfo("AUTO-UPDATE: downloading more recent release " + updateVersion.getUrl());
          final File downloaded = updateVersion.downloadRelease();
          prop.putHTML("candeploy_autoUpdate_downloadedRelease", updateVersion.getName());
          final boolean devenvironment = new File(sb.getAppPath(), ".svn").exists();
          if (devenvironment) {
            sb.getLog()
                .logInfo("AUTO-UPDATE: omitting update because this is a development environment");
            prop.put("candeploy_autoUpdate", "3");
          } else if ((downloaded == null) || (!downloaded.exists()) || (downloaded.length() == 0)) {
            sb.getLog()
                .logInfo(
                    "AUTO-UPDATE: omitting update because download failed (file cannot be found, is too small or signature was bad)");
            prop.put("candeploy_autoUpdate", "4");
          } else {
            yacyRelease.deployRelease(downloaded);
            sb.terminate(10, "manual release update to " + downloaded.getName());
            sb.getLog().logInfo("AUTO-UPDATE: deploy and restart initiated");
            prop.put("candeploy_autoUpdate", "1");
          }
        }
      }

      if (post.containsKey("configSubmit")) {
        prop.put("candeploy_configCommit", "1");
        sb.setConfig(
            "update.process",
            ("manual".equals(post.get("updateMode", "manual"))) ? "manual" : "auto");
        sb.setConfig("update.cycle", Math.max(12, post.getLong("cycle", 168)));
        sb.setConfig("update.blacklist", post.get("blacklist", ""));
        sb.setConfig(
            "update.concept", ("any".equals(post.get("releaseType", "any"))) ? "any" : "main");
        sb.setConfig(
            "update.onlySignedFiles", (post.getBoolean("onlySignedFiles", false)) ? "1" : "0");
      }
    }

    // version information
    final String versionstring =
        yacyBuildProperties.getVersion() + "/" + yacyBuildProperties.getSVNRevision();
    prop.putHTML("candeploy_versionpp", versionstring);
    final boolean devenvironment = new File(sb.getAppPath(), ".svn").exists();
    // cut off the SVN revision from the version number; parseFloat is the call
    // that can actually throw the NumberFormatException
    float thisVersion = 0f;
    try {
      thisVersion = Math.round(Float.parseFloat(yacyBuildProperties.getVersion()) * 1000.0f) / 1000.0f;
    } catch (final NumberFormatException e) {
    }

    // list downloaded releases
    final File[] downloadedFiles = sb.releasePath.listFiles();
    // list can be null if RELEASE directory has been deleted manually
    final int downloadedFilesNum = (downloadedFiles == null) ? 0 : downloadedFiles.length;

    prop.put(
        "candeploy_deployenabled",
        (downloadedFilesNum == 0)
            ? "0"
            : ((devenvironment) ? "1" : "2")); // prevent that a developer-version is over-deployed

    final NavigableSet<yacyRelease> downloadedReleases = new TreeSet<yacyRelease>();
    if (downloadedFiles != null)
      for (final File downloaded : downloadedFiles) {
      try {
        final yacyRelease release = new yacyRelease(downloaded);
        downloadedReleases.add(release);
      } catch (final RuntimeException e) {
        // not a valid release
        // can be also a restart- or deploy-file
        final File invalid = downloaded;
        if (!(invalid.getName().endsWith(".bat")
            || invalid.getName().endsWith(".sh")
            || invalid
                .getName()
                .endsWith(".sig"))) { // Windows & Linux don't like deleted scripts while execution!
          invalid.deleteOnExit();
        }
      }
    }
    // latest downloaded release
    final yacyVersion dflt = (downloadedReleases.isEmpty()) ? null : downloadedReleases.last();
    // check if there are any downloaded releases and if there are enable the update buttons
    prop.put("candeploy_downloadsAvailable", (downloadedReleases.isEmpty()) ? "0" : "1");
    prop.put(
        "candeploy_deployenabled_buttonsActive",
        (downloadedReleases.isEmpty() || devenvironment) ? "0" : "1");

    int relcount = 0;
    for (final yacyRelease release : downloadedReleases) {
      prop.put(
          "candeploy_downloadedreleases_" + relcount + "_name",
          ((release.isMainRelease()) ? "main" : "dev")
              + " "
              + release.getReleaseNr()
              + "/"
              + release.getSvn());
      prop.put(
          "candeploy_downloadedreleases_" + relcount + "_signature",
          (release.getSignatureFile().exists() ? "1" : "0"));
      prop.putHTML("candeploy_downloadedreleases_" + relcount + "_file", release.getName());
      prop.put(
          "candeploy_downloadedreleases_" + relcount + "_selected", (release == dflt) ? "1" : "0");
      relcount++;
    }
    prop.put("candeploy_downloadedreleases", relcount);

    // list remotely available releases
    final yacyRelease.DevAndMainVersions remoteReleases = yacyRelease.allReleases(false, false);
    relcount = 0;

    final ArrayList<yacyRelease> rlist = new ArrayList<yacyRelease>();
    final Set<yacyRelease> remoteDevReleases = remoteReleases.dev;
    remoteDevReleases.removeAll(downloadedReleases);
    for (final yacyRelease release : remoteDevReleases) {
      rlist.add(release);
    }
    final Set<yacyRelease> remoteMainReleases = remoteReleases.main;
    remoteMainReleases.removeAll(downloadedReleases);
    for (final yacyRelease release : remoteMainReleases) {
      rlist.add(release);
    }
    yacyRelease release;
    for (int i = rlist.size() - 1; i >= 0; i--) {
      release = rlist.get(i);
      prop.put(
          "candeploy_availreleases_" + relcount + "_name",
          ((release.isMainRelease()) ? "main" : "dev")
              + " "
              + release.getReleaseNr()
              + "/"
              + release.getSvn());
      prop.put("candeploy_availreleases_" + relcount + "_url", release.getUrl().toString());
      prop.put(
          "candeploy_availreleases_" + relcount + "_signatures",
          (release.getPublicKey() != null ? "1" : "0"));
      prop.put("candeploy_availreleases_" + relcount + "_selected", (relcount == 0) ? "1" : "0");
      relcount++;
    }

    prop.put("candeploy_availreleases", relcount);

    // properties for automated system update
    prop.put(
        "candeploy_manualUpdateChecked",
        ("manual".equals(sb.getConfig("update.process", "manual"))) ? "1" : "0");
    prop.put(
        "candeploy_autoUpdateChecked",
        ("auto".equals(sb.getConfig("update.process", "manual"))) ? "1" : "0");
    prop.put("candeploy_cycle", sb.getConfigLong("update.cycle", 168));
    prop.putHTML("candeploy_blacklist", sb.getConfig("update.blacklist", ""));
    prop.put(
        "candeploy_releaseTypeMainChecked",
        ("any".equals(sb.getConfig("update.concept", "any"))) ? "0" : "1");
    prop.put(
        "candeploy_releaseTypeAnyChecked",
        ("any".equals(sb.getConfig("update.concept", "any"))) ? "1" : "0");
    prop.put("candeploy_lastlookup", (sb.getConfigLong("update.time.lookup", 0) == 0) ? "0" : "1");
    prop.put(
        "candeploy_lastlookup_time",
        new Date(sb.getConfigLong("update.time.lookup", 0)).toString());
    prop.put(
        "candeploy_lastdownload", (sb.getConfigLong("update.time.download", 0) == 0) ? "0" : "1");
    prop.put(
        "candeploy_lastdownload_time",
        new Date(sb.getConfigLong("update.time.download", 0)).toString());
    prop.put("candeploy_lastdeploy", (sb.getConfigLong("update.time.deploy", 0) == 0) ? "0" : "1");
    prop.put(
        "candeploy_lastdeploy_time",
        new Date(sb.getConfigLong("update.time.deploy", 0)).toString());
    prop.put(
        "candeploy_onlySignedFiles",
        ("1".equals(sb.getConfig("update.onlySignedFiles", "1"))) ? "1" : "0");

    /*
    if ((adminaccess) && (yacyVersion.latestRelease >= (thisVersion+0.01))) { // only new Versions(not new SVN)
        if ((yacyVersion.latestMainRelease != null) ||
            (yacyVersion.latestDevRelease != null)) {
            prop.put("hintVersionDownload", 1);
        } else if ((post != null) && (post.containsKey("aquirerelease"))) {
            yacyVersion.aquireLatestReleaseInfo();
            prop.put("hintVersionDownload", 1);
        } else {
            prop.put("hintVersionAvailable", 1);
        }
    }
    prop.put("hintVersionAvailable", 1); // for testing

    prop.putASIS("hintVersionDownload_versionResMain", (yacyVersion.latestMainRelease == null) ? "-" : yacyVersion.latestMainRelease.toAnchor());
    prop.putASIS("hintVersionDownload_versionResDev", (yacyVersion.latestDevRelease == null) ? "-" : yacyVersion.latestDevRelease.toAnchor());
    prop.put("hintVersionAvailable_latestVersion", Float.toString(yacyVersion.latestRelease));
     */

    return prop;
  }
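
The autoUpdate branch above reduces to a three-step flow: ask the update rules for a newer release, download it, then deploy and restart. A minimal sketch of that flow, assuming it lives next to the servlet code above; every yacyRelease and Switchboard call used here appears in the example itself, only the helper name tryAutoUpdate is invented:

  // hypothetical helper condensing the autoUpdate branch above
  private static boolean tryAutoUpdate(final Switchboard sb) {
    final yacyRelease updateVersion = yacyRelease.rulebasedUpdateInfo(true);
    if (updateVersion == null) return false; // no more recent release found
    final File downloaded = updateVersion.downloadRelease();
    if (downloaded == null || !downloaded.exists() || downloaded.length() == 0)
      return false; // download failed or the signature check rejected the file
    yacyRelease.deployRelease(downloaded); // unpack the release over the installation
    sb.terminate(10, "auto release update to " + downloaded.getName());
    return true; // restart has been initiated
  }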