 // Return the IP address used by LCAP. If it was specified by the
 // (misleadingly named) localIPAddress param, it might not really be
 // our address (e.g., if we are behind NAT).
 String getLcapIPAddr() {
   String ip = CurrentConfig.getParam(PARAM_LOCAL_IP);
   if (ip == null || ip.length() <= 0) {
     return getLocalIPAddr();
   }
   return ip;
 }
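The null-or-empty guard above is a recurring pattern for string params. A minimal sketch of the same fallback logic factored into a helper; the helper itself is hypothetical, and only CurrentConfig.getParam and the names from the method above are real:

  // Hypothetical helper: return the param value, or the fallback if the
  // param is unset or empty. The fallback is evaluated eagerly here, a
  // simplification over the lazy second call in getLcapIPAddr().
  static String paramOrElse(String paramName, String fallback) {
    String val = CurrentConfig.getParam(paramName);
    return (val == null || val.isEmpty()) ? fallback : val;
  }

  // Usage equivalent to getLcapIPAddr():
  String ip = paramOrElse(PARAM_LOCAL_IP, getLocalIPAddr());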
 public void stopService() {
    // Mainly important in testing: reset this to its initial value.
   lastPluginDir = INITIAL_PLUGIN_DIR;
   if (CurrentConfig.getBooleanParam(PARAM_CLEAR_DIR_MAP, DEFAULT_CLEAR_DIR_MAP)) {
     // This should be in RepositoryManager.stopService()
     localRepositories = new HashMap();
   }
   super.stopService();
 }
 public CrawlRuleTester(int crawlDepth, long crawlDelay, String baseUrl, ArchivalUnit au) {
   super("crawlrule tester");
   m_crawlDepth = crawlDepth;
   long minFetchDelay =
       CurrentConfig.getLongParam(
           BaseArchivalUnit.PARAM_MIN_FETCH_DELAY, BaseArchivalUnit.DEFAULT_MIN_FETCH_DELAY);
   m_crawlDelay = Math.max(crawlDelay, minFetchDelay);
   m_baseUrl = baseUrl;
   m_au = au;
 }
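A hedged usage sketch: the constructor silently raises a too-small delay to the configured minimum fetch delay. The AU is assumed to come from the plugin framework, and the depth and delay values are arbitrary:

  // The 500 ms delay may be raised to BaseArchivalUnit's configured minimum.
  CrawlRuleTester tester = new CrawlRuleTester(2, 500L, "http://www.example.com/", au);
  tester.start();  // assumption: the super(name) call above implies a Thread subclass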
 public static String getRepositorySpec(ArchivalUnit au) {
   Configuration auConfig = au.getConfiguration();
   if (auConfig != null) { // can be null in unit tests
     String repoSpec = auConfig.get(PluginManager.AU_PARAM_REPOSITORY);
     if (repoSpec != null && repoSpec.startsWith("local:")) {
       return repoSpec;
     }
   }
   return "local:" + CurrentConfig.getParam(PARAM_CACHE_LOCATION);
 }
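A sketch of what callers get back, assuming specs follow the "local:" prefix convention visible above (the cache-location value is made up):

  // An AU configured with an explicit "local:..." repository keeps that
  // spec; otherwise the configured cache location is used, e.g. "local:/cache0".
  String spec = getRepositorySpec(au);
  assert spec.startsWith("local:");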
  protected RateLimiter recomputeFetchRateLimiter(RateLimiter oldLimiter) {
    String rate = CurrentConfig.getParam(PARAM_REGISTRY_FETCH_RATE, DEFAULT_REGISTRY_FETCH_RATE);
    Object limiterKey = getFetchRateLimiterKey();

    if (limiterKey == null) {
      return RateLimiter.getRateLimiter(oldLimiter, rate, DEFAULT_REGISTRY_FETCH_RATE);
    } else {
      RateLimiter.Pool pool = RateLimiter.getPool();
      return pool.findNamedRateLimiter(limiterKey, rate, DEFAULT_REGISTRY_FETCH_RATE);
    }
  }
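The branch above distinguishes a private limiter from one shared through the named pool: callers that resolve to the same key share a single rate. Both call shapes, mirrored from the method; the "1/1m" rate strings (one event per minute) are an assumption about RateLimiter's format:

  // Private limiter; null means no previous limiter to reuse (assumption).
  RateLimiter privateLimiter = RateLimiter.getRateLimiter(null, "1/1m", "1/1m");
  // Shared limiter: every caller passing the same key gets the same instance.
  RateLimiter shared = RateLimiter.getPool().findNamedRateLimiter("registry", "1/1m", "1/1m");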
 public V3Serializer(LockssDaemon daemon) throws PollSerializerException {
   this.daemon = daemon;
   File stateDir = PollUtil.ensurePollStateRoot();
   if (!FileUtil.ensureDirExists(stateDir)) {
     throw new PollSerializerException("Could not create state directory " + stateDir);
   }
   try {
     this.pollDir = FileUtil.createTempDir("pollstate-", "", stateDir);
   } catch (IOException ex) {
     throw new PollSerializerException("Cannot create temp dir in state directory" + stateDir, ex);
   }
 }
 private void handlePause(int entriesBetweenSleep) {
   if ((entriesBetweenSleep % sleepAfter) == 0) {
     long pauseTime = CurrentConfig.getTimeIntervalParam(PARAM_RETRY_PAUSE, DEFAULT_RETRY_PAUSE);
     Deadline pause = Deadline.in(pauseTime);
     logger.debug3("Sleeping for " + StringUtil.timeIntervalToString(pauseTime));
     while (!pause.expired()) {
       try {
         pause.sleep();
       } catch (InterruptedException ie) {
         // no action
       }
     }
   }
 }
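A caller-side sketch: handlePause() blocks only when the running count is a multiple of sleepAfter, so callers pass a counter incremented per entry. The pre-increment matters, since passing 0 would pause immediately (0 % sleepAfter == 0). The loop body is hypothetical:

  int count = 0;
  while (hasMoreEntries()) {   // hypothetical iteration
    processEntry();            // hypothetical per-entry work
    handlePause(++count);      // sleeps for the configured retry pause every sleepAfter entries
  }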
  public void testHandleEmptyFile() throws Exception {
    MockCachedUrlSetSpec mspec = new MockCachedUrlSetSpec("http://www.example.com", null);
    CachedUrlSet mcus = new MockCachedUrlSet(mau, mspec);
    NodeStateImpl nodeState = new NodeStateImpl(mcus, -1, null, null, repository);
    nodeState.setPollHistoryList(new ArrayList());
     // Store with an empty poll-history list.
    repository.storePollHistories(nodeState);
    String filePath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau);
    filePath =
        LockssRepositoryImpl.mapUrlToFileLocation(
            filePath, "http://www.example.com/" + HistoryRepositoryImpl.HISTORY_FILE_NAME);
    File xmlFile = new File(filePath);
    assertTrue(xmlFile.exists());

    nodeState.setPollHistoryList(new ArrayList());
    repository.loadPollHistories(nodeState);
    assertEquals(0, nodeState.pollHistories.size());

    mspec = new MockCachedUrlSetSpec("http://www.example2.com", null);
    mcus = new MockCachedUrlSet(mau, mspec);
    nodeState = new NodeStateImpl(mcus, -1, null, null, repository);
    filePath = LockssRepositoryImpl.mapAuToFileLocation(tempDirPath, mau);
    filePath = LockssRepositoryImpl.mapUrlToFileLocation(filePath, "http://www.example2.com/");
    xmlFile = new File(filePath);
    assertFalse(xmlFile.exists());
    xmlFile.mkdirs();
    filePath += HistoryRepositoryImpl.HISTORY_FILE_NAME;
    xmlFile = new File(filePath);
    OutputStream os = new BufferedOutputStream(new FileOutputStream(xmlFile));
    os.write(new byte[0]);
    os.close();
    assertTrue(xmlFile.exists());

    nodeState.setPollHistoryList(new ArrayList());
    repository.loadPollHistories(nodeState);
    assertEquals(0, nodeState.pollHistories.size());
    assertFalse(xmlFile.exists());
    xmlFile =
        new File(
            filePath
                + CurrentConfig.getParam(
                    ObjectSerializer.PARAM_FAILED_DESERIALIZATION_EXTENSION,
                    ObjectSerializer.DEFAULT_FAILED_DESERIALIZATION_EXTENSION));
    assertTrue(xmlFile.exists());
  }
  public void loadAuConfigDescrs(Configuration config) throws ConfigurationException {
    super.loadAuConfigDescrs(config);
    this.m_registryUrl = config.get(ConfigParamDescr.BASE_URL.getKey());
    // Now we can construct a valid CC permission checker.
    m_permissionCheckers = ListUtil.list(new CreativeCommonsPermissionChecker());

    paramMap.putLong(
        KEY_AU_NEW_CONTENT_CRAWL_INTERVAL,
        CurrentConfig.getTimeIntervalParam(
            PARAM_REGISTRY_CRAWL_INTERVAL, DEFAULT_REGISTRY_CRAWL_INTERVAL));
    if (log.isDebug2()) {
      log.debug2(
          "Setting Registry AU recrawl interval to "
              + StringUtil.timeIntervalToString(
                  paramMap.getLong(KEY_AU_NEW_CONTENT_CRAWL_INTERVAL)));
    }
  }
  /** Common page setup. */
  protected Page newPage() {
    // Compute heading
    String heading = getHeading();
    if (heading == null) {
      heading = "Box Administration";
    }

    // Create page and layout header
    Page page = ServletUtil.doNewPage(getPageTitle(), isFramed());
    Iterator inNavIterator;
    if (myServletDescr().hasNoNavTable()) {
      inNavIterator = CollectionUtil.EMPTY_ITERATOR;
    } else {
      inNavIterator =
          new FilterIterator(
              new ObjectArrayIterator(getServletDescrs()),
              new Predicate() {
                public boolean evaluate(Object obj) {
                  return isServletInNav((ServletDescr) obj);
                }
              });
    }
    ServletUtil.layoutHeader(
        this,
        page,
        heading,
        isLargeLogo(),
        getMachineName(),
        getLockssApp().getStartDate(),
        inNavIterator);
    String warnMsg = CurrentConfig.getParam(PARAM_UI_WARNING);
    if (warnMsg != null) {
      Composite warning = new Composite();
      warning.add("<center><font color=red size=+1>");
      warning.add(warnMsg);
      warning.add("</font></center><br>");
      page.add(warning);
    }
    return page;
  }
 /** Set the session timeout to the configured value */
 protected void setSessionTimeout(HttpSession session) {
   Configuration config = CurrentConfig.getCurrentConfig();
   setSessionTimeout(
       session, config.getTimeInterval(PARAM_UI_SESSION_TIMEOUT, DEFAULT_UI_SESSION_TIMEOUT));
 }
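The overload above only resolves the configured interval; the two-argument form it delegates to takes the timeout in milliseconds. An explicit call, sketched with an arbitrary 30-minute value:

  // Bypass the config lookup with an explicit 30-minute timeout.
  setSessionTimeout(session, 30 * 60 * 1000L);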
 public MultiPartRequest getMultiPartRequest() throws FormDataTooLongException, IOException {
   int maxUpload =
       CurrentConfig.getIntParam(PARAM_MAX_UPLOAD_FILE_SIZE, DEFAULT_MAX_UPLOAD_FILE_SIZE);
   return getMultiPartRequest(maxUpload);
 }
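Likewise, the upload cap can be supplied directly through the one-argument overload; the 10 MB figure is arbitrary:

  // Explicit 10 MB cap instead of the configured default.
  MultiPartRequest req = getMultiPartRequest(10 * 1024 * 1024);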
 /** Set the map from the parameter in CurrentConfig. */
 private void setMap() {
   setMap(CurrentConfig.getList(PARAM_REPUTATION_TRANSFER_MAP));
 }
  private Element makeForm() {
    Composite comp = new Composite();
    Form frm = new Form(srvURL(myServletDescr()));
    frm.method("POST");

    frm.add("<br><center>");
    Input reload = new Input(Input.Submit, KEY_ACTION, ACTION_RELOAD_CONFIG);
    setTabOrder(reload);
    frm.add(reload);
    frm.add(" ");
    Input backup = new Input(Input.Submit, KEY_ACTION, ACTION_MAIL_BACKUP);
    setTabOrder(backup);
    frm.add(backup);
    frm.add(" ");
    Input crawlplug = new Input(Input.Submit, KEY_ACTION, ACTION_CRAWL_PLUGINS);
    setTabOrder(crawlplug);
    frm.add(crawlplug);
    frm.add("</center>");
    ServletDescr d1 = AdminServletManager.SERVLET_HASH_CUS;
    if (isServletRunnable(d1)) {
      frm.add("<br><center>" + srvLink(d1, d1.heading) + "</center>");
    }
    Input findUrl = new Input(Input.Submit, KEY_ACTION, ACTION_FIND_URL);
    Input findUrlText = new Input(Input.Text, KEY_URL);
    findUrlText.setSize(50);
    setTabOrder(findUrl);
    setTabOrder(findUrlText);
    frm.add("<br><center>" + findUrl + " " + findUrlText + "</center>");

    Input thrw = new Input(Input.Submit, KEY_ACTION, ACTION_THROW_IOEXCEPTION);
    Input thmsg = new Input(Input.Text, KEY_MSG);
    setTabOrder(thrw);
    setTabOrder(thmsg);
    frm.add("<br><center>" + thrw + " " + thmsg + "</center>");

    frm.add("<br><center>AU Actions: select AU</center>");
    Composite ausel = ServletUtil.layoutSelectAu(this, KEY_AUID, formAuid);
    frm.add("<br><center>" + ausel + "</center>");
    setTabOrder(ausel);

    Input v3Poll =
        new Input(
            Input.Submit,
            KEY_ACTION,
            (showForcePoll ? ACTION_FORCE_START_V3_POLL : ACTION_START_V3_POLL));
    Input crawl =
        new Input(
            Input.Submit,
            KEY_ACTION,
            (showForceCrawl ? ACTION_FORCE_START_CRAWL : ACTION_START_CRAWL));

    frm.add("<br><center>");
    frm.add(v3Poll);
    frm.add(" ");
    frm.add(crawl);
    if (CurrentConfig.getBooleanParam(PARAM_ENABLE_DEEP_CRAWL, DEFAULT_ENABLE_DEEP_CRAWL)) {
      Input deepCrawl =
          new Input(
              Input.Submit,
              KEY_ACTION,
              (showForceCrawl ? ACTION_FORCE_START_DEEP_CRAWL : ACTION_START_DEEP_CRAWL));
      Input depthText = new Input(Input.Text, KEY_REFETCH_DEPTH, formDepth);
      depthText.setSize(4);
      setTabOrder(depthText);
      frm.add(" ");
      frm.add(deepCrawl);
      frm.add(depthText);
    }
    Input checkSubstance = new Input(Input.Submit, KEY_ACTION, ACTION_CHECK_SUBSTANCE);
    frm.add("<br>");
    frm.add(checkSubstance);
    if (metadataMgr != null) {
      Input reindex =
          new Input(
              Input.Submit,
              KEY_ACTION,
              (showForceReindexMetadata ? ACTION_FORCE_REINDEX_METADATA : ACTION_REINDEX_METADATA));
      frm.add(" ");
      frm.add(reindex);
      Input disableIndexing = new Input(Input.Submit, KEY_ACTION, ACTION_DISABLE_METADATA_INDEXING);
      frm.add(" ");
      frm.add(disableIndexing);
    }
    frm.add("</center>");

    comp.add(frm);
    return comp;
  }
 protected String getFetchRateLimiterSource() {
   return CurrentConfig.getParam(
       PARAM_REGISTRY_FETCH_RATE_LIMITER_SOURCE, DEFAULT_REGISTRY_FETCH_RATE_LIMITER_SOURCE);
 }
  /** Explode the archive into its constituent elements */
  public void explode() throws CacheException {
    CachedUrl cachedUrl = null;
    int goodEntries = 0;
    int badEntries = 0;
    int ignoredEntries = 0;
    int entriesBetweenSleep = 0;
    ArchiveReader arcReader = null;

    logger.info((storeArchive ? "Storing" : "Fetching") + " and exploding WARC file: " + origUrl);
    try {
      if (storeArchive) {
        UrlCacher uc = au.makeUrlCacher(new UrlData(arcStream, arcProps, fetchUrl));
        BitSet bs = new BitSet();
        bs.set(UrlCacher.DONT_CLOSE_INPUT_STREAM_FLAG);
        uc.setFetchFlags(bs);
        uc.storeContent();
        archiveData.resetInputStream();
        arcStream = archiveData.input;
      }
      // Wrap it in an ArchiveReader
      logger.debug3("About to wrap stream");
      arcReader = wrapStream(fetchUrl, arcStream);
      logger.debug3("wrapStream() returns " + (arcReader == null ? "null" : "non-null"));
      // Explode it
      if (arcReader == null) {
        throw new CacheException.ExploderException("no WarcReader for " + origUrl);
      }
      logger.debug("Exploding " + fetchUrl);
      // Iterate through the elements in the WARC file, except the first
      Iterator i = arcReader.iterator();
      // Consume the first record (typically the WARC's leading warcinfo record)
      for (i.next(); i.hasNext(); ) {
        // XXX probably not necessary
        helper.pokeWDog();
        if ((++entriesBetweenSleep % sleepAfter) == 0) {
          long pauseTime =
              CurrentConfig.getTimeIntervalParam(PARAM_RETRY_PAUSE, DEFAULT_RETRY_PAUSE);
          Deadline pause = Deadline.in(pauseTime);
          logger.debug3("Sleeping for " + StringUtil.timeIntervalToString(pauseTime));
          while (!pause.expired()) {
            try {
              pause.sleep();
            } catch (InterruptedException ie) {
              // no action
            }
          }
        }
        ArchiveRecord element = (ArchiveRecord) i.next();
        // Each element is a URL to be cached in a suitable AU
        ArchiveRecordHeader elementHeader = element.getHeader();
        String elementUrl = elementHeader.getUrl();
        String elementMimeType = elementHeader.getMimetype();
        long elementLength = elementHeader.getLength();
        logger.debug2("WARC url " + elementUrl + " mime " + elementMimeType);
        if (elementUrl.startsWith("http:")) {
          ArchiveEntry ae =
              new ArchiveEntry(
                  elementUrl,
                  elementLength,
                  0, // XXX need to convert getDate string to long
                  element, // ArchiveRecord extends InputStream
                  this,
                  fetchUrl);
          ae.setHeaderFields(makeCIProperties(elementHeader));
          long bytesStored = elementLength;
          logger.debug3("ArchiveEntry: " + ae.getName() + " bytes " + bytesStored);
          try {
            helper.process(ae);
          } catch (PluginException ex) {
            throw new CacheException.ExploderException("helper.process() threw", ex);
          }
          if (ae.getBaseUrl() != null) {
            if (ae.getRestOfUrl() != null && ae.getHeaderFields() != null) {
              storeEntry(ae);
              handleAddText(ae);
              goodEntries++;
              crawlFacade.getCrawlerStatus().addContentBytesFetched(bytesStored);
            } else {
              ignoredEntries++;
            }
          } else {
            badEntries++;
            logger.debug2("Can't map " + elementUrl + " from " + archiveUrl);
          }
        }
      }
    } catch (IOException ex) {
      throw new CacheException.ExploderException(ex);
    } finally {
      if (arcReader != null) {
        try {
          arcReader.close();
          arcReader = null;
        } catch (IOException ex) {
          throw new CacheException.ExploderException(ex);
        }
      }
      if (cachedUrl != null) {
        cachedUrl.release();
      }
      IOUtil.safeClose(arcStream);
    }
    if (badEntries == 0 && goodEntries > 0) {
      // Make it look like a new crawl finished on each AU to which
      // URLs were added.
      for (Iterator it = touchedAus.iterator(); it.hasNext(); ) {
        ArchivalUnit au = (ArchivalUnit) it.next();
        logger.debug3(archiveUrl + " touching " + au.toString());
        AuUtil.getDaemon(au).getNodeManager(au).newContentCrawlFinished();
      }
    } else {
      String msg = archiveUrl + ": " + badEntries + " bad entries (" + goodEntries + " good)";
      throw new CacheException.UnretryableException(msg);
    }
  }