public void testFunctionalFromTarHierarchy() throws Exception {
    log.debug3("in testFromTarHierarchy");
    // load the tarballs
    InputStream file_input = null;
    try {
      file_input = getResourceAsStream(realTARFile_A);
      UrlCacher uc = tarAu.makeUrlCacher(new UrlData(file_input, tarHeader, TAR_A_BASE));
      uc.storeContent();
      IOUtil.safeClose(file_input);

      file_input = getResourceAsStream(realTARFile_B);
      uc = tarAu.makeUrlCacher(new UrlData(file_input, tarHeader, TAR_B_BASE));
      uc.storeContent();
      IOUtil.safeClose(file_input);

    } catch (IOException e) {
      // Don't swallow the exception; fail the test explicitly.
      fail("Unable to store test tar content: " + e);
    } finally {
      IOUtil.safeClose(file_input);
    }

    CachedUrlSet cus = tarAu.getAuCachedUrlSet();
    for (CachedUrl cu : cus.getCuIterable()) {
      log.debug3("AU - cu is: " + cu.getUrl());
      cu.release();
    }

    // We need to start from the level of the ArticleMetadataExtractor
    MyListEmitter emitter = new MyListEmitter();
    ArticleMetadataExtractor amEx =
        new ElsevierDeferredArticleMetadataExtractor(ArticleFiles.ROLE_ARTICLE_METADATA);

    Iterator<ArticleFiles> it = tarAu.getArticleIterator(MetadataTarget.Any());
    while (it.hasNext()) {
      ArticleFiles af = it.next();
      log.debug3("Metadata test - articlefiles " + af.toString());
      CachedUrl cu = af.getRoleCu(ArticleFiles.ROLE_ARTICLE_METADATA);
      log.debug3("metadata cu is " + cu.getUrl());
      amEx.extract(MetadataTarget.Any(), af, emitter);
      List<ArticleMetadata> returnList = emitter.getAmList();

      assertNotNull(returnList);
      log.debug3("size of returnList is " + returnList.size());
      for (ArticleMetadata mdRecord : returnList) {
        validateCompleteMetadataRecord(mdRecord);
      }
    }
  }
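// For reference, a minimal sketch of what the MyListEmitter used above could
// look like: an ArticleMetadataExtractor.Emitter that collects each emitted
// ArticleMetadata into a list for later inspection. This is an assumption
// about the test harness, not the actual class.
class MyListEmitter implements ArticleMetadataExtractor.Emitter {
  private final List<ArticleMetadata> amList = new ArrayList<ArticleMetadata>();

  public void emitMetadata(ArticleFiles af, ArticleMetadata md) {
    amList.add(md);
  }

  public List<ArticleMetadata> getAmList() {
    return amList;
  }
}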
Example 2
  public void testRequestMessageCreation() throws Exception {
    V3LcapMessage reqMsg =
        new V3LcapMessage(
            "ArchivalID_2",
            "key",
            "Plug42",
            m_testBytes,
            m_testBytes,
            V3LcapMessage.MSG_REPAIR_REQ,
            987654321,
            m_testID,
            tempDir,
            theDaemon);
    reqMsg.setTargetUrl("http://foo.com/");

    for (Object obj : m_testVoteBlocks) {
      reqMsg.addVoteBlock((VoteBlock) obj);
    }

    assertEquals(3, reqMsg.getProtocolVersion());
    assertEquals("Plug42", reqMsg.getPluginVersion());
    assertSame(m_testID, reqMsg.getOriginatorId());
    assertEquals(V3LcapMessage.MSG_REPAIR_REQ, reqMsg.getOpcode());
    assertEquals("ArchivalID_2", reqMsg.getArchivalId());
    assertEquals("http://foo.com/", reqMsg.getTargetUrl());
    assertEquals(m_testBytes, reqMsg.getPollerNonce());
    assertEquals(m_testBytes, reqMsg.getVoterNonce());
    assertNull(reqMsg.getVoterNonce2());
    List<VoteBlock> aBlocks = new ArrayList<VoteBlock>();
    List<VoteBlock> bBlocks = new ArrayList<VoteBlock>();
    for (VoteBlocksIterator iter = m_testMsg.getVoteBlockIterator(); iter.hasNext(); ) {
      aBlocks.add(iter.next());
    }
    for (VoteBlocksIterator iter = reqMsg.getVoteBlockIterator(); iter.hasNext(); ) {
      bBlocks.add(iter.next());
    }
    assertEquals(aBlocks, bBlocks);

    // Actual size of test vote blocks is unpredictable
    assertTrue(reqMsg.getEstimatedEncodedLength() > V3LcapMessage.EST_ENCODED_HEADER_LENGTH);
  }
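  // The two drain-the-iterator loops above could be factored into a small
  // helper like this (hypothetical; not part of the original test class):
  private static List<VoteBlock> drain(VoteBlocksIterator iter) throws IOException {
    List<VoteBlock> blocks = new ArrayList<VoteBlock>();
    while (iter.hasNext()) {
      blocks.add(iter.next());
    }
    return blocks;
  }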
  public void testSimpleDatasetXML() throws Exception {
    log.debug3("testSimpleDatasetXML");
    String file_input = StringUtil.fromInputStream(getResourceAsStream(testDatasetFile));
    String xml_url = TAR_A_BASE + SUBDIR + "dataset.xml";

    List<ArticleMetadata> mdList =
        extractFromContent(xml_url, "text/xml", file_input, nocheck_mle, null);
    assertEquals(6, mdList.size());
    for (ArticleMetadata mdRecord : mdList) {
      validateDatasetMetadataRecord(mdRecord);
    }
  }
Example 4
 // make a Schedule with one chunk per task
 Schedule sched(List tasks) {
   List events = new ArrayList();
   for (Iterator iter = tasks.iterator(); iter.hasNext(); ) {
     Object obj = iter.next();
     if (obj instanceof Schedule.Event) {
       events.add(obj);
     } else {
       SchedulableTask task = (SchedulableTask) obj;
       if (task.isBackgroundTask()) {
         events.add(bEvent((BackgroundTask) task, Schedule.EventType.START));
       } else {
         events.add(chunk((StepTask) task));
       }
     }
   }
   return new Schedule(events);
 }
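 // Usage sketch (hypothetical tasks, and assumes this test class extends
 // LockssTestCase so assertEquals is available): each task yields exactly
 // one event, a START event for a background task and a single chunk for
 // a step task.
 void demoSched(BackgroundTask bgTask, StepTask stepTask) {
   Schedule s = sched(ListUtil.list(bgTask, stepTask));
   assertEquals(2, s.getEvents().size());
 }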
 /**
  * Checks the consistency of the node, and continues with its children if it's consistent.
  *
  * @param node RepositoryNodeImpl the node to check
  */
 private void recurseConsistencyCheck(RepositoryNodeImpl node) {
   logger.debug2("Checking node '" + node.getNodeUrl() + "'...");
   // check consistency at each node
   // correct/deactivate as necessary
   // 'checkNodeConsistency()' will repair if possible
   if (node.checkNodeConsistency()) {
     logger.debug3("Node consistent; recursing on children...");
      List children = node.getNodeList(null, false);
      for (Object child : children) {
        recurseConsistencyCheck((RepositoryNodeImpl) child);
      }
   } else {
     logger.debug3("Node inconsistent; deactivating...");
     deactivateInconsistentNode(node);
   }
 }
 public void setConfig(
     Configuration config, Configuration oldConfig, Configuration.Differences changedKeys) {
    //  Build the list of repositories from the list of disk (fs) paths.
    //  Needs to be generalized if another repository implementation is ever added.
   if (changedKeys.contains(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST)) {
      List<String> lst = new ArrayList<String>();
      String dspace = config.get(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST, "");
      List paths = StringUtil.breakAt(dspace, ';');
      if (paths != null) {
        for (Object path : paths) {
          lst.add("local:" + (String) path);
        }
      }
     repoList = lst;
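      // E.g. a disk space list of "/disk1;/disk2" yields
      // repoList = ["local:/disk1", "local:/disk2"].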
   }
   if (changedKeys.contains(PARAM_MAX_PER_AU_CACHE_SIZE)) {
     paramNodeCacheSize =
         config.getInt(PARAM_MAX_PER_AU_CACHE_SIZE, DEFAULT_MAX_PER_AU_CACHE_SIZE);
     for (Iterator iter = getDaemon().getAllLockssRepositories().iterator(); iter.hasNext(); ) {
       LockssRepository repo = (LockssRepository) iter.next();
       if (repo instanceof LockssRepositoryImpl) {
         LockssRepositoryImpl repoImpl = (LockssRepositoryImpl) repo;
         repoImpl.setNodeCacheSize(paramNodeCacheSize);
       }
     }
   }
   if (changedKeys.contains(PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE)) {
     paramSuspectVersionsCacheSize =
         config.getInt(
             PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE, DEFAULT_MAX_SUSPECT_VERSIONS_CACHE_SIZE);
     suspectVersionsCache.setMaxSize(paramSuspectVersionsCacheSize);
   }
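    // GLOBAL_CACHE_PREFIX presumably covers both the enable flag and the
    // size param read below, so a single change check suffices.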
   if (changedKeys.contains(GLOBAL_CACHE_PREFIX)) {
     paramIsGlobalNodeCache =
         config.getBoolean(PARAM_GLOBAL_CACHE_ENABLED, DEFAULT_GLOBAL_CACHE_ENABLED);
     if (paramIsGlobalNodeCache) {
       paramGlobalNodeCacheSize =
           config.getInt(PARAM_MAX_GLOBAL_CACHE_SIZE, DEFAULT_MAX_GLOBAL_CACHE_SIZE);
       log.debug("global node cache size: " + paramGlobalNodeCacheSize);
       globalNodeCache.setMaxSize(paramGlobalNodeCacheSize);
     }
   }
   if (changedKeys.contains(DISK_PREFIX)) {
     int minMB = config.getInt(PARAM_DISK_WARN_FRRE_MB, DEFAULT_DISK_WARN_FRRE_MB);
     double minPer =
         config.getPercentage(PARAM_DISK_WARN_FRRE_PERCENT, DEFAULT_DISK_WARN_FRRE_PERCENT);
     paramDFWarn = PlatformUtil.DF.makeThreshold(minMB, minPer);
     minMB = config.getInt(PARAM_DISK_FULL_FRRE_MB, DEFAULT_DISK_FULL_FRRE_MB);
     minPer = config.getPercentage(PARAM_DISK_FULL_FRRE_PERCENT, DEFAULT_DISK_FULL_FRRE_PERCENT);
     paramDFFull = PlatformUtil.DF.makeThreshold(minMB, minPer);
   }
   if (changedKeys.contains(PARAM_SIZE_CALC_MAX_LOAD)) {
     sizeCalcMaxLoad = config.getPercentage(PARAM_SIZE_CALC_MAX_LOAD, DEFAULT_SIZE_CALC_MAX_LOAD);
   }
   if (changedKeys.contains(PREFIX)) {
     maxUnusedDirSearch =
         config.getInt(PARAM_MAX_UNUSED_DIR_SEARCH, DEFAULT_MAX_UNUSED_DIR_SEARCH);
     isStatefulUnusedDirSearch =
         config.getBoolean(
             PARAM_IS_STATEFUL_UNUSED_DIR_SEARCH, DEFAULT_IS_STATEFUL_UNUSED_DIR_SEARCH);
     enableLongComponents =
         config.getBoolean(PARAM_ENABLE_LONG_COMPONENTS, DEFAULT_ENABLE_LONG_COMPONENTS);
     enableLongComponentsCompatibility =
         config.getBoolean(
             PARAM_ENABLE_LONG_COMPONENTS_COMPATIBILITY,
             DEFAULT_ENABLE_LONG_COMPONENTS_COMPATIBILITY);
     maxComponentLength = config.getInt(PARAM_MAX_COMPONENT_LENGTH, DEFAULT_MAX_COMPONENT_LENGTH);
     checkUnnormalized =
         (CheckUnnormalizedMode)
             config.getEnum(
                 CheckUnnormalizedMode.class,
                 PARAM_CHECK_UNNORMALIZED,
                 DEFAULT_CHECK_UNNORMALIZED);
   }
 }
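  // Hypothetical test usage: pushing new values through ConfigurationUtil
  // re-invokes this callback with the matching changed keys; the expected
  // repoList follows from the "local:" prefixing above. (Assumes a
  // getRepositoryList() accessor; names here are illustrative.)
  //
  //   ConfigurationUtil.setFromArgs(
  //       ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST, "/disk1;/disk2");
  //   assertEquals(ListUtil.list("local:/disk1", "local:/disk2"),
  //                repoMgr.getRepositoryList());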