public static V1Poll createCompletedPoll(
    LockssDaemon daemon,
    ArchivalUnit au,
    V1LcapMessage testmsg,
    int numAgree,
    int numDisagree,
    PollManager pollmanager)
    throws Exception {
  log.debug(
      "createCompletedPoll: au: " + au.toString()
          + " peer " + testmsg.getOriginatorId()
          + " votes " + numAgree + "/" + numDisagree);
  // Build the CachedUrlSetSpec: single-node if the message's lower bound
  // carries the single-node marker, otherwise a range spec.
  CachedUrlSetSpec cusSpec = null;
  if ((testmsg.getLwrBound() != null)
      && (testmsg.getLwrBound().equals(PollSpec.SINGLE_NODE_LWRBOUND))) {
    cusSpec = new SingleNodeCachedUrlSetSpec(testmsg.getTargetUrl());
  } else {
    cusSpec = new RangeCachedUrlSetSpec(
        testmsg.getTargetUrl(), testmsg.getLwrBound(), testmsg.getUprBound());
  }
  CachedUrlSet cus = au.makeCachedUrlSet(cusSpec);
  PollSpec spec = new PollSpec(cus, Poll.V1_CONTENT_POLL);
  ((MockCachedUrlSet) spec.getCachedUrlSet()).setHasContent(false);
  // Instantiate the poll type that matches the message.
  V1Poll p = null;
  if (testmsg.isContentPoll()) {
    p = new V1ContentPoll(
        spec,
        pollmanager,
        testmsg.getOriginatorId(),
        testmsg.getChallenge(),
        testmsg.getDuration(),
        testmsg.getHashAlgorithm());
  } else if (testmsg.isNamePoll()) {
    p = new V1NamePoll(
        spec,
        pollmanager,
        testmsg.getOriginatorId(),
        testmsg.getChallenge(),
        testmsg.getDuration(),
        testmsg.getHashAlgorithm());
  } else if (testmsg.isVerifyPoll()) {
    p = new V1VerifyPoll(
        spec,
        pollmanager,
        testmsg.getOriginatorId(),
        testmsg.getChallenge(),
        testmsg.getDuration(),
        testmsg.getHashAlgorithm(),
        testmsg.getVerifier());
  }
  assertNotNull(p);
  p.setMessage(testmsg);
  // Populate the tally as if the poll had already run to completion.
  p.m_tally.quorum = numAgree + numDisagree;
  p.m_tally.numAgree = numAgree;
  p.m_tally.numDisagree = numDisagree;
  p.m_tally.wtAgree = 2000;
  p.m_tally.wtDisagree = 200;
  p.m_tally.localEntries = makeEntries(1, 3);
  p.m_tally.votedEntries = makeEntries(1, 5);
  p.m_tally.votedEntries.remove(1);
  p.m_pollstate = V1Poll.PS_COMPLETE;
  p.m_callerID = testmsg.getOriginatorId();
  log.debug3("poll " + p.toString());
  p.m_tally.tallyVotes();
  return p;
}
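// A minimal usage sketch (not from the original source): how a test might
// exercise createCompletedPoll(). MockLockssDaemon and MockArchivalUnit come
// from the LOCKSS test framework; makeTestMsg() is a hypothetical helper
// standing in for whatever builds the V1LcapMessage fixture in the enclosing
// test class.
//
//   public void testCreateCompletedPoll() throws Exception {
//     MockLockssDaemon daemon = getMockLockssDaemon();
//     ArchivalUnit au = new MockArchivalUnit();
//     V1LcapMessage testmsg = makeTestMsg(au);  // hypothetical helper
//     PollManager pm = daemon.getPollManager();
//     V1Poll poll = createCompletedPoll(daemon, au, testmsg, 8, 2, pm);
//     assertEquals(10, poll.m_tally.quorum);
//     assertEquals(8, poll.m_tally.numAgree);
//     assertEquals(2, poll.m_tally.numDisagree);
//   }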
/** Explode the archive into its constituent elements */
public void explode() throws CacheException {
  CachedUrl cachedUrl = null;
  int goodEntries = 0;
  int badEntries = 0;
  int ignoredEntries = 0;
  int entriesBetweenSleep = 0;
  ArchiveReader arcReader = null;
  logger.info(
      (storeArchive ? "Storing" : "Fetching") + " WARC file: " + origUrl + " will explode");
  try {
    if (storeArchive) {
      // Store the WARC file itself first, keeping the input stream open
      // so it can be rewound and re-read for exploding below.
      UrlCacher uc = au.makeUrlCacher(new UrlData(arcStream, arcProps, fetchUrl));
      BitSet bs = new BitSet();
      bs.set(UrlCacher.DONT_CLOSE_INPUT_STREAM_FLAG);
      uc.setFetchFlags(bs);
      uc.storeContent();
      archiveData.resetInputStream();
      arcStream = archiveData.input;
    }
    // Wrap it in an ArchiveReader
    logger.debug3("About to wrap stream");
    arcReader = wrapStream(fetchUrl, arcStream);
    logger.debug3("wrapStream() returns " + (arcReader == null ? "null" : "non-null"));
    // Explode it
    if (arcReader == null) {
      throw new CacheException.ExploderException("no WarcReader for " + origUrl);
    }
    ArchivalUnit au = crawlFacade.getAu();
    Set stemSet = new HashSet();
    logger.debug("Exploding " + fetchUrl);
    // Iterate through the elements in the WARC file, except the first
    Iterator i = arcReader.iterator();
    // Skip first record
    for (i.next(); i.hasNext(); ) {
      // XXX probably not necessary
      helper.pokeWDog();
      // Pause periodically so the explode doesn't monopolize the machine.
      if ((++entriesBetweenSleep % sleepAfter) == 0) {
        long pauseTime =
            CurrentConfig.getTimeIntervalParam(PARAM_RETRY_PAUSE, DEFAULT_RETRY_PAUSE);
        Deadline pause = Deadline.in(pauseTime);
        logger.debug3("Sleeping for " + StringUtil.timeIntervalToString(pauseTime));
        while (!pause.expired()) {
          try {
            pause.sleep();
          } catch (InterruptedException ie) {
            // no action
          }
        }
      }
      ArchiveRecord element = (ArchiveRecord) i.next();
      // Each element is a URL to be cached in a suitable AU
      ArchiveRecordHeader elementHeader = element.getHeader();
      String elementUrl = elementHeader.getUrl();
      String elementMimeType = elementHeader.getMimetype();
      long elementLength = elementHeader.getLength();
      logger.debug2("WARC url " + elementUrl + " mime " + elementMimeType);
      if (elementUrl.startsWith("http:")) {
        ArchiveEntry ae =
            new ArchiveEntry(
                elementUrl,
                elementLength,
                0, // XXX need to convert getDate string to long
                element, // ArchiveRecord extends InputStream
                this,
                fetchUrl);
        ae.setHeaderFields(makeCIProperties(elementHeader));
        long bytesStored = elementLength;
        logger.debug3("ArchiveEntry: " + ae.getName() + " bytes " + bytesStored);
        try {
          helper.process(ae);
        } catch (PluginException ex) {
          throw new CacheException.ExploderException("helper.process() threw", ex);
        }
        if (ae.getBaseUrl() != null) {
          if (ae.getRestOfUrl() != null && ae.getHeaderFields() != null) {
            storeEntry(ae);
            handleAddText(ae);
            goodEntries++;
            crawlFacade.getCrawlerStatus().addContentBytesFetched(bytesStored);
          } else {
            // The helper mapped the entry to an AU but produced no storable
            // URL or headers; count it as ignored rather than bad.
            ignoredEntries++;
          }
        } else {
          badEntries++;
          logger.debug2("Can't map " + elementUrl + " from " + archiveUrl);
        }
      }
    }
  } catch (IOException ex) {
    throw new CacheException.ExploderException(ex);
  } finally {
    if (arcReader != null) {
      try {
        arcReader.close();
        arcReader = null;
      } catch (IOException ex) {
        throw new CacheException.ExploderException(ex);
      }
    }
    if (cachedUrl != null) {
      cachedUrl.release();
    }
    IOUtil.safeClose(arcStream);
  }
  if (badEntries == 0 && goodEntries > 0) {
    // Make it look like a new crawl finished on each AU to which
    // URLs were added.
    for (Iterator it = touchedAus.iterator(); it.hasNext(); ) {
      ArchivalUnit au = (ArchivalUnit) it.next();
      logger.debug3(archiveUrl + " touching " + au.toString());
      AuUtil.getDaemon(au).getNodeManager(au).newContentCrawlFinished();
    }
  } else {
    ArchivalUnit au = crawlFacade.getAu();
    String msg = archiveUrl + ": " + badEntries + "/" + goodEntries + " bad entries";
    throw new CacheException.UnretryableException(msg);
  }
}
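// A minimal sketch (not from the original source) isolating the
// skip-first-record iteration pattern used in explode() above: the call to
// next() in the for-loop initializer consumes the WARC's leading record
// before the loop body ever runs, so only subsequent records are processed.
// "records" stands in for the arcReader.iterator() result.
//
//   Iterator records = arcReader.iterator();
//   for (records.next(); records.hasNext(); ) {  // initializer skips record 0
//     ArchiveRecord rec = (ArchiveRecord) records.next();
//     // ... process rec ...
//   }
//
// Note that the initial records.next() would throw NoSuchElementException on
// an empty archive; a hasNext() guard before the loop would make that
// failure mode explicit.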