public void testHeadNotPopular() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  byte[] hash1 = addVersion(vb1, "content 1 for foo1");
  byte[] hash2 = addVersion(vb1, "content 2 for foo1");

  VoteBlock vb2 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 2 for foo1");

  VoteBlock vb3 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb3, "content 3 for foo1");
  addVersion(vb3, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);
  versionCounts.vote(vb2, participant2);
  versionCounts.vote(vb3, participant3);

  Map<ParticipantUserData, HashResult> repairCandidates;
  repairCandidates = versionCounts.getRepairCandidates(0);
  assertSameElements(SetUtil.set(participant1, participant2, participant3),
                     repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(1);
  assertSameElements(SetUtil.set(participant1, participant2, participant3),
                     repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(2);
  assertSameElements(SetUtil.set(participant1, participant2),
                     repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(3);
  assertEmpty(repairCandidates.keySet());
}
public Map<String, PlatformUtil.DF> getRepositoryMap() {
  Map<String, PlatformUtil.DF> repoMap = new LinkedMap();
  for (String repo : getRepositoryList()) {
    repoMap.put(repo, getRepositoryDF(repo));
  }
  return repoMap;
}
public void testMultipleIdenticalVersions() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  byte[] hash1 = addVersion(vb1, "content 1 for foo1");
  byte[] hash2 = addVersion(vb1, "content 2 for foo1");

  VoteBlock vb2 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 2 for foo1");

  VoteBlock vb3 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb3, "content 1 for foo1");
  addVersion(vb3, "content 2 for foo1");
  addVersion(vb3, "content 2 for foo1");
  addVersion(vb3, "content 2 for foo1");
  addVersion(vb3, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);
  versionCounts.vote(vb2, participant2);
  versionCounts.vote(vb3, participant3);

  Map<ParticipantUserData, HashResult> repairCandidates;
  repairCandidates = versionCounts.getRepairCandidates(2);
  assertSameElements(SetUtil.set(participant1, participant2, participant3),
                     repairCandidates.keySet());

  // With only three candidates, no version should reach a threshold
  // of 4, unless counting multiples is wrong.
  repairCandidates = versionCounts.getRepairCandidates(4);
  assertEmpty(repairCandidates.keySet());
}
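// The two VersionCounts tests above exercise the threshold semantics of
// getRepairCandidates(): a participant stays in the candidate map only if it
// voted for some version seen by at least the requested number of participants,
// and repeated versions within a single vote are counted once. Below is a
// minimal sketch of the intended call pattern; it assumes only the make(),
// vote(), and getRepairCandidates() calls exercised above, and the method name
// and voteBlocks parameter are hypothetical.
Set<ParticipantUserData> sketchChooseRepairPeers(
    Map<ParticipantUserData, VoteBlock> voteBlocks, int repairThreshold) {
  VersionCounts versionCounts = VersionCounts.make();
  for (Map.Entry<ParticipantUserData, VoteBlock> ent : voteBlocks.entrySet()) {
    versionCounts.vote(ent.getValue(), ent.getKey());
  }
  // Only peers whose voted version reached the threshold remain candidates.
  return versionCounts.getRepairCandidates(repairThreshold).keySet();
}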
protected void initFeatureVersions() throws PluginException.InvalidDefinition {
  if (definitionMap.containsKey(KEY_PLUGIN_FEATURE_VERSION_MAP)) {
    Map<Plugin.Feature, String> map = new HashMap<Plugin.Feature, String>();
    Map<String, String> spec =
        (Map<String, String>) definitionMap.getMap(KEY_PLUGIN_FEATURE_VERSION_MAP);
    log.debug2("features: " + spec);
    for (Map.Entry<String, String> ent : spec.entrySet()) {
      try {
        // Prefix version string with feature name to create separate
        // namespace for each feature
        String key = ent.getKey();
        map.put(Plugin.Feature.valueOf(key), key + "_" + ent.getValue());
      } catch (RuntimeException e) {
        log.warning(getPluginName() + " set unknown feature: " + ent.getKey()
                    + " to version " + ent.getValue(), e);
        throw new PluginException.InvalidDefinition("Unknown feature: " + ent.getKey(), e);
      }
    }
    featureVersion = map;
  } else {
    featureVersion = null;
  }
}
/** Create LockssKeystores from config subtree below {@link #PARAM_KEYSTORE} */
void configureKeyStores(Configuration config) {
  Configuration allKs = config.getConfigTree(PARAM_KEYSTORE);
  for (Iterator iter = allKs.nodeIterator(); iter.hasNext(); ) {
    String id = (String) iter.next();
    Configuration oneKs = allKs.getConfigTree(id);
    try {
      LockssKeyStore lk = createLockssKeyStore(oneKs);
      String name = lk.getName();
      if (name == null) {
        log.error("KeyStore definition missing name: " + oneKs);
        continue;
      }
      LockssKeyStore old = keystoreMap.get(name);
      if (old != null && !lk.equals(old)) {
        log.warning("Keystore " + name + " redefined. "
                    + "New definition may not take effect until daemon restart");
      }
      log.debug("Adding keystore " + name);
      keystoreMap.put(name, lk);
    } catch (Exception e) {
      log.error("Couldn't create keystore: " + oneKs, e);
    }
  }
}
/**
 * Return the request parameters as a Map<String,String>. Only the first value of
 * multivalued parameters is included.
 */
Map<String, String> getParamsAsMap() {
  Map<String, String> map = new HashMap<String, String>();
  for (Enumeration en = req.getParameterNames(); en.hasMoreElements(); ) {
    String name = (String) en.nextElement();
    map.put(name, req.getParameter(name));
  }
  return map;
}
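// Hypothetical illustration (not from the original source) of the first-value
// behavior of getParamsAsMap() above: for a request whose query string is
// "fmt=text&fmt=html&table=AuIds", the returned map contains fmt=text and
// table=AuIds; the second "fmt" value is dropped because only
// req.getParameter(name) (the first value) is consulted.
void sketchGetParamsAsMap() {
  Map<String, String> params = getParamsAsMap();
  log.debug("fmt=" + params.get("fmt") + " table=" + params.get("table"));
}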
private Map makeRow(CrawlerStatus status, String mimeType, String key) {
  Map row = new HashMap();
  row.put(MIME_TYPE_NAME, mimeType);
  row.put(MIME_TYPE_NUM_URLS,
          makeRefIfColl(status.getMimeTypeCtr(mimeType), key,
                        MIMETYPES_URLS_KEY + ":" + mimeType));
  return row;
}
// Break the line at commas, return a map of the resulting strings
// broken at equals sign.  (<i>Ie</i>, name value pairs.)
Map getRow(String line) {
  Map map = new HashMap();
  for (Iterator iter = StringUtil.breakAt(line, ',').iterator(); iter.hasNext(); ) {
    String item = (String) iter.next();
    List pair = StringUtil.breakAt(item, '=');
    map.put(pair.get(0), pair.get(1));
  }
  return map;
}
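// Hypothetical usage sketch for getRow() above (not part of the original
// source): the input is the comma-separated name=value encoding used by the
// plain-text status tables, as exercised by testText() further below.
void sketchGetRowUsage() {
  Map row = getRow("host=2.4.6.8,table=testtbl");
  assertEquals("2.4.6.8", row.get("host"));
  assertEquals("testtbl", row.get("table"));
}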
// hack only local
public synchronized LockssRepositoryImpl getRepositoryFromPath(String path) {
  LockssRepositoryImpl repo = (LockssRepositoryImpl) localRepos.get(path);
  if (repo == null) {
    repo = new LockssRepositoryImpl(path);
    repo.initService(getDaemon());
    repo.startService();
    localRepos.put(path, repo);
  }
  return repo;
}
protected void initAuFeatureMap() {
  if (definitionMap.containsKey(DefinableArchivalUnit.KEY_AU_FEATURE_URL_MAP)) {
    Map<String, ?> featMap =
        definitionMap.getMap(DefinableArchivalUnit.KEY_AU_FEATURE_URL_MAP);
    for (Map.Entry ent : featMap.entrySet()) {
      Object val = ent.getValue();
      if (val instanceof Map) {
        ent.setValue(MapUtil.expandAlternativeKeyLists((Map) val));
      }
    }
  }
}
void loadKeyStores() {
  List<LockssKeyStore> lst = new ArrayList<LockssKeyStore>(keystoreMap.values());
  for (LockssKeyStore lk : lst) {
    try {
      lk.load();
    } catch (Exception e) {
      log.error("Can't load keystore " + lk.getName(), e);
      keystoreMap.remove(lk.getName());
    }
  }
}
static LocalRepository getLocalRepository(String repoRoot) {
  synchronized (localRepositories) {
    LocalRepository localRepo = (LocalRepository) localRepositories.get(repoRoot);
    if (localRepo == null) {
      logger.debug2("Creating LocalRepository(" + repoRoot + ")");
      localRepo = new LocalRepository(repoRoot);
      localRepositories.put(repoRoot, localRepo);
    }
    return localRepo;
  }
}
public String findLeastFullRepository(Map<String, PlatformUtil.DF> repoMap) {
  String mostFree = null;
  for (String repo : repoMap.keySet()) {
    PlatformUtil.DF df = repoMap.get(repo);
    if (df != null) {
      if (mostFree == null
          || (repoMap.get(mostFree)).getAvail() < df.getAvail()) {
        mostFree = repo;
      }
    }
  }
  return mostFree;
}
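// A minimal sketch (not from the original source) combining getRepositoryMap()
// and findLeastFullRepository() above to choose the repository root with the
// most available space for new content; the method name is hypothetical.
String sketchPickRepository() {
  Map<String, PlatformUtil.DF> repoMap = getRepositoryMap();
  // Returns null if no disk-free info is available for any repository.
  return findLeastFullRepository(repoMap);
}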
/*
 * When testing no-pdf-check basic XML parsing, you will get partial MD records
 * depending on whether the info comes from dataset.xml or from main.xml
 */
private void validateDatasetMetadataRecord(ArticleMetadata am) {
  log.debug3("validateDatasetMetadataRecord");
  String doi_val = am.get(MetadataField.FIELD_DOI);
  assertEquals(common_issn, am.get(MetadataField.FIELD_ISSN));

  log.debug3("doi val is: " + doi_val);
  // The dataset doesn't set this value; it'll fail over to the main.xml value
  if (doi_val.equals("10.1016/S0140-1111(14)61865-1")) {
    assertEquals(null, am.get(MetadataField.FIELD_DATE));
  } else {
    assertEquals(dateMap.get(doi_val), am.get(MetadataField.FIELD_DATE));
  }
  assertEquals(pubTitleMap.get(doi_val), am.get(MetadataField.FIELD_PUBLICATION_TITLE));
}
static String canonRoot(String root) {
  synchronized (canonicalRoots) {
    String canon = (String) canonicalRoots.get(root);
    if (canon == null) {
      try {
        canon = new File(root).getCanonicalPath();
        canonicalRoots.put(root, canon);
      } catch (IOException e) {
        logger.warning("Can't canonicalize: " + root, e);
        return root;
      }
    }
    return canon;
  }
}
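// Illustrative sketch (assumed paths, not from the original source) of the
// canonRoot() cache above: the first call for a given root resolves ".."
// segments and symlinks via File.getCanonicalPath() and memoizes the result
// keyed by the raw root string.
void sketchCanonRoot() {
  String a = canonRoot("/tmp/lockss/cache");
  String b = canonRoot("/tmp/lockss/../lockss/cache");
  // Assuming no symlinks in these paths, a and b are both "/tmp/lockss/cache".
  // A repeated canonRoot("/tmp/lockss/cache") call is answered from the
  // canonicalRoots cache without touching the filesystem again.
  logger.debug3("canon roots: " + a + " == " + b);
}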
/**
 * Return the number of TdbTitles in this Tdb.
 *
 * @return the total TdbTitle count
 */
public int getTdbTitleCount() {
  int titleCount = 0;
  for (TdbPublisher publisher : tdbPublisherMap.values()) {
    titleCount += publisher.getTdbTitleCount();
  }
  return titleCount;
}
/*
 * You will have to tell it the DOI and the schema because those normally come from dataset.xml
 */
private void validateSingleMainMetadataRecord(ArticleMetadata am, String doi_val, String schema) {
  log.debug3("validateSingleMainMetadataRecord");

  if ("simple-article".equals(schema)) {
    assertEquals(common_simple_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
  } else {
    assertEquals(common_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
  }

  log.debug3("doi val is: " + doi_val);
  assertEquals(authorMap.get(doi_val), am.getList(MetadataField.FIELD_AUTHOR));
  assertEquals(volMap.get(doi_val), am.get(MetadataField.FIELD_VOLUME));
  assertEquals(issueMap.get(doi_val), am.get(MetadataField.FIELD_ISSUE));
  assertEquals("Comment", am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_dochead));
  assertEquals(doi_val, am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_doi));
  assertEquals("2014", am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_copyright));
}
/**
 * Add TdbAus whose names start with ("are like") the specified TdbAu name.
 *
 * @param tdbAuName the name of the AU to select
 * @param aus the collection to add to
 * @return <code>true</code> if TdbAus were added to the collection
 */
public boolean getTdbAusLikeName(String tdbAuName, Collection<TdbAu> aus) {
  boolean added = false;
  for (TdbPublisher publisher : tdbPublisherMap.values()) {
    added |= publisher.getTdbAusLikeName(tdbAuName, aus);
  }
  return added;
}
/**
 * Finds the directory for this AU. If none is found in the map and create is true,
 * designates a new dir for it.
 *
 * @param auid AU id representing the au
 * @param repoRoot path to the root of the repository
 * @param create if true, allocate a new directory when none is mapped
 * @return the dir String, or null if no dir is mapped and create is false
 */
static String getAuDir(String auid, String repoRoot, boolean create) {
  String repoCachePath = extendCacheLocation(repoRoot);
  LocalRepository localRepo = getLocalRepository(repoRoot);
  synchronized (localRepo) {
    Map aumap = localRepo.getAuMap();
    String auPathSlash = (String) aumap.get(auid);
    if (auPathSlash != null) {
      return auPathSlash;
    }
    if (!create) {
      return null;
    }
    logger.debug3("Creating new au directory for '" + auid + "'.");
    String auDir = localRepo.getPrevAuDir();
    for (int cnt = RepositoryManager.getMaxUnusedDirSearch(); cnt > 0; cnt--) {
      // loop through looking for an available dir
      auDir = getNextDirName(auDir);
      File testDir = new File(repoCachePath, auDir);
      if (logger.isDebug3()) logger.debug3("Probe for unused: " + testDir);
      if (!testDir.exists()) {
        if (RepositoryManager.isStatefulUnusedDirSearch()) {
          localRepo.setPrevAuDir(auDir);
        }
        String auPath = testDir.toString();
        logger.debug3("New au directory: " + auPath);
        auPathSlash = auPath + File.separator;
        // write the new au property file to the new dir
        // XXX this data should be backed up elsewhere to avoid single-point
        // corruption
        Properties idProps = new Properties();
        idProps.setProperty(AU_ID_PROP, auid);
        saveAuIdProperties(auPath, idProps);
        aumap.put(auid, auPathSlash);
        return auPathSlash;
      } else {
        if (logger.isDebug3()) {
          logger.debug3("Existing directory found at '" + auDir + "'. Checking next...");
        }
      }
    }
  }
  throw new RuntimeException("Can't find unused repository dir after "
                             + RepositoryManager.getMaxUnusedDirSearch()
                             + " tries in " + repoCachePath);
}
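// Hypothetical usage sketch for getAuDir() above (values are illustrative, not
// from the original source). With create == true an unused subdirectory is
// allocated, an au id property file is written into it, and the mapping is
// remembered, so a later lookup with create == false returns the same path.
void sketchGetAuDir() {
  String repoRoot = "/cache0";                         // hypothetical repository root
  String auid = "org|lockss|plugin|ExamplePlugin&k~v"; // hypothetical au id
  String created = getAuDir(auid, repoRoot, true);
  String found = getAuDir(auid, repoRoot, false);
  // created and found refer to the same au subdirectory (with trailing separator).
  logger.debug3("au dir: " + created + " == " + found);
}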
public void setRepoMap(Map<String, PlatformUtil.DF> repoMap) {
  List repos = new ArrayList();
  this.repoMap = repoMap;
  for (String repo : repoMap.keySet()) {
    repos.add(repo);
  }
  setRepos(repos);
}
/**
 * Adds to a collection the TdbTitles whose names start with ("are like") the specified
 * title name, across all publishers.
 *
 * @param titleName the title name
 * @param titles the collection to which matching titles are added
 * @return <code>true</code> if TdbTitles were added to the collection
 */
public boolean getTdbTitlesLikeName(String titleName, Collection<TdbTitle> titles) {
  boolean added = false;
  if (titleName != null) {
    for (TdbPublisher publisher : tdbPublisherMap.values()) {
      added |= publisher.getTdbTitlesLikeName(titleName, titles);
    }
  }
  return added;
}
/**
 * Add to a collection the TdbAus in this Tdb that match the given ISBN.
 *
 * @param isbn the ISBN to match
 * @param matchingTdbAus the collection to add matching TdbAus to
 * @return <code>true</code> if TdbAus were added to the collection
 */
public boolean getTdbAusByIsbn(String isbn, Collection<TdbAu> matchingTdbAus) {
  boolean added = false;
  if (isbn != null) {
    for (TdbPublisher tdbPublisher : tdbPublisherMap.values()) {
      added |= tdbPublisher.getTdbAusByIsbn(matchingTdbAus, isbn);
    }
  }
  return added;
}
/** If in testing mode FOO, copy values from FOO_override map, if any, to main map */
void processOverrides(TypedEntryMap map) {
  String testMode = getTestingMode();
  if (StringUtil.isNullString(testMode)) {
    return;
  }
  Object o = map.getMapElement(testMode + DefinableArchivalUnit.SUFFIX_OVERRIDE);
  if (o == null) {
    return;
  }
  if (o instanceof Map) {
    Map overrideMap = (Map) o;
    for (Map.Entry entry : (Set<Map.Entry>) overrideMap.entrySet()) {
      String key = (String) entry.getKey();
      Object val = entry.getValue();
      log.debug(getDefaultPluginName() + ": Overriding " + key + " with " + val);
      map.setMapElement(key, val);
    }
  }
}
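// A minimal sketch (not from the original source) of the override mechanism in
// processOverrides() above: when getTestingMode() returns a mode name, entries
// from the "<mode> + SUFFIX_OVERRIDE" sub-map replace the corresponding
// top-level entries. The "simulated_content" mode name and the au_crawl_depth
// key are hypothetical examples.
void sketchProcessOverrides(TypedEntryMap map) {
  Map<String, Object> overrides = new HashMap<String, Object>();
  overrides.put("au_crawl_depth", 2);
  // Store the override sub-map under the key processOverrides() will look up.
  map.setMapElement("simulated_content" + DefinableArchivalUnit.SUFFIX_OVERRIDE,
                    overrides);
  processOverrides(map);
  // If getTestingMode() returns "simulated_content", map now holds
  // au_crawl_depth == 2 at top level.
}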
/**
 * Get the title for the specified titleId.
 *
 * @param titleId the titleId
 * @return the title for the titleId, or <code>null</code> if not found
 */
public TdbTitle getTdbTitleById(String titleId) {
  if (titleId != null) {
    for (TdbPublisher publisher : tdbPublisherMap.values()) {
      TdbTitle title = publisher.getTdbTitleById(titleId);
      if (title != null) {
        return title;
      }
    }
  }
  return null;
}
public void testText() throws Exception {
  MockStatusAccessor statusAccessor =
      MockStatusAccessor.generateStatusAccessor(colArray1, rowArray1);
  statusAccessor.setTitle("testtbl", null);
  statSvc.registerStatusAccessor("testtbl", statusAccessor);
  WebResponse resp = getTable("testtbl", true);
  assertResponseOk(resp);
  assertEquals("Content type", "text/plain", resp.getContentType());
  log.debug(resp.getText());
  List lines = getLines(resp);
  assertEquals(rowArray1.length + 3, lines.size());
  Map row0 = getRow((String) lines.get(0));
  assertEquals("2.4.6.8", row0.get("host"));
  Map row2 = getRow((String) lines.get(2));
  assertEquals("testtbl", row2.get("table"));
  assertEqualTables(table1, lines);
}
/**
 * Get a title for the specified ISSN.
 *
 * @param issn the ISSN
 * @return the title for the ISSN, or <code>null</code> if not found
 */
public TdbTitle getTdbTitleByIssn(String issn) {
  if (issn != null) {
    for (TdbPublisher publisher : tdbPublisherMap.values()) {
      TdbTitle title = publisher.getTdbTitleByIssn(issn);
      if (title != null) {
        return title;
      }
    }
  }
  return null;
}
public void testHeadNotAllowed() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  byte[] hash1 = addVersion(vb1, "content 1 for foo1");
  byte[] hash2 = addVersion(vb1, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);

  Map<ParticipantUserData, HashResult> repairCandidates;
  repairCandidates = versionCounts.getRepairCandidates(0);
  assertSameElements(SetUtil.set(participant1), repairCandidates.keySet());

  // Same, but with an excluded version that doesn't matter.
  repairCandidates =
      versionCounts.getRepairCandidates(0, SetUtil.set(HashResult.make(hash2)));
  assertSameElements(SetUtil.set(participant1), repairCandidates.keySet());

  // Same, but with an excluded version that does matter.
  repairCandidates =
      versionCounts.getRepairCandidates(0, SetUtil.set(HashResult.make(hash1)));
  assertEmpty(repairCandidates);
}
/**
 * Adds to a set the pluginIds of TdbAus in this Tdb that differ from those in another Tdb.
 *
 * @param pluginIds the set of pluginIds to add to
 * @param otherTdb the Tdb to compare against
 */
private void addPluginIdsForDifferences(Set<String> pluginIds, Tdb otherTdb) {
  Map<String, TdbPublisher> tdbPublishers = otherTdb.getAllTdbPublishers();
  for (TdbPublisher tdbPublisher : tdbPublishers.values()) {
    if (!this.tdbPublisherMap.containsKey(tdbPublisher.getName())) {
      // add pluginIds for publishers in otherTdb that are not in this Tdb
      tdbPublisher.addAllPluginIds(pluginIds);
    }
  }
  for (TdbPublisher thisPublisher : tdbPublisherMap.values()) {
    TdbPublisher tdbPublisher = tdbPublishers.get(thisPublisher.getName());
    if (tdbPublisher == null) {
      // add pluginIds for publisher in this Tdb that is not in otherTdb
      thisPublisher.addAllPluginIds(pluginIds);
    } else {
      // add pluginIds for publishers in both Tdbs that are different
      thisPublisher.addPluginIdsForDifferences(pluginIds, tdbPublisher);
    }
  }
}
/**
 * Add a new TdbAu to this title database. The TdbAu must have its pluginId and title set. The
 * TdbAu's title must also have its titleId and publisher set. The publisher name must be unique
 * across all publishers in this Tdb.
 *
 * @param au the TdbAu to add
 * @throws TdbException if this Tdb is sealed, this is a duplicate au, or the au's publisher is a
 *     duplicate
 */
public void addTdbAu(TdbAu au) throws TdbException {
  if (au == null) {
    throw new IllegalArgumentException("TdbAu cannot be null");
  }

  // verify not sealed
  if (isSealed()) {
    throw new TdbException("Cannot add TdbAu to sealed Tdb");
  }

  // validate title
  TdbTitle title = au.getTdbTitle();
  if (title == null) {
    throw new IllegalArgumentException("TdbAu's title not set");
  }

  // validate publisher
  TdbPublisher publisher = title.getTdbPublisher();
  if (publisher == null) {
    throw new IllegalArgumentException("TdbAu's publisher not set");
  }

  // make sure publisher is not a duplicate
  String pubName = publisher.getName();
  TdbPublisher oldPublisher = tdbPublisherMap.put(pubName, publisher);
  if ((oldPublisher != null) && (oldPublisher != publisher)) {
    // restore old publisher and report error
    tdbPublisherMap.put(pubName, oldPublisher);
    throw new TdbException("New au publisher with duplicate name: " + pubName);
  }

  // register the au with this instance
  if (!addTdbAuForPlugin(au)) {
    // remove new publisher and report error
    if (oldPublisher == null) {
      tdbPublisherMap.remove(pubName);
    }
    throw new TdbException("Cannot register au " + au.getName());
  }
}
/**
 * Return the auid -> au-subdir-path mapping, enumerating the directories if necessary to
 * initialize the map.
 */
Map getAuMap() {
  if (auMap == null) {
    logger.debug3("Loading name map for '" + repoCacheFile + "'.");
    auMap = new HashMap();
    if (!repoCacheFile.exists()) {
      logger.debug3("Creating cache dir: '" + repoCacheFile + "'.");
      if (!repoCacheFile.mkdirs()) {
        logger.critical("Couldn't create directory, check owner/permissions: " + repoCacheFile);
        // return empty map
        return auMap;
      }
    } else {
      // read each dir's property file and store mapping auid -> dir
      File[] auDirs = repoCacheFile.listFiles();
      for (int ii = 0; ii < auDirs.length; ii++) {
        String dirName = auDirs[ii].getName();
        //       if (dirName.compareTo(lastPluginDir) == 1) {
        //         // adjust the 'lastPluginDir' upwards if necessary
        //         lastPluginDir = dirName;
        //       }
        String path = auDirs[ii].getAbsolutePath();
        Properties idProps = getAuIdProperties(path);
        if (idProps != null) {
          String auid = idProps.getProperty(AU_ID_PROP);
          StringBuilder sb = new StringBuilder(path.length() + File.separator.length());
          sb.append(path);
          sb.append(File.separator);
          auMap.put(auid, sb.toString());
          logger.debug3("Mapping to: " + auMap.get(auid) + ": " + auid);
        } else {
          logger.debug3("Not mapping " + path + ", no auid file.");
        }
      }
    }
  }
  return auMap;
}