// Break the line at commas, return a map of the resulting strings
// broken at equals sign.  (I.e., name-value pairs.)
Map getRow(String line) {
  Map map = new HashMap();
  for (Iterator iter = StringUtil.breakAt(line, ',').iterator(); iter.hasNext(); ) {
    String item = (String) iter.next();
    List pair = StringUtil.breakAt(item, '=');
    map.put(pair.get(0), pair.get(1));
  }
  return map;
}
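To make the expected input and output concrete, here is a minimal standalone sketch of the same name-value parsing. It uses String.split in place of StringUtil.breakAt (whose behavior on simple, well-formed input is assumed to be equivalent here), and the sample data is hypothetical:

import java.util.HashMap;
import java.util.Map;

public class GetRowSketch {
  // Parse comma-separated "name=value" pairs, mirroring getRow() above.
  static Map<String, String> getRow(String line) {
    Map<String, String> map = new HashMap<>();
    for (String item : line.split(",")) {
      String[] pair = item.split("=", 2);
      map.put(pair[0], pair[1]);
    }
    return map;
  }

  public static void main(String[] args) {
    // Prints {color=red, size=10} (HashMap iteration order may vary).
    System.out.println(getRow("color=red,size=10"));
  }
}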
protected void assertEqualTables(Object[][] a1, List lines) {
  assertEquals("numrows", a1.length, lines.size() - NUM_HEADER_LINES);
  for (int irow = 0; irow < a1.length; irow++) {
    Object[] expRow = a1[irow];
    List row = StringUtil.breakAt((String) lines.get(irow + NUM_HEADER_LINES), ',');
    assertEquals("numcols", expRow.length, row.size());
    assertEquals("row " + irow, SetUtil.fromArray(expRow), new HashSet(row));
  }
}
/**
 * Make an unmodifiable view of a HashMap containing the reputation transfers
 * present in peerPairs.  Ignore the second time a peer is listed as a source
 * or destination.
 */
private Map<PeerIdentity, Collection<PeerIdentity>> makeMap(
    Collection<String> peerPairs, IdentityManager idManager) {
  if (peerPairs == null) {
    return Collections.emptyMap();
  } else {
    HashMap<PeerIdentity, PeerIdentity> pidPidMap =
        new HashMap<PeerIdentity, PeerIdentity>();
    for (String onePair : peerPairs) {
      // discardEmptyStrings = true, trimEachString = true
      List<String> list = StringUtil.breakAt(onePair, ',', -1, true, true);
      if (list.size() == 2) {
        try {
          PeerIdentity oldPid = idManager.stringToPeerIdentity(list.get(0));
          PeerIdentity newPid = idManager.stringToPeerIdentity(list.get(1));
          if (oldPid == newPid) {
            log.warning("Trying to extend a peer's reputation to itself: " + oldPid);
            continue;
          }
          if (pidPidMap.containsKey(newPid)) {
            log.warning("Ignoring second transfer from " + oldPid + " to " + newPid
                        + ". Keeping " + pidPidMap.get(newPid) + " to " + newPid + ".");
            continue;
          }
          if (pidPidMap.containsValue(oldPid)) {
            log.warning("Ignoring second transfer from " + oldPid + " to " + newPid
                        + ". " + oldPid + " is already a reputation donor.");
            continue;
          }
          pidPidMap.put(newPid, oldPid);
          if (log.isDebug2()) {
            log.debug2("Extend reputation from " + oldPid + " to " + newPid);
          }
        } catch (IdentityManager.MalformedIdentityKeyException e) {
          log.warning("Bad peer id in peer2peer map entry " + list, e);
        }
      } else {
        log.warning("Malformed reputation mapping: " + onePair);
      }
    }
    HashMap<PeerIdentity, Collection<PeerIdentity>> pidColMap =
        new HashMap<PeerIdentity, Collection<PeerIdentity>>();
    for (PeerIdentity rootPid : pidPidMap.keySet()) {
      Collection<PeerIdentity> oldPids = new ArrayList<PeerIdentity>();
      PeerIdentity newPid = rootPid;
      while (newPid != null) {
        oldPids.add(newPid);
        newPid = pidPidMap.get(newPid);
        // Found a loop; stop.
        if (oldPids.contains(newPid)) {
          log.warning("Found cycle: " + rootPid);
          break;
        }
      }
      // oldPids will be returned to clients -- make sure it's unmodifiable.
      pidColMap.put(rootPid, Collections.unmodifiableCollection(oldPids));
    }
    return Collections.unmodifiableMap(pidColMap);
  }
}
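To show how the donor chains are flattened, here is a minimal standalone sketch that substitutes plain Strings for PeerIdentity and uses hypothetical peers A, B, and C. Given the transfers A to B and B to C, destination B resolves to the chain [B, A] and C resolves to [C, B, A]:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ReputationChainSketch {
  public static void main(String[] args) {
    // destination -> donor, mirroring pidPidMap above ("A,B" means A donates to B).
    Map<String, String> donorOf = new HashMap<>();
    donorOf.put("B", "A");
    donorOf.put("C", "B");

    Map<String, List<String>> result = new HashMap<>();
    for (String root : donorOf.keySet()) {
      List<String> chain = new ArrayList<>();
      String cur = root;
      while (cur != null) {
        chain.add(cur);
        cur = donorOf.get(cur);
        if (chain.contains(cur)) {  // cycle guard, as in makeMap() above
          break;
        }
      }
      result.put(root, chain);
    }
    // Prints {B=[B, A], C=[C, B, A]} (map iteration order may vary).
    System.out.println(result);
  }
}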
public void setConfig(Configuration config, Configuration oldConfig,
                      Configuration.Differences changedKeys) {
  // Build the list of repositories from the list of disk (fs) paths.  This
  // needs to be generalized if another repository implementation is ever added.
  if (changedKeys.contains(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST)) {
    List lst = new ArrayList();
    String dspace = config.get(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST, "");
    List paths = StringUtil.breakAt(dspace, ';');
    if (paths != null) {
      for (Iterator iter = paths.iterator(); iter.hasNext(); ) {
        lst.add("local:" + (String) iter.next());
      }
    }
    repoList = lst;
  }
  if (changedKeys.contains(PARAM_MAX_PER_AU_CACHE_SIZE)) {
    paramNodeCacheSize =
        config.getInt(PARAM_MAX_PER_AU_CACHE_SIZE, DEFAULT_MAX_PER_AU_CACHE_SIZE);
    for (Iterator iter = getDaemon().getAllLockssRepositories().iterator();
         iter.hasNext(); ) {
      LockssRepository repo = (LockssRepository) iter.next();
      if (repo instanceof LockssRepositoryImpl) {
        LockssRepositoryImpl repoImpl = (LockssRepositoryImpl) repo;
        repoImpl.setNodeCacheSize(paramNodeCacheSize);
      }
    }
  }
  if (changedKeys.contains(PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE)) {
    paramSuspectVersionsCacheSize =
        config.getInt(PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE,
                      DEFAULT_MAX_SUSPECT_VERSIONS_CACHE_SIZE);
    suspectVersionsCache.setMaxSize(paramSuspectVersionsCacheSize);
  }
  if (changedKeys.contains(GLOBAL_CACHE_PREFIX)) {
    paramIsGlobalNodeCache =
        config.getBoolean(PARAM_GLOBAL_CACHE_ENABLED, DEFAULT_GLOBAL_CACHE_ENABLED);
    if (paramIsGlobalNodeCache) {
      paramGlobalNodeCacheSize =
          config.getInt(PARAM_MAX_GLOBAL_CACHE_SIZE, DEFAULT_MAX_GLOBAL_CACHE_SIZE);
      log.debug("global node cache size: " + paramGlobalNodeCacheSize);
      globalNodeCache.setMaxSize(paramGlobalNodeCacheSize);
    }
  }
  if (changedKeys.contains(DISK_PREFIX)) {
    int minMB = config.getInt(PARAM_DISK_WARN_FRRE_MB, DEFAULT_DISK_WARN_FRRE_MB);
    double minPer = config.getPercentage(PARAM_DISK_WARN_FRRE_PERCENT,
                                         DEFAULT_DISK_WARN_FRRE_PERCENT);
    paramDFWarn = PlatformUtil.DF.makeThreshold(minMB, minPer);
    minMB = config.getInt(PARAM_DISK_FULL_FRRE_MB, DEFAULT_DISK_FULL_FRRE_MB);
    minPer = config.getPercentage(PARAM_DISK_FULL_FRRE_PERCENT,
                                  DEFAULT_DISK_FULL_FRRE_PERCENT);
    paramDFFull = PlatformUtil.DF.makeThreshold(minMB, minPer);
  }
  if (changedKeys.contains(PARAM_SIZE_CALC_MAX_LOAD)) {
    sizeCalcMaxLoad = config.getPercentage(PARAM_SIZE_CALC_MAX_LOAD,
                                           DEFAULT_SIZE_CALC_MAX_LOAD);
  }
  if (changedKeys.contains(PREFIX)) {
    maxUnusedDirSearch =
        config.getInt(PARAM_MAX_UNUSED_DIR_SEARCH, DEFAULT_MAX_UNUSED_DIR_SEARCH);
    isStatefulUnusedDirSearch =
        config.getBoolean(PARAM_IS_STATEFUL_UNUSED_DIR_SEARCH,
                          DEFAULT_IS_STATEFUL_UNUSED_DIR_SEARCH);
    enableLongComponents =
        config.getBoolean(PARAM_ENABLE_LONG_COMPONENTS, DEFAULT_ENABLE_LONG_COMPONENTS);
    enableLongComponentsCompatibility =
        config.getBoolean(PARAM_ENABLE_LONG_COMPONENTS_COMPATIBILITY,
                          DEFAULT_ENABLE_LONG_COMPONENTS_COMPATIBILITY);
    maxComponentLength =
        config.getInt(PARAM_MAX_COMPONENT_LENGTH, DEFAULT_MAX_COMPONENT_LENGTH);
    checkUnnormalized =
        (CheckUnnormalizedMode) config.getEnum(CheckUnnormalizedMode.class,
                                               PARAM_CHECK_UNNORMALIZED,
                                               DEFAULT_CHECK_UNNORMALIZED);
  }
}
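As an illustration of the first clause, a hypothetical PARAM_PLATFORM_DISK_SPACE_LIST value of "/cache0;/cache1" would produce the repository specs "local:/cache0" and "local:/cache1". A minimal standalone sketch of that transformation, using String.split in place of StringUtil.breakAt:

import java.util.ArrayList;
import java.util.List;

public class RepoListSketch {
  public static void main(String[] args) {
    String dspace = "/cache0;/cache1";  // hypothetical semicolon-separated disk paths
    List<String> repoList = new ArrayList<>();
    for (String path : dspace.split(";")) {
      repoList.add("local:" + path);
    }
    // Prints [local:/cache0, local:/cache1]
    System.out.println(repoList);
  }
}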
protected void initResultMap() throws PluginException.InvalidDefinition {
  HttpResultMap hResultMap = new HttpResultMap();
  // XXX Currently this only allows a CacheResultHandler class to
  // initialize the result map.  Instead, don't use a CacheResultMap
  // directly; use either the plugin's CacheResultHandler, if specified,
  // or a default one that wraps the CacheResultMap.
  String handler_class = null;
  handler_class = definitionMap.getString(KEY_EXCEPTION_HANDLER, null);
  if (handler_class != null) {
    try {
      resultHandler =
          (CacheResultHandler) newAuxClass(handler_class, CacheResultHandler.class);
      resultHandler.init(hResultMap);
    } catch (Exception ex) {
      throw new PluginException.InvalidDefinition(
          mapName + " has invalid Exception handler: " + handler_class, ex);
    } catch (LinkageError le) {
      throw new PluginException.InvalidDefinition(
          mapName + " has invalid Exception handler: " + handler_class, le);
    }
  } else {
    // Expect a list of mappings from either result code or exception
    // name to CacheException name
    Collection<String> mappings = definitionMap.getCollection(KEY_EXCEPTION_LIST, null);
    if (mappings != null) {
      // add each entry
      for (String entry : mappings) {
        if (log.isDebug2()) {
          log.debug2("initMap(" + entry + ")");
        }
        String first;
        String ceName;
        try {
          List<String> pair = StringUtil.breakAt(entry, '=', 2, true, true);
          first = pair.get(0);
          ceName = pair.get(1);
        } catch (Exception ex) {
          throw new PluginException.InvalidDefinition(
              "Invalid syntax: " + entry + " in " + mapName);
        }
        Object val;
        // Value should be either a CacheException or CacheResultHandler class name.
        PluginFetchEventResponse resp =
            (PluginFetchEventResponse) newAuxClass(ceName,
                                                   PluginFetchEventResponse.class, null);
        if (resp instanceof CacheException) {
          val = resp.getClass();
        } else if (resp instanceof CacheResultHandler) {
          val = WrapperUtil.wrap((CacheResultHandler) resp, CacheResultHandler.class);
        } else {
          throw new PluginException.InvalidDefinition(
              "Second arg not a CacheException or CacheResultHandler class: "
              + entry + ", in " + mapName);
        }
        try {
          int code = Integer.parseInt(first);
          // If parseable as an integer, it's a result code.
          hResultMap.storeMapEntry(code, val);
        } catch (NumberFormatException e) {
          try {
            Class eClass = Class.forName(first);
            // If a class name, it should be an exception class.
            if (Exception.class.isAssignableFrom(eClass)) {
              hResultMap.storeMapEntry(eClass, val);
            } else {
              throw new PluginException.InvalidDefinition(
                  "First arg not an Exception class: " + entry + ", in " + mapName);
            }
          } catch (Exception ex) {
            throw new PluginException.InvalidDefinition(
                "First arg not a number or class: " + entry + ", in " + mapName);
          } catch (LinkageError le) {
            throw new PluginException.InvalidDefinition("Can't load " + first, le);
          }
        }
      }
    }
  }
  resultMap = hResultMap;
}
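Each entry in the exception list pairs either a numeric result code or an exception class name with a CacheException or CacheResultHandler class name. Here is a minimal standalone sketch of the dispatch on the left-hand side only; the sample strings are hypothetical, and the real method stores into an HttpResultMap rather than returning a description:

public class ResultMapKeySketch {
  // Classify the left-hand side of an exception-list entry.
  static String classify(String first) {
    try {
      int code = Integer.parseInt(first);
      return "HTTP result code " + code;
    } catch (NumberFormatException e) {
      try {
        Class<?> cls = Class.forName(first);
        if (Exception.class.isAssignableFrom(cls)) {
          return "exception class " + cls.getName();
        }
        return "not an Exception class: " + first;
      } catch (ClassNotFoundException cnfe) {
        return "neither a number nor a loadable class: " + first;
      }
    }
  }

  public static void main(String[] args) {
    System.out.println(classify("404"));                  // HTTP result code 404
    System.out.println(classify("java.io.IOException"));  // exception class java.io.IOException
  }
}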
protected void initMimeMap() throws PluginException.InvalidDefinition {
  for (Iterator iter = definitionMap.entrySet().iterator(); iter.hasNext(); ) {
    Map.Entry ent = (Map.Entry) iter.next();
    String key = (String) ent.getKey();
    Object val = ent.getValue();
    if (key.endsWith(DefinableArchivalUnit.SUFFIX_LINK_EXTRACTOR_FACTORY)) {
      String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_LINK_EXTRACTOR_FACTORY);
      if (val instanceof String) {
        String factName = (String) val;
        log.debug(mime + " link extractor: " + factName);
        MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
        LinkExtractorFactory fact =
            (LinkExtractorFactory) newAuxClass(factName, LinkExtractorFactory.class);
        mti.setLinkExtractorFactory(fact);
      }
    } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_CRAWL_FILTER_FACTORY)) {
      // XXX This clause must precede the one for SUFFIX_HASH_FILTER_FACTORY
      // XXX unless/until that key is changed to not be a terminal substring
      // XXX of this one
      String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_CRAWL_FILTER_FACTORY);
      if (val instanceof String) {
        String factName = (String) val;
        log.debug(mime + " crawl filter: " + factName);
        MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
        FilterFactory fact = (FilterFactory) newAuxClass(factName, FilterFactory.class);
        mti.setCrawlFilterFactory(fact);
      }
    } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_HASH_FILTER_FACTORY)) {
      String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_HASH_FILTER_FACTORY);
      if (val instanceof String) {
        String factName = (String) val;
        log.debug(mime + " filter: " + factName);
        MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
        FilterFactory fact = (FilterFactory) newAuxClass(factName, FilterFactory.class);
        mti.setHashFilterFactory(fact);
      }
    } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_FETCH_RATE_LIMIT)) {
      String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_FETCH_RATE_LIMIT);
      if (val instanceof String) {
        String rate = (String) val;
        log.debug(mime + " fetch rate: " + rate);
        MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
        RateLimiter limit = mti.getFetchRateLimiter();
        if (limit != null) {
          limit.setRate(rate);
        } else {
          mti.setFetchRateLimiter(new RateLimiter(rate));
        }
      }
    } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_LINK_REWRITER_FACTORY)) {
      String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_LINK_REWRITER_FACTORY);
      String factName = (String) val;
      log.debug(mime + " link rewriter: " + factName);
      MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
      LinkRewriterFactory fact =
          (LinkRewriterFactory) newAuxClass(factName, LinkRewriterFactory.class);
      mti.setLinkRewriterFactory(fact);
    } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_METADATA_EXTRACTOR_FACTORY_MAP)) {
      String mime =
          stripSuffix(key, DefinableArchivalUnit.SUFFIX_METADATA_EXTRACTOR_FACTORY_MAP);
      Map factNameMap = (Map) val;
      Map factClassMap = new HashMap();
      MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
      for (Iterator it = factNameMap.keySet().iterator(); it.hasNext(); ) {
        String mdTypes = (String) it.next();
        String factName = (String) factNameMap.get(mdTypes);
        log.debug(mime + " (" + mdTypes + ") metadata extractor: " + factName);
        for (String mdType : (List<String>) StringUtil.breakAt(mdTypes, ";")) {
          setMdTypeFact(factClassMap, mdType, factName);
        }
      }
      mti.setFileMetadataExtractorFactoryMap(factClassMap);
    }
  }
}
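For illustration, a minimal standalone sketch of the key handling: a definition-map key ending in one of the factory suffixes is split into a MIME type plus the suffix, and a metadata-type list is split on semicolons. The literal suffix string and the metadata types below are assumptions for the example; the real values come from DefinableArchivalUnit and the plugin definition:

public class MimeKeySketch {
  // Strip a suffix from a definition-map key to recover the MIME type,
  // mirroring stripSuffix() as used in initMimeMap() above.
  static String stripSuffix(String key, String suffix) {
    return key.substring(0, key.length() - suffix.length());
  }

  public static void main(String[] args) {
    // Hypothetical key and suffix values for the example.
    String key = "text/html_crawl_filter_factory";
    String suffix = "_crawl_filter_factory";
    if (key.endsWith(suffix)) {
      System.out.println("MIME type: " + stripSuffix(key, suffix));  // MIME type: text/html
    }

    // Metadata-type lists are split on semicolons before each type is registered.
    for (String mdType : "*;dc".split(";")) {
      System.out.println("metadata type: " + mdType);
    }
  }
}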