void stopThread() {
  if (sizeCalcThread != null) {
    log.debug2("Stopping thread");
    sizeCalcThread.stopSizeCalc();
    sizeCalcThread = null;
  }
}
protected void initFeatureVersions() throws PluginException.InvalidDefinition {
  if (definitionMap.containsKey(KEY_PLUGIN_FEATURE_VERSION_MAP)) {
    Map<Plugin.Feature, String> map = new HashMap<Plugin.Feature, String>();
    Map<String, String> spec =
        (Map<String, String>) definitionMap.getMap(KEY_PLUGIN_FEATURE_VERSION_MAP);
    log.debug2("features: " + spec);
    for (Map.Entry<String, String> ent : spec.entrySet()) {
      try {
        // Prefix version string with feature name to create separate
        // namespace for each feature
        String key = ent.getKey();
        map.put(Plugin.Feature.valueOf(key), key + "_" + ent.getValue());
      } catch (RuntimeException e) {
        log.warning(getPluginName() + " set unknown feature: " + ent.getKey()
                    + " to version " + ent.getValue(), e);
        throw new PluginException.InvalidDefinition("Unknown feature: "
                                                    + ent.getKey(), e);
      }
    }
    featureVersion = map;
  } else {
    featureVersion = null;
  }
}
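// Illustrative sketch of the resulting namespacing, with hypothetical
// plugin values: a feature-version spec such as {Substance=2, Poll=1}
// would yield
//
//   featureVersion.get(Plugin.Feature.Substance)  =>  "Substance_2"
//   featureVersion.get(Plugin.Feature.Poll)       =>  "Poll_1"
//
// so identical version strings declared by different features never
// compare equal across features.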
/** Enqueue a size calculation for the node */
public void queueSizeCalc(RepositoryNode node) {
  synchronized (sizeCalcQueue) {
    if (sizeCalcQueue.add(node)) {
      log.debug2("Queue size calc: " + node);
      startOrKickThread();
    }
  }
}
void startOrKickThread() {
  if (sizeCalcThread == null) {
    log.debug2("Starting thread");
    sizeCalcThread = new SizeCalcThread();
    sizeCalcThread.start();
    sizeCalcThread.waitRunning();
  }
  sizeCalcSem.give();
}
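// Minimal sketch of the consumer side this start-or-kick pattern assumes
// (the real SizeCalcThread lives elsewhere; names other than
// sizeCalcSem.give()/queueSizeCalc() below are hypothetical):
//
//   while (goOn) {
//     sizeCalcSem.take();                 // block until kicked
//     RepositoryNode node = null;
//     synchronized (sizeCalcQueue) {      // same lock as queueSizeCalc()
//       node = removeFirst(sizeCalcQueue);  // hypothetical helper
//     }
//     if (node != null) {
//       doSizeCalc(node);                 // hypothetical worker method
//     }
//   }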
static LocalRepository getLocalRepository(String repoRoot) {
  synchronized (localRepositories) {
    LocalRepository localRepo =
        (LocalRepository) localRepositories.get(repoRoot);
    if (localRepo == null) {
      logger.debug2("Creating LocalRepository(" + repoRoot + ")");
      localRepo = new LocalRepository(repoRoot);
      localRepositories.put(repoRoot, localRepo);
    }
    return localRepo;
  }
}
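// Usage sketch (hypothetical root path): repeated lookups for the same
// root return the same cached instance, created at most once under the
// localRepositories lock.
//
//   LocalRepository r1 = getLocalRepository("/cache0");
//   LocalRepository r2 = getLocalRepository("/cache0");
//   assert r1 == r2;   // same interned instance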
protected PermissionCheckerFactory getPermissionCheckerFactory() {
  if (permissionCheckerFact == null) {
    String permissionCheckerFactoryClass =
        definitionMap.getString(DefinableArchivalUnit.KEY_AU_PERMISSION_CHECKER_FACTORY,
                                null);
    if (permissionCheckerFactoryClass != null) {
      permissionCheckerFact =
          (PermissionCheckerFactory) newAuxClass(permissionCheckerFactoryClass,
                                                 PermissionCheckerFactory.class);
      log.debug2("Loaded PermissionCheckerFactory: " + permissionCheckerFact);
    }
  }
  return permissionCheckerFact;
}
/**
 * Checks the consistency of the node, and continues with its children
 * if it's consistent.
 *
 * @param node RepositoryNodeImpl the node to check
 */
private void recurseConsistencyCheck(RepositoryNodeImpl node) {
  logger.debug2("Checking node '" + node.getNodeUrl() + "'...");
  // check consistency at each node
  // correct/deactivate as necessary
  // 'checkNodeConsistency()' will repair if possible
  if (node.checkNodeConsistency()) {
    logger.debug3("Node consistent; recursing on children...");
    List children = node.getNodeList(null, false);
    Iterator iter = children.iterator();
    while (iter.hasNext()) {
      RepositoryNodeImpl child = (RepositoryNodeImpl) iter.next();
      recurseConsistencyCheck(child);
    }
  } else {
    logger.debug3("Node inconsistent; deactivating...");
    deactivateInconsistentNode(node);
  }
}
public void loadAuConfigDescrs(Configuration config)
    throws ConfigurationException {
  super.loadAuConfigDescrs(config);
  this.m_registryUrl = config.get(ConfigParamDescr.BASE_URL.getKey());
  // Now we can construct a valid CC permission checker.
  m_permissionCheckers =
      // ListUtil.list(new CreativeCommonsPermissionChecker(m_registryUrl));
      ListUtil.list(new CreativeCommonsPermissionChecker());
  paramMap.putLong(KEY_AU_NEW_CONTENT_CRAWL_INTERVAL,
                   CurrentConfig.getTimeIntervalParam(PARAM_REGISTRY_CRAWL_INTERVAL,
                                                      DEFAULT_REGISTRY_CRAWL_INTERVAL));
  if (log.isDebug2()) {
    log.debug2("Setting Registry AU recrawl interval to "
               + StringUtil.timeIntervalToString(
                   paramMap.getLong(KEY_AU_NEW_CONTENT_CRAWL_INTERVAL)));
  }
}
private void writeFiles() {
  PlatformUtil platutil = PlatformUtil.getInstance();
  CuIterator iter = AuUtil.getCuIterator(au);
  int errs = 0;
  CachedUrl curCu = null;
  CachedUrl nextCu = getNextCu(iter);
  while (nextCu != null) {
    curCu = nextCu;
    nextCu = getNextCu(iter);
    if (excludeDirNodes && nextCu != null && isDirOf(curCu, nextCu)) {
      continue;
    }
    CachedUrl[] cuVersions =
        curCu.getCuVersions(maxVersions > 0 ? maxVersions : Integer.MAX_VALUE);
    for (CachedUrl cu : cuVersions) {
      try {
        log.debug2("Exporting " + cu.getUrl());
        writeCu(cu);
      } catch (IOException e) {
        if (platutil.isDiskFullError(e)) {
          recordError("Disk full, can't write export file.");
          isDiskFull = true;
          return;
        }
      } catch (Exception e) {
        // XXX Would like to differentiate between errors opening or
        // reading CU, which shouldn't cause abort, and errors writing
        // to export file, which should.
        recordError("Unable to copy " + cu.getUrl(), e);
        if (errs++ >= maxErrors) {
          recordError("Aborting after " + errs + " errors");
          return;
        }
      }
    }
  }
}
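// The isDirOf() exclusion above relies on the CuIterator returning a
// directory node immediately before its children, e.g. (hypothetical URLs):
//
//   http://example.com/dir      <- curCu, skipped when excludeDirNodes is set
//   http://example.com/dir/a    <- nextCu, for which isDirOf(curCu, nextCu)
//                                  is true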
/**
 * Factory method to create new LockssRepository instances.
 *
 * @param au the {@link ArchivalUnit}
 * @return the new LockssRepository instance
 */
public static LockssRepository createNewLockssRepository(ArchivalUnit au) {
  String root = getRepositoryRoot(au);
  if (root == null || root.equals("null")) {
    logger.error("No repository dir set in config");
    throw new LockssRepository.RepositoryStateException(
        "No repository dir set in config");
  }
  String auDir = LockssRepositoryImpl.mapAuToFileLocation(root, au);
  if (logger.isDebug2()) {
    logger.debug2("repo: " + auDir + ", au: " + au.getName());
  }
  staticCacheLocation = extendCacheLocation(root);
  LockssRepositoryImpl repo = new LockssRepositoryImpl(auDir);
  Plugin plugin = au.getPlugin();
  if (plugin != null) {
    LockssDaemon daemon = plugin.getDaemon();
    if (daemon != null) {
      RepositoryManager mgr = daemon.getRepositoryManager();
      if (mgr != null) {
        mgr.setRepositoryForPath(auDir, repo);
      }
    }
  }
  return repo;
}
protected void initResultMap() throws PluginException.InvalidDefinition {
  HttpResultMap hResultMap = new HttpResultMap();
  // XXX Currently this only allows a CacheResultHandler class to
  // initialize the result map.  Instead, don't use a CacheResultMap
  // directly, use either the plugin's CacheResultHandler, if specified,
  // or a default one that wraps the CacheResultMap
  String handler_class = definitionMap.getString(KEY_EXCEPTION_HANDLER, null);
  if (handler_class != null) {
    try {
      resultHandler =
          (CacheResultHandler) newAuxClass(handler_class, CacheResultHandler.class);
      resultHandler.init(hResultMap);
    } catch (Exception ex) {
      throw new PluginException.InvalidDefinition(
          mapName + " has invalid Exception handler: " + handler_class, ex);
    } catch (LinkageError le) {
      throw new PluginException.InvalidDefinition(
          mapName + " has invalid Exception handler: " + handler_class, le);
    }
  } else {
    // Expect a list of mappings from either result code or exception
    // name to CacheException name
    Collection<String> mappings =
        definitionMap.getCollection(KEY_EXCEPTION_LIST, null);
    if (mappings != null) {
      // add each entry
      for (String entry : mappings) {
        if (log.isDebug2()) {
          log.debug2("initMap(" + entry + ")");
        }
        String first;
        String ceName;
        try {
          List<String> pair = StringUtil.breakAt(entry, '=', 2, true, true);
          first = pair.get(0);
          ceName = pair.get(1);
        } catch (Exception ex) {
          throw new PluginException.InvalidDefinition(
              "Invalid syntax: " + entry + " in " + mapName);
        }
        Object val;
        // Value should be either a CacheException or CacheResultHandler
        // class name.
        PluginFetchEventResponse resp =
            (PluginFetchEventResponse) newAuxClass(ceName,
                                                   PluginFetchEventResponse.class,
                                                   null);
        if (resp instanceof CacheException) {
          val = resp.getClass();
        } else if (resp instanceof CacheResultHandler) {
          val = WrapperUtil.wrap((CacheResultHandler) resp,
                                 CacheResultHandler.class);
        } else {
          throw new PluginException.InvalidDefinition(
              "Second arg not a CacheException or CacheResultHandler class: "
              + entry + ", in " + mapName);
        }
        try {
          int code = Integer.parseInt(first);
          // If parseable as an integer, it's a result code.
          hResultMap.storeMapEntry(code, val);
        } catch (NumberFormatException e) {
          try {
            Class eClass = Class.forName(first);
            // If a class name, it should be an exception class
            if (Exception.class.isAssignableFrom(eClass)) {
              hResultMap.storeMapEntry(eClass, val);
            } else {
              throw new PluginException.InvalidDefinition(
                  "First arg not an Exception class: " + entry + ", in " + mapName);
            }
          } catch (Exception ex) {
            throw new PluginException.InvalidDefinition(
                "First arg not a number or class: " + entry + ", in " + mapName);
          } catch (LinkageError le) {
            throw new PluginException.InvalidDefinition("Can't load " + first, le);
          }
        }
      }
    }
  }
  resultMap = hResultMap;
}
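// Illustrative plugin entries for the KEY_EXCEPTION_LIST form parsed above
// (class names hypothetical; each entry is "<code-or-class>=<class>"):
//
//   "404=org.example.MyDeadLinkException"               // HTTP result code
//   "java.io.IOException=org.example.MyRetryHandler"    // exception class
//
// The left side is tried as an integer result code first, then as an
// exception class name; the right side must name a CacheException or
// CacheResultHandler subclass.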