void setConfig(Configuration config) {
  log.debug("config: " + config);
  proxyHost = config.get(PARAM_PROXY_HOST);
  proxyPort = config.getInt(PARAM_PROXY_PORT, DEFAULT_PROXY_PORT);
  if (StringUtil.isNullString(proxyHost) || proxyPort <= 0) {
    String http_proxy = System.getenv("http_proxy");
    if (!StringUtil.isNullString(http_proxy)) {
      try {
        HostPortParser hpp = new HostPortParser(http_proxy);
        proxyHost = hpp.getHost();
        proxyPort = hpp.getPort();
      } catch (HostPortParser.InvalidSpec e) {
        log.warning("Can't parse http_proxy environment var, ignoring: " + http_proxy + ": " + e);
      }
    }
  }
  if (StringUtil.isNullString(proxyHost) || proxyPort <= 0) {
    proxyHost = null;
  } else {
    log.info("Proxying through " + proxyHost + ":" + proxyPort);
  }
  userAgent = config.get(PARAM_USER_AGENT);
  if (StringUtil.isNullString(userAgent)) {
    userAgent = null;
  } else {
    log.debug("Setting User-Agent to " + userAgent);
  }
}
/**
 * Initialize the backend systems, the log handler and the restrictor. A subclass can tune this
 * step by overriding {@link #createRestrictor(String)} and
 * {@link #createLogHandler(ServletConfig, boolean)}.
 *
 * @param pServletConfig servlet configuration
 */
@Override
public void init(ServletConfig pServletConfig) throws ServletException {
  super.init(pServletConfig);
  Configuration config = initConfig(pServletConfig);

  // Create a log handler early in the lifecycle, but not too early
  String logHandlerClass = config.get(ConfigKey.LOGHANDLER_CLASS);
  logHandler =
      logHandlerClass != null
          ? (LogHandler) ClassUtil.newInstance(logHandlerClass)
          : createLogHandler(pServletConfig, Boolean.valueOf(config.get(ConfigKey.DEBUG)));

  // Different HTTP request handlers
  httpGetHandler = newGetHttpRequestHandler();
  httpPostHandler = newPostHttpRequestHandler();

  if (restrictor == null) {
    restrictor =
        createRestrictor(NetworkUtil.replaceExpression(config.get(ConfigKey.POLICY_LOCATION)));
  } else {
    logHandler.info("Using custom access restriction provided by " + restrictor);
  }
  configMimeType = config.get(ConfigKey.MIME_TYPE);
  backendManager = new BackendManager(config, logHandler, restrictor);
  requestHandler = new HttpRequestHandler(config, backendManager, logHandler);

  initDiscoveryMulticast(config);
}
/** Create a LockssKeyStore from a config subtree */
LockssKeyStore createLockssKeyStore(Configuration config) {
  log.debug2("Creating LockssKeyStore from config: " + config);
  String name = config.get(KEYSTORE_PARAM_NAME);
  LockssKeyStore lk = new LockssKeyStore(name);

  String file = config.get(KEYSTORE_PARAM_FILE);
  String resource = config.get(KEYSTORE_PARAM_RESOURCE);
  String url = config.get(KEYSTORE_PARAM_URL);

  if (!StringUtil.isNullString(file)) {
    lk.setLocation(file, LocationType.File);
  } else if (!StringUtil.isNullString(resource)) {
    lk.setLocation(resource, LocationType.Resource);
  } else if (!StringUtil.isNullString(url)) {
    lk.setLocation(url, LocationType.Url);
  }

  lk.setType(config.get(KEYSTORE_PARAM_TYPE, defaultKeyStoreType));
  lk.setProvider(config.get(KEYSTORE_PARAM_PROVIDER, defaultKeyStoreProvider));
  lk.setPassword(config.get(KEYSTORE_PARAM_PASSWORD));
  lk.setKeyPassword(config.get(KEYSTORE_PARAM_KEY_PASSWORD));
  lk.setKeyPasswordFile(config.get(KEYSTORE_PARAM_KEY_PASSWORD_FILE));
  lk.setMayCreate(config.getBoolean(KEYSTORE_PARAM_CREATE, DEFAULT_CREATE));
  return lk;
}
private Log openLog(String logManagerName, String logName) {
  try {
    ModifiableConfiguration configuration =
        new ModifiableConfiguration(
            GraphDatabaseConfiguration.ROOT_NS,
            config.copy(),
            BasicConfiguration.Restriction.NONE);
    configuration.set(GraphDatabaseConfiguration.UNIQUE_INSTANCE_ID, "reader");
    configuration.set(
        GraphDatabaseConfiguration.LOG_READ_INTERVAL, Duration.ofMillis(500L), logManagerName);
    if (logStoreManager == null) {
      logStoreManager = Backend.getStorageManager(configuration);
    }
    StoreFeatures f = logStoreManager.getFeatures();
    boolean part = f.isDistributed() && f.isKeyOrdered();
    if (part) {
      for (String logname : new String[] {USER_LOG, TRANSACTION_LOG, MANAGEMENT_LOG}) {
        configuration.set(KCVSLogManager.LOG_MAX_PARTITIONS, 8, logname);
      }
    }
    assert logStoreManager != null;
    if (!logManagers.containsKey(logManagerName)) {
      // Open log manager - only supports KCVSLog
      Configuration logConfig = configuration.restrictTo(logManagerName);
      Preconditions.checkArgument(
          logConfig.get(LOG_BACKEND).equals(LOG_BACKEND.getDefaultValue()));
      logManagers.put(logManagerName, new KCVSLogManager(logStoreManager, logConfig));
    }
    assert logManagers.containsKey(logManagerName);
    return logManagers.get(logManagerName).openLog(logName);
  } catch (BackendException e) {
    throw new TitanException("Could not open log: " + logName, e);
  }
}
/**
 * Checks whether symlinks are to be created for the localized cache files in the current
 * working directory. Used by internal DistributedCache code.
 *
 * @param conf the jobconf
 * @return true if symlinks are to be created, false otherwise
 */
public static boolean getSymlink(Configuration conf) {
  String result = conf.get(CACHE_SYMLINK);
  if ("yes".equals(result)) {
    return true;
  }
  return false;
}
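A minimal usage sketch, assuming this method lives on Hadoop's DistributedCache class and that CACHE_SYMLINK resolves to the "mapred.create.symlink" key used by the literal-string variant later in this listing; note that only the exact value "yes" enables symlinks:

Configuration conf = new Configuration();
conf.set("mapred.create.symlink", "yes");                     // opt in to symlink creation
boolean createSymlinks = DistributedCache.getSymlink(conf);   // true
conf.set("mapred.create.symlink", "true");                    // any value other than "yes" counts as no
createSymlinks = DistributedCache.getSymlink(conf);           // false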
/**
 * Return a TitleConfig for the AU. Returns the matching entry from the title db if found, else
 * creates one.
 */
TitleConfig titleConfigFromAu(InactiveAuProxy au) {
  PluginProxy plugin = au.getPlugin();
  String auname = au.getName();
  Configuration auConfig = au.getConfiguration();
  TitleConfig tc = AuUtil.findTitleConfig(auConfig, plugin.getPlugin());
  if (tc != null) {
    return tc;
  }

  tc = new TitleConfig(auname, plugin.getPluginId());
  ArrayList<ConfigParamAssignment> params = new ArrayList<ConfigParamAssignment>();
  for (Iterator iter = auConfig.keyIterator(); iter.hasNext(); ) {
    String key = (String) iter.next();
    if (!ConfigParamDescr.isReservedParam(key)) {
      String val = auConfig.get(key);
      ConfigParamDescr descr = plugin.findAuConfigDescr(key);
      if (descr != null) {
        ConfigParamAssignment cpa = new ConfigParamAssignment(descr, val);
        params.add(cpa);
      } else {
        log.warning("Unknown parameter key: " + key + " in au: " + auname);
      }
    }
  }
  params.trimToSize();
  tc.setParams(params);
  return tc;
}
/**
 * Checks whether symlinks are to be created for the localized cache files in the current
 * working directory.
 *
 * @param conf the jobconf
 * @return true if symlinks are to be created, false otherwise
 */
public static boolean getSymlink(Configuration conf) {
  String result = conf.get("mapred.create.symlink");
  if ("yes".equals(result)) {
    return true;
  }
  return false;
}
public int run(String[] args) throws Exception {
  Configuration argConf = getConf();

  // JobConf conf = new JobConf(diffdb.class);
  Configuration config = HBaseConfiguration.create();

  HBaseAdmin hbAdmin = new HBaseAdmin(config);
  dbutil db_util = new dbutil(config);

  HTable runTable = new HTable(config, "gestore_runs");
  Get runGet = new Get(argConf.get("id").getBytes());
  Result pipeline = runTable.get(runGet);

  NavigableMap<byte[], byte[]> pipeMap = pipeline.getFamilyMap("d".getBytes());

  Map.Entry<byte[], byte[]> results = pipeMap.pollFirstEntry();

  HashMap<String, HashMap<String, String>> resultMap =
      new HashMap<String, HashMap<String, String>>();

  while (results != null) {
    String resultKey = new String(results.getKey());
    String resultValue = new String(results.getValue());
    String field = "type";
    HashMap<String, String> tempMap = new HashMap<String, String>();
    String entry = resultKey;

    if (resultKey.endsWith("_db_timestamp")) {
      field = "db_timestamp";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_db_timestamp"));
    } else if (resultKey.endsWith("_filename")) {
      field = "filename";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_filename"));
    } else if (resultKey.endsWith("_regex")) {
      field = "regex";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_regex"));
    }

    if (resultMap.containsKey(entry)) {
      tempMap = resultMap.get(entry);
    }

    tempMap.put(field, resultValue);
    resultMap.put(entry, tempMap);
    // System.out.println("Key: " + resultKey + " Value: " + resultValue);
    results = pipeMap.pollFirstEntry();
  }

  for (String key : resultMap.keySet()) {
    System.out.println("File ID: " + key);
    for (String subKey : resultMap.get(key).keySet()) {
      // System.out.println("\t " + subKey + "\t\t" + resultMap.get(key).get(subKey));
      System.out.format(" %1$-20s %2$s\n", subKey, resultMap.get(key).get(subKey));
    }
  }

  return 0;
}
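Purely illustrative driver: assuming this run() belongs to a Tool implementation (the commented-out JobConf line hints at a class named diffdb, which is not confirmed here), the "id" key it reads could be supplied as a Hadoop generic option:

// Hypothetical invocation; "diffdb" and the run id are illustrative placeholders.
Configuration conf = HBaseConfiguration.create();
int rc = ToolRunner.run(conf, new diffdb(), new String[] {"-D", "id=run_42"});
System.exit(rc);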
public static InetSocketAddress getAddress(Configuration conf) {
  String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
  int colon = jobTrackerStr.indexOf(":");
  if (colon < 0) {
    throw new RuntimeException("Bad mapred.job.tracker: " + jobTrackerStr);
  }
  String jobTrackerName = jobTrackerStr.substring(0, colon);
  int jobTrackerPort = Integer.parseInt(jobTrackerStr.substring(colon + 1));
  return new InetSocketAddress(jobTrackerName, jobTrackerPort);
}
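A quick usage sketch; the host and port are illustrative values, not taken from the source:

Configuration conf = new Configuration();
conf.set("mapred.job.tracker", "tracker.example.org:9001");  // must be host:port
InetSocketAddress addr = getAddress(conf);                   // -> tracker.example.org:9001
// With no value set, the default "localhost:8012" is parsed instead;
// a value without a colon triggers the RuntimeException above.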
/**
 * Add a file path to the current set of classpath entries. It adds the file to the cache as
 * well. Intended to be used by user code.
 *
 * @param file Path of the file to be added
 * @param conf Configuration that contains the classpath setting
 * @param fs FileSystem with respect to which {@code file} should be interpreted.
 */
public static void addFileToClassPath(Path file, Configuration conf, FileSystem fs)
    throws IOException {
  String filepath = file.toUri().getPath();
  String classpath = conf.get("mapred.job.classpath.files");
  conf.set(
      "mapred.job.classpath.files",
      classpath == null
          ? filepath
          : classpath + System.getProperty("path.separator") + filepath);
  URI uri = fs.makeQualified(file).toUri();
  addCacheFile(uri, conf);
}
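A usage sketch, assuming the method is called on Hadoop's DistributedCache class; the jar path is illustrative:

Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
// Appends the path to mapred.job.classpath.files and registers the qualified URI as a cache file.
DistributedCache.addFileToClassPath(new Path("/libs/helper.jar"), conf, fs);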
public static String getRepositorySpec(ArchivalUnit au) {
  Configuration auConfig = au.getConfiguration();
  if (auConfig != null) { // can be null in unit tests
    String repoSpec = auConfig.get(PluginManager.AU_PARAM_REPOSITORY);
    if (repoSpec != null && repoSpec.startsWith("local:")) {
      return repoSpec;
    }
  }
  return "local:" + CurrentConfig.getParam(PARAM_CACHE_LOCATION);
}
public synchronized void setConfig(
    Configuration config, Configuration prevConfig, Configuration.Differences changedKeys) {
  if (changedKeys.contains(PREFIX)) {
    defaultKeyStoreType = config.get(PARAM_DEFAULT_KEYSTORE_TYPE, DEFAULT_DEFAULT_KEYSTORE_TYPE);
    defaultKeyStoreProvider =
        config.get(PARAM_DEFAULT_KEYSTORE_PROVIDER, DEFAULT_DEFAULT_KEYSTORE_PROVIDER);
    paramExitIfMissingKeyStore =
        config.getBoolean(PARAM_EXIT_IF_MISSING_KEYSTORE, DEFAULT_EXIT_IF_MISSING_KEYSTORE);
    if (changedKeys.contains(PARAM_KEYSTORE)) {
      configureKeyStores(config);
      // defer initial set of keystore loading until startService
      if (isInited()) {
        // load any newly added keystores
        loadKeyStores();
      }
    }
  }
}
/**
 * Get the archive entries in classpath as an array of Path.
 *
 * @param conf Configuration that contains the classpath setting
 * @return the archive classpath entries, or null if none are set
 */
public static Path[] getArchiveClassPaths(Configuration conf) {
  String classpath = conf.get("mapred.job.classpath.archives");
  if (classpath == null) {
    return null;
  }
  ArrayList list =
      Collections.list(new StringTokenizer(classpath, System.getProperty("path.separator")));
  Path[] paths = new Path[list.size()];
  for (int i = 0; i < list.size(); i++) {
    paths[i] = new Path((String) list.get(i));
  }
  return paths;
}
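A small read-back sketch, again assuming the DistributedCache class name; it pairs with addArchiveToClassPath later in this listing and shows the null return when nothing has been added. The paths and the ':' separator (path.separator on Unix) are illustrative:

Configuration conf = new Configuration();
Path[] archives = DistributedCache.getArchiveClassPaths(conf);          // null: nothing configured yet
conf.set("mapred.job.classpath.archives", "/libs/a.zip:/libs/b.zip");
archives = DistributedCache.getArchiveClassPaths(conf);                 // [/libs/a.zip, /libs/b.zip]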
// Try to find an agent URL from system properties, the environment, or the config
private String findAgentUrl(Configuration pConfig) {
  // System property has precedence
  String url = System.getProperty("jolokia." + ConfigKey.DISCOVERY_AGENT_URL.getKeyValue());
  if (url == null) {
    url = System.getenv("JOLOKIA_DISCOVERY_AGENT_URL");
    if (url == null) {
      url = pConfig.get(ConfigKey.DISCOVERY_AGENT_URL);
    }
  }
  return NetworkUtil.replaceExpression(url);
}
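A sketch of the lookup order; the property name assumes ConfigKey.DISCOVERY_AGENT_URL's key value is "discoveryAgentUrl" (an assumption, not confirmed by this listing), and the URL is illustrative:

// 1. System property ("jolokia." + key value, assumed "discoveryAgentUrl" here) wins.
System.setProperty("jolokia.discoveryAgentUrl", "http://10.0.0.5:8080/jolokia");
// 2. Otherwise the JOLOKIA_DISCOVERY_AGENT_URL environment variable is consulted.
// 3. Otherwise the DISCOVERY_AGENT_URL entry in the agent configuration is used.
String url = findAgentUrl(config);  // -> the system-property value in this sketch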
/**
 * Add an archive path to the current set of classpath entries. It adds the archive to the cache
 * as well. Intended to be used by user code.
 *
 * @param archive Path of the archive to be added
 * @param conf Configuration that contains the classpath setting
 * @param fs FileSystem with respect to which {@code archive} should be interpreted.
 */
public static void addArchiveToClassPath(Path archive, Configuration conf, FileSystem fs)
    throws IOException {
  String archivepath = archive.toUri().getPath();
  String classpath = conf.get("mapred.job.classpath.archives");
  conf.set(
      "mapred.job.classpath.archives",
      classpath == null
          ? archivepath
          : classpath + System.getProperty("path.separator") + archivepath);
  URI uri = fs.makeQualified(archive).toUri();
  addCacheArchive(uri, conf);
}
private static URI addArchiveToClassPathHelper(Path archive, Configuration conf)
    throws IOException {
  String classpath = conf.get("mapred.job.classpath.archives");

  // The scheme/authority use ':' as a separator, so put the unqualified path on the classpath.
  String archivePath = archive.toUri().getPath();

  conf.set(
      "mapred.job.classpath.archives",
      classpath == null
          ? archivePath
          : classpath + System.getProperty("path.separator") + archivePath);
  return archive.makeQualified(archive.getFileSystem(conf)).toUri();
}
/**
 * Construct in a named directory.
 *
 * @param conf the configuration to use
 * @param dir the search directory; if null, the "searcher.dir" setting (default "crawl") is used
 * @throws IOException
 */
public NutchBean(Configuration conf, Path dir) throws IOException {
  this.conf = conf;
  this.fs = FileSystem.get(this.conf);
  if (dir == null) {
    dir = new Path(this.conf.get("searcher.dir", "crawl"));
  }
  final Path luceneConfig = new Path(dir, "search-servers.txt");
  final Path solrConfig = new Path(dir, "solr-servers.txt");
  if (fs.exists(luceneConfig) || fs.exists(solrConfig)) {
    searchBean = new DistributedSearchBean(conf, luceneConfig, solrConfig);
  } else {
    final Path indexDir = new Path(dir, "index");
    final Path indexesDir = new Path(dir, "indexes");
    searchBean = new LuceneSearchBean(conf, indexDir, indexesDir);
  }
  String tableName = conf.get("table.name", "webtable");
  table = new HTable(tableName);
  summarizerFactory = new SummarizerFactory(conf);
}
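A construction sketch; the crawl path is illustrative, and the factory call assumes Nutch's standard NutchConfiguration helper (a plain new Configuration() would also work for the purpose of the sketch):

Configuration conf = NutchConfiguration.create();
NutchBean bean = new NutchBean(conf, new Path("/data/crawl"));  // distributed vs. local search chosen from the dir contents
NutchBean defaultBean = new NutchBean(conf, null);              // falls back to conf.get("searcher.dir", "crawl")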
public void setConf(Configuration conf) {
  this.scriptName = conf.get(SCRIPT_FILENAME_KEY);
  this.maxArgs = conf.getInt(SCRIPT_ARG_COUNT_KEY, DEFAULT_ARG_COUNT);
  this.conf = conf;
}
/**
 * Add a file that has been localized to the conf. Used by internal DistributedCache code.
 *
 * @param conf The conf to modify to contain the localized caches
 * @param str a comma separated list of local files
 */
public static void addLocalFiles(Configuration conf, String str) {
  String files = conf.get(CACHE_LOCALFILES);
  conf.set(CACHE_LOCALFILES, files == null ? str : files + "," + str);
}
/**
 * Add an archive to be localized to the conf. Intended to be used by user code.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addCacheArchive(URI uri, Configuration conf) {
  String archives = conf.get(CACHE_ARCHIVES);
  conf.set(CACHE_ARCHIVES, archives == null ? uri.toString() : archives + "," + uri.toString());
}
/**
 * Add a file to be localized to the conf. Intended to be used by user code.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addCacheFile(URI uri, Configuration conf) {
  String files = conf.get(CACHE_FILES);
  conf.set(CACHE_FILES, files == null ? uri.toString() : files + "," + uri.toString());
}
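A usage sketch for the two user-facing cache methods above, assuming the enclosing DistributedCache class; the HDFS URIs are illustrative, and repeated calls simply accumulate comma-separated lists under the CACHE_FILES / CACHE_ARCHIVES keys:

Configuration conf = new Configuration();
DistributedCache.addCacheFile(URI.create("hdfs:///shared/dictionary.txt#dict"), conf);
DistributedCache.addCacheFile(URI.create("hdfs:///shared/stopwords.txt"), conf);
DistributedCache.addCacheArchive(URI.create("hdfs:///shared/models.zip"), conf);
// conf now carries "hdfs:///shared/dictionary.txt#dict,hdfs:///shared/stopwords.txt"
// under the files key and "hdfs:///shared/models.zip" under the archives key.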
/** Sets up configuration based on params */
private static boolean setup(Hashtable<String, String> curConf, Configuration argConf) {

  if (argConf.get("file") == null) {
    logger.fatal("Missing file parameter");
    System.exit(1);
  }

  if (argConf.get("hdfs_base_path") == null) {
    logger.fatal("Missing HDFS base path, check gestore-conf.xml");
    System.exit(1);
  }

  if (argConf.get("hdfs_temp_path") == null) {
    logger.fatal("Missing HDFS temp path, check gestore-conf.xml");
    System.exit(1);
  }

  if (argConf.get("local_temp_path") == null) {
    logger.fatal("Missing local temp path, check gestore-conf.xml");
    System.exit(1);
  }

  // Input parameters
  curConf.put("run_id", argConf.get("run", ""));
  curConf.put("task_id", argConf.get("task", ""));
  curConf.put("file_id", argConf.get("file"));
  curConf.put("local_path", argConf.get("path", ""));
  curConf.put("type", argConf.get("type", "l2r"));
  curConf.put("timestamp_start", argConf.get("timestamp_start", "1"));
  curConf.put(
      "timestamp_stop", argConf.get("timestamp_stop", Integer.toString(Integer.MAX_VALUE)));
  curConf.put("delimiter", argConf.get("regex", "ID=.*"));
  curConf.put("taxon", argConf.get("taxon", "all"));
  curConf.put("intermediate", argConf.get("full_run", "false"));
  curConf.put("quick_add", argConf.get("quick_add", "false"));
  Boolean full_run = curConf.get("intermediate").matches("(?i).*true.*");
  curConf.put("format", argConf.get("format", "unknown"));
  curConf.put("split", argConf.get("split", "1"));
  curConf.put("copy", argConf.get("copy", "true"));

  // Constants
  curConf.put("base_path", argConf.get("hdfs_base_path"));
  curConf.put("temp_path", argConf.get("hdfs_temp_path"));
  curConf.put("local_temp_path", argConf.get("local_temp_path"));
  curConf.put("db_name_files", argConf.get("hbase_file_table"));
  curConf.put("db_name_runs", argConf.get("hbase_run_table"));
  curConf.put("db_name_updates", argConf.get("hbase_db_update_table"));

  // Timestamps
  Date currentTime = new Date();
  Date endDate = new Date(new Long(curConf.get("timestamp_stop")));
  curConf.put("timestamp_real", Long.toString(currentTime.getTime()));

  return true;
}
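Illustrative only: a minimal argConf that gets past the fatal checks above. The paths and table names are placeholders; the hbase_* keys also need values because their results are stored in a Hashtable, which rejects null values:

Configuration argConf = new Configuration();
argConf.set("file", "genes_v1");                   // required
argConf.set("hdfs_base_path", "/gestore/base/");   // normally supplied by gestore-conf.xml
argConf.set("hdfs_temp_path", "/gestore/tmp/");
argConf.set("local_temp_path", "/tmp/gestore/");
argConf.set("hbase_file_table", "gestore_files");  // needed: Hashtable.put(key, null) would throw
argConf.set("hbase_run_table", "gestore_runs");
argConf.set("hbase_db_update_table", "gestore_updates");
Hashtable<String, String> curConf = new Hashtable<String, String>();
setup(curConf, argConf);                           // remaining keys fall back to their defaults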
public void setConfig(
    Configuration config, Configuration oldConfig, Configuration.Differences changedKeys) {
  // Build the list of repositories from the list of disk (fs) paths. Needs to be generalized
  // if there is ever another repository implementation.
  if (changedKeys.contains(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST)) {
    List lst = new ArrayList();
    String dspace = config.get(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST, "");
    List paths = StringUtil.breakAt(dspace, ';');
    if (paths != null) {
      for (Iterator iter = paths.iterator(); iter.hasNext(); ) {
        lst.add("local:" + (String) iter.next());
      }
    }
    repoList = lst;
  }
  if (changedKeys.contains(PARAM_MAX_PER_AU_CACHE_SIZE)) {
    paramNodeCacheSize =
        config.getInt(PARAM_MAX_PER_AU_CACHE_SIZE, DEFAULT_MAX_PER_AU_CACHE_SIZE);
    for (Iterator iter = getDaemon().getAllLockssRepositories().iterator(); iter.hasNext(); ) {
      LockssRepository repo = (LockssRepository) iter.next();
      if (repo instanceof LockssRepositoryImpl) {
        LockssRepositoryImpl repoImpl = (LockssRepositoryImpl) repo;
        repoImpl.setNodeCacheSize(paramNodeCacheSize);
      }
    }
  }
  if (changedKeys.contains(PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE)) {
    paramSuspectVersionsCacheSize =
        config.getInt(
            PARAM_MAX_SUSPECT_VERSIONS_CACHE_SIZE, DEFAULT_MAX_SUSPECT_VERSIONS_CACHE_SIZE);
    suspectVersionsCache.setMaxSize(paramSuspectVersionsCacheSize);
  }
  if (changedKeys.contains(GLOBAL_CACHE_PREFIX)) {
    paramIsGlobalNodeCache =
        config.getBoolean(PARAM_GLOBAL_CACHE_ENABLED, DEFAULT_GLOBAL_CACHE_ENABLED);
    if (paramIsGlobalNodeCache) {
      paramGlobalNodeCacheSize =
          config.getInt(PARAM_MAX_GLOBAL_CACHE_SIZE, DEFAULT_MAX_GLOBAL_CACHE_SIZE);
      log.debug("global node cache size: " + paramGlobalNodeCacheSize);
      globalNodeCache.setMaxSize(paramGlobalNodeCacheSize);
    }
  }
  if (changedKeys.contains(DISK_PREFIX)) {
    int minMB = config.getInt(PARAM_DISK_WARN_FRRE_MB, DEFAULT_DISK_WARN_FRRE_MB);
    double minPer =
        config.getPercentage(PARAM_DISK_WARN_FRRE_PERCENT, DEFAULT_DISK_WARN_FRRE_PERCENT);
    paramDFWarn = PlatformUtil.DF.makeThreshold(minMB, minPer);
    minMB = config.getInt(PARAM_DISK_FULL_FRRE_MB, DEFAULT_DISK_FULL_FRRE_MB);
    minPer = config.getPercentage(PARAM_DISK_FULL_FRRE_PERCENT, DEFAULT_DISK_FULL_FRRE_PERCENT);
    paramDFFull = PlatformUtil.DF.makeThreshold(minMB, minPer);
  }
  if (changedKeys.contains(PARAM_SIZE_CALC_MAX_LOAD)) {
    sizeCalcMaxLoad = config.getPercentage(PARAM_SIZE_CALC_MAX_LOAD, DEFAULT_SIZE_CALC_MAX_LOAD);
  }
  if (changedKeys.contains(PREFIX)) {
    maxUnusedDirSearch =
        config.getInt(PARAM_MAX_UNUSED_DIR_SEARCH, DEFAULT_MAX_UNUSED_DIR_SEARCH);
    isStatefulUnusedDirSearch =
        config.getBoolean(
            PARAM_IS_STATEFUL_UNUSED_DIR_SEARCH, DEFAULT_IS_STATEFUL_UNUSED_DIR_SEARCH);
    enableLongComponents =
        config.getBoolean(PARAM_ENABLE_LONG_COMPONENTS, DEFAULT_ENABLE_LONG_COMPONENTS);
    enableLongComponentsCompatibility =
        config.getBoolean(
            PARAM_ENABLE_LONG_COMPONENTS_COMPATIBILITY,
            DEFAULT_ENABLE_LONG_COMPONENTS_COMPATIBILITY);
    maxComponentLength = config.getInt(PARAM_MAX_COMPONENT_LENGTH, DEFAULT_MAX_COMPONENT_LENGTH);
    checkUnnormalized =
        (CheckUnnormalizedMode)
            config.getEnum(
                CheckUnnormalizedMode.class, PARAM_CHECK_UNNORMALIZED, DEFAULT_CHECK_UNNORMALIZED);
  }
}
/**
 * Add a file to be localized to the conf.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addCacheFile(URI uri, Configuration conf) {
  String files = conf.get("mapred.cache.files");
  conf.set("mapred.cache.files", files == null ? uri.toString() : files + "," + uri.toString());
}
/**
 * Add an archive to be localized to the conf.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addSharedCacheArchive(URI uri, Configuration conf) {
  String archives = conf.get("mapred.cache.shared.archives");
  conf.set(
      "mapred.cache.shared.archives",
      archives == null ? uri.toString() : archives + "," + uri.toString());
}
/**
 * Add an archive that has been localized to the conf. Used by internal DistributedCache code.
 *
 * @param conf The conf to modify to contain the localized caches
 * @param str a comma separated list of local archives
 */
public static void addLocalArchives(Configuration conf, String str) {
  String archives = conf.get(CACHE_LOCALARCHIVES);
  conf.set(CACHE_LOCALARCHIVES, archives == null ? str : archives + "," + str);
}