void setConfig(Configuration config) {
  log.debug("config: " + config);
  proxyHost = config.get(PARAM_PROXY_HOST);
  proxyPort = config.getInt(PARAM_PROXY_PORT, DEFAULT_PROXY_PORT);
  if (StringUtil.isNullString(proxyHost) || proxyPort <= 0) {
    String http_proxy = System.getenv("http_proxy");
    if (!StringUtil.isNullString(http_proxy)) {
      try {
        HostPortParser hpp = new HostPortParser(http_proxy);
        proxyHost = hpp.getHost();
        proxyPort = hpp.getPort();
      } catch (HostPortParser.InvalidSpec e) {
        log.warning("Can't parse http_proxy environment var, ignoring: " + http_proxy + ": " + e);
      }
    }
  }
  if (StringUtil.isNullString(proxyHost) || proxyPort <= 0) {
    proxyHost = null;
  } else {
    log.info("Proxying through " + proxyHost + ":" + proxyPort);
  }
  userAgent = config.get(PARAM_USER_AGENT);
  if (StringUtil.isNullString(userAgent)) {
    userAgent = null;
  } else {
    log.debug("Setting User-Agent to " + userAgent);
  }
}
/**
 * Initialize the backend systems, the log handler and the restrictor. A subclass can tune this
 * step by overriding {@link #createRestrictor(String)} and
 * {@link #createLogHandler(ServletConfig, boolean)}.
 *
 * @param pServletConfig servlet configuration
 */
@Override
public void init(ServletConfig pServletConfig) throws ServletException {
  super.init(pServletConfig);
  Configuration config = initConfig(pServletConfig);

  // Create a log handler early in the lifecycle, but not too early
  String logHandlerClass = config.get(ConfigKey.LOGHANDLER_CLASS);
  logHandler =
      logHandlerClass != null
          ? (LogHandler) ClassUtil.newInstance(logHandlerClass)
          : createLogHandler(pServletConfig, Boolean.valueOf(config.get(ConfigKey.DEBUG)));

  // Different HTTP request handlers
  httpGetHandler = newGetHttpRequestHandler();
  httpPostHandler = newPostHttpRequestHandler();

  if (restrictor == null) {
    restrictor =
        createRestrictor(NetworkUtil.replaceExpression(config.get(ConfigKey.POLICY_LOCATION)));
  } else {
    logHandler.info("Using custom access restriction provided by " + restrictor);
  }
  configMimeType = config.get(ConfigKey.MIME_TYPE);
  backendManager = new BackendManager(config, logHandler, restrictor);
  requestHandler = new HttpRequestHandler(config, backendManager, logHandler);

  initDiscoveryMulticast(config);
}
/** Create a LockssKeyStore from a config subtree. */
LockssKeyStore createLockssKeyStore(Configuration config) {
  log.debug2("Creating LockssKeyStore from config: " + config);
  String name = config.get(KEYSTORE_PARAM_NAME);
  LockssKeyStore lk = new LockssKeyStore(name);

  String file = config.get(KEYSTORE_PARAM_FILE);
  String resource = config.get(KEYSTORE_PARAM_RESOURCE);
  String url = config.get(KEYSTORE_PARAM_URL);
  if (!StringUtil.isNullString(file)) {
    lk.setLocation(file, LocationType.File);
  } else if (!StringUtil.isNullString(resource)) {
    lk.setLocation(resource, LocationType.Resource);
  } else if (!StringUtil.isNullString(url)) {
    lk.setLocation(url, LocationType.Url);
  }

  lk.setType(config.get(KEYSTORE_PARAM_TYPE, defaultKeyStoreType));
  lk.setProvider(config.get(KEYSTORE_PARAM_PROVIDER, defaultKeyStoreProvider));
  lk.setPassword(config.get(KEYSTORE_PARAM_PASSWORD));
  lk.setKeyPassword(config.get(KEYSTORE_PARAM_KEY_PASSWORD));
  lk.setKeyPasswordFile(config.get(KEYSTORE_PARAM_KEY_PASSWORD_FILE));
  lk.setMayCreate(config.getBoolean(KEYSTORE_PARAM_CREATE, DEFAULT_CREATE));
  return lk;
}
/**
 * Checks whether symlinks are to be created for the localized cache files in the current
 * working directory.
 *
 * @param conf the jobconf
 * @return true if symlinks are to be created, false otherwise
 */
public static boolean getSymlink(Configuration conf) {
  String result = conf.get("mapred.create.symlink");
  return "yes".equals(result);
}
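// Usage sketch (not part of the original code): shows how a caller might opt in to symlink
// creation via the property read above. Assumes org.apache.hadoop.conf.Configuration is on the
// classpath and that getSymlink(Configuration) is visible from this class.
static boolean symlinkExampleUsage() {
  Configuration conf = new Configuration();
  conf.set("mapred.create.symlink", "yes"); // any value other than "yes" yields false
  return getSymlink(conf);                  // true
}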
/** Initialize SecondaryNameNode. */
private void initialize(Configuration conf) throws IOException {
  // initiate Java VM metrics
  JvmMetrics.init("SecondaryNameNode", conf.get("session.id"));

  // Create connection to the namenode.
  shouldRun = true;
  nameNodeAddr = NameNode.getAddress(conf);
  this.conf = conf;
  this.namenode =
      (NamenodeProtocol)
          RPC.waitForProxy(NamenodeProtocol.class, NamenodeProtocol.versionID, nameNodeAddr, conf);

  // initialize checkpoint directories
  fsName = getInfoServer();
  checkpointDirs = FSImage.getCheckpointDirs(conf, "/tmp/hadoop/dfs/namesecondary");
  checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, "/tmp/hadoop/dfs/namesecondary");
  checkpointImage = new CheckpointStorage(conf);
  checkpointImage.recoverCreate(checkpointDirs, checkpointEditsDirs);

  // Initialize other scheduling parameters from the configuration
  checkpointPeriod = conf.getLong("fs.checkpoint.period", 3600);
  checkpointSize = conf.getLong("fs.checkpoint.size", 4194304);

  // initialize the webserver for uploading files.
  String infoAddr =
      NetUtils.getServerAddress(
          conf,
          "dfs.secondary.info.bindAddress",
          "dfs.secondary.info.port",
          "dfs.secondary.http.address");
  InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
  infoBindAddress = infoSocAddr.getHostName();
  int tmpInfoPort = infoSocAddr.getPort();
  infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort, tmpInfoPort == 0, conf);
  infoServer.setAttribute("name.system.image", checkpointImage);
  this.infoServer.setAttribute("name.conf", conf);
  infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class);
  infoServer.start();

  // The web-server port can be ephemeral... ensure we have the correct info
  infoPort = infoServer.getPort();
  conf.set("dfs.secondary.http.address", infoBindAddress + ":" + infoPort);
  LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + infoPort);
  LOG.warn(
      "Checkpoint Period :" + checkpointPeriod + " secs "
          + "(" + checkpointPeriod / 60 + " min)");
  LOG.warn(
      "Log Size Trigger :" + checkpointSize + " bytes "
          + "(" + checkpointSize / 1024 + " KB)");
}
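// Hedged configuration sketch (not from the original source): the property names below are the
// ones initialize(Configuration) reads for checkpoint scheduling and the HTTP endpoint; the
// values shown are illustrative only.
static Configuration exampleSecondaryNameNodeConf() {
  Configuration conf = new Configuration();
  conf.setLong("fs.checkpoint.period", 600);               // checkpoint every 10 minutes instead of 3600 s
  conf.setLong("fs.checkpoint.size", 8 * 1024 * 1024);     // trigger on 8 MB of edits instead of 4 MB
  conf.set("dfs.secondary.http.address", "0.0.0.0:50090"); // illustrative bind address and port
  return conf;
}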
/**
 * Checks whether symlinks are to be created for the localized cache files in the current
 * working directory. Used by internal DistributedCache code.
 *
 * @param conf the jobconf
 * @return true if symlinks are to be created, false otherwise
 */
public static boolean getSymlink(Configuration conf) {
  String result = conf.get(CACHE_SYMLINK);
  return "yes".equals(result);
}
public int run(String[] args) throws Exception {
  Configuration argConf = getConf();

  // JobConf conf = new JobConf(diffdb.class);
  Configuration config = HBaseConfiguration.create();
  HBaseAdmin hbAdmin = new HBaseAdmin(config);
  dbutil db_util = new dbutil(config);

  HTable runTable = new HTable(config, "gestore_runs");
  Get runGet = new Get(argConf.get("id").getBytes());
  Result pipeline = runTable.get(runGet);

  NavigableMap<byte[], byte[]> pipeMap = pipeline.getFamilyMap("d".getBytes());
  Map.Entry<byte[], byte[]> results = pipeMap.pollFirstEntry();
  HashMap<String, HashMap<String, String>> resultMap =
      new HashMap<String, HashMap<String, String>>();

  while (results != null) {
    String resultKey = new String(results.getKey());
    String resultValue = new String(results.getValue());
    String field = "type";
    HashMap<String, String> tempMap = new HashMap<String, String>();
    String entry = resultKey;

    if (resultKey.endsWith("_db_timestamp")) {
      field = "db_timestamp";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_db_timestamp"));
    } else if (resultKey.endsWith("_filename")) {
      field = "filename";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_filename"));
    } else if (resultKey.endsWith("_regex")) {
      field = "regex";
      entry = resultKey.substring(0, resultKey.lastIndexOf("_regex"));
    }

    if (resultMap.containsKey(entry)) {
      tempMap = resultMap.get(entry);
    }
    tempMap.put(field, resultValue);
    resultMap.put(entry, tempMap);
    // System.out.println("Key: " + resultKey + " Value: " + resultValue);
    results = pipeMap.pollFirstEntry();
  }

  for (String key : resultMap.keySet()) {
    System.out.println("File ID: " + key);
    for (String subKey : resultMap.get(key).keySet()) {
      // System.out.println("\t " + subKey + "\t\t" + resultMap.get(key).get(subKey));
      System.out.format(" %1$-20s %2$s\n", subKey, resultMap.get(key).get(subKey));
    }
  }

  return 0;
}
public synchronized void setConfig(
    Configuration config, Configuration prevConfig, Configuration.Differences changedKeys) {
  if (changedKeys.contains(PREFIX)) {
    defaultKeyStoreType = config.get(PARAM_DEFAULT_KEYSTORE_TYPE, DEFAULT_DEFAULT_KEYSTORE_TYPE);
    defaultKeyStoreProvider =
        config.get(PARAM_DEFAULT_KEYSTORE_PROVIDER, DEFAULT_DEFAULT_KEYSTORE_PROVIDER);
    paramExitIfMissingKeyStore =
        config.getBoolean(PARAM_EXIT_IF_MISSING_KEYSTORE, DEFAULT_EXIT_IF_MISSING_KEYSTORE);
    if (changedKeys.contains(PARAM_KEYSTORE)) {
      configureKeyStores(config);
      // defer the initial keystore loading until startService
      if (isInited()) {
        // load any newly added keystores
        loadKeyStores();
      }
    }
  }
}
public static InetSocketAddress getAddress(Configuration conf) {
  String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
  int colon = jobTrackerStr.indexOf(":");
  if (colon < 0) {
    throw new RuntimeException("Bad mapred.job.tracker: " + jobTrackerStr);
  }
  String jobTrackerName = jobTrackerStr.substring(0, colon);
  int jobTrackerPort = Integer.parseInt(jobTrackerStr.substring(colon + 1));
  return new InetSocketAddress(jobTrackerName, jobTrackerPort);
}
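// Hedged usage sketch: getAddress above parses the "mapred.job.tracker" value as host:port.
// The host and port below are hypothetical, not taken from the original code.
static InetSocketAddress exampleJobTrackerAddress() {
  Configuration conf = new Configuration();
  conf.set("mapred.job.tracker", "jt.example.org:9001"); // illustrative tracker location
  return getAddress(conf); // InetSocketAddress for jt.example.org, port 9001
}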
/**
 * Add a file path to the current set of classpath entries. It adds the file to the cache as
 * well. Intended to be used by user code.
 *
 * @param file Path of the file to be added
 * @param conf Configuration that contains the classpath setting
 * @param fs FileSystem with respect to which {@code file} should be interpreted
 */
public static void addFileToClassPath(Path file, Configuration conf, FileSystem fs)
    throws IOException {
  String filepath = file.toUri().getPath();
  String classpath = conf.get("mapred.job.classpath.files");
  conf.set(
      "mapred.job.classpath.files",
      classpath == null
          ? filepath
          : classpath + System.getProperty("path.separator") + filepath);
  URI uri = fs.makeQualified(file).toUri();
  addCacheFile(uri, conf);
}
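// Hedged usage sketch: appends a jar to the "mapred.job.classpath.files" list and caches it.
// The HDFS path is hypothetical; assumes a configured Hadoop FileSystem is reachable.
static void exampleAddJarToClassPath(Configuration conf) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  addFileToClassPath(new Path("/user/libs/example.jar"), conf, fs); // illustrative path
}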
public static String getRepositorySpec(ArchivalUnit au) {
  Configuration auConfig = au.getConfiguration();
  if (auConfig != null) { // can be null in unit tests
    String repoSpec = auConfig.get(PluginManager.AU_PARAM_REPOSITORY);
    if (repoSpec != null && repoSpec.startsWith("local:")) {
      return repoSpec;
    }
  }
  return "local:" + CurrentConfig.getParam(PARAM_CACHE_LOCATION);
}
static {
  if (!Configuration.get("DISABLE_L10N").equals("yes")) {
    String fn = "messages_" + currentLocale.getLanguage() + ".properties";
    // Compare string contents, not references (the original used != "")
    if (!Configuration.get("LOCALES_DIR").isEmpty()) {
      System.out.print("Look " + fn + " at: " + Configuration.get("LOCALES_DIR") + " ");
      try {
        messages.load(
            new FileInputStream(Configuration.get("LOCALES_DIR") + File.separator + fn));
        System.out.println(" - found");
      } catch (IOException ex) {
        System.out.println(" - not found");
        ex.printStackTrace();
      }
    }
    if (messages.size() == 0) {
      try {
        messages.load(Local.class.getResourceAsStream("localmessages/" + fn));
      } catch (Exception e) {
        // Do nothing ...
      }
    }
  } else {
    currentLocale = new Locale("en", "US");
    /*DEBUG*/ System.out.println("* DEBUG: Locales are disabled");
  }

  if (messages.size() == 0) messages = null;

  /* DEBUG PURPOSES */
  System.out.println("Default locale: " + currentLocale.getDisplayName());
  if (messages != null) {
    System.out.println(
        "Use local messages: messages_" + currentLocale.getLanguage() + ".properties");
  } else {
    System.out.println(
        "* DEBUG: Locales are disabled or not found: messages_"
            + currentLocale.getLanguage() + ".properties");
  }
  /* ******************* */
}
// Try to find an agent URL from system properties, the environment, or the configuration
private String findAgentUrl(Configuration pConfig) {
  // System property has precedence
  String url = System.getProperty("jolokia." + ConfigKey.DISCOVERY_AGENT_URL.getKeyValue());
  if (url == null) {
    url = System.getenv("JOLOKIA_DISCOVERY_AGENT_URL");
    if (url == null) {
      url = pConfig.get(ConfigKey.DISCOVERY_AGENT_URL);
    }
  }
  return NetworkUtil.replaceExpression(url);
}
/**
 * Get the archive entries in the classpath as an array of Path.
 *
 * @param conf Configuration that contains the classpath setting
 */
public static Path[] getArchiveClassPaths(Configuration conf) {
  String classpath = conf.get("mapred.job.classpath.archives");
  if (classpath == null) {
    return null;
  }
  ArrayList list =
      Collections.list(new StringTokenizer(classpath, System.getProperty("path.separator")));
  Path[] paths = new Path[list.size()];
  for (int i = 0; i < list.size(); i++) {
    paths[i] = new Path((String) list.get(i));
  }
  return paths;
}
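// Hedged round-trip sketch: after archives are added (joined with path.separator), the getter
// above splits them back into Path objects. The archive paths are hypothetical.
static Path[] exampleArchiveClassPaths() {
  Configuration conf = new Configuration();
  String sep = System.getProperty("path.separator");
  conf.set("mapred.job.classpath.archives", "/libs/a.zip" + sep + "/libs/b.zip"); // illustrative
  return getArchiveClassPaths(conf); // -> [ /libs/a.zip, /libs/b.zip ]
}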
public static String[] getWeekdayNames() {
  String[] localwdnames = new String[7];
  String[] localnames = weekdaynames;
  if (Configuration.get("FIRST_DAY_OF_WEEK").equals("mon")) {
    localnames = new String[] {"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"};
  }
  for (int i = 0; i < 7; i++) {
    localwdnames[i] = getString(localnames[i]);
  }
  return localwdnames;
}
/**
 * Add an archive path to the current set of classpath entries. It adds the archive to the
 * cache as well. Intended to be used by user code.
 *
 * @param archive Path of the archive to be added
 * @param conf Configuration that contains the classpath setting
 * @param fs FileSystem with respect to which {@code archive} should be interpreted
 */
public static void addArchiveToClassPath(Path archive, Configuration conf, FileSystem fs)
    throws IOException {
  String archivepath = archive.toUri().getPath();
  String classpath = conf.get("mapred.job.classpath.archives");
  conf.set(
      "mapred.job.classpath.archives",
      classpath == null
          ? archivepath
          : classpath + System.getProperty("path.separator") + archivepath);
  URI uri = fs.makeQualified(archive).toUri();
  addCacheArchive(uri, conf);
}
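// Hedged usage sketch mirroring the file variant above: adds an archive (e.g. a zip of
// dependencies) to the job classpath. The path is hypothetical.
static void exampleAddArchiveToClassPath(Configuration conf) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  addArchiveToClassPath(new Path("/user/libs/natives.zip"), conf, fs); // illustrative path
}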
private static URI addArchiveToClassPathHelper(Path archive, Configuration conf)
    throws IOException {
  String classpath = conf.get("mapred.job.classpath.archives");
  // The scheme/authority use ':' as separator, so put the unqualified path in the classpath.
  String archivePath = archive.toUri().getPath();
  conf.set(
      "mapred.job.classpath.archives",
      classpath == null
          ? archivePath
          : classpath + System.getProperty("path.separator") + archivePath);
  return archive.makeQualified(archive.getFileSystem(conf)).toUri();
}
/**
 * Construct in a named directory.
 *
 * @param conf the Nutch configuration
 * @param dir the crawl directory; if null, the "searcher.dir" property (default "crawl") is used
 * @throws IOException
 */
public NutchBean(Configuration conf, Path dir) throws IOException {
  this.conf = conf;
  this.fs = FileSystem.get(this.conf);
  if (dir == null) {
    dir = new Path(this.conf.get("searcher.dir", "crawl"));
  }
  final Path luceneConfig = new Path(dir, "search-servers.txt");
  final Path solrConfig = new Path(dir, "solr-servers.txt");
  if (fs.exists(luceneConfig) || fs.exists(solrConfig)) {
    searchBean = new DistributedSearchBean(conf, luceneConfig, solrConfig);
  } else {
    final Path indexDir = new Path(dir, "index");
    final Path indexesDir = new Path(dir, "indexes");
    searchBean = new LuceneSearchBean(conf, indexDir, indexesDir);
  }
  String tableName = conf.get("table.name", "webtable");
  table = new HTable(tableName);
  summarizerFactory = new SummarizerFactory(conf);
}
public void setConf(Configuration conf) {
  this.scriptName = conf.get(SCRIPT_FILENAME_KEY);
  this.maxArgs = conf.getInt(SCRIPT_ARG_COUNT_KEY, DEFAULT_ARG_COUNT);
  this.conf = conf;
}
/**
 * Add a file that has been localized to the conf. Used by internal DistributedCache code.
 *
 * @param conf The conf to modify to contain the localized caches
 * @param str a comma-separated list of local files
 */
public static void addLocalFiles(Configuration conf, String str) {
  String files = conf.get(CACHE_LOCALFILES);
  conf.set(CACHE_LOCALFILES, files == null ? str : files + "," + str);
}
/**
 * Add an archive to be localized to the conf. Intended to be used by user code.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addCacheArchive(URI uri, Configuration conf) {
  String archives = conf.get(CACHE_ARCHIVES);
  conf.set(CACHE_ARCHIVES, archives == null ? uri.toString() : archives + "," + uri.toString());
}
/**
 * Add a file to be localized to the conf. Intended to be used by user code.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addCacheFile(URI uri, Configuration conf) {
  String files = conf.get(CACHE_FILES);
  conf.set(CACHE_FILES, files == null ? uri.toString() : files + "," + uri.toString());
}
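// Hedged usage sketch: distributes one file and one archive through the cache properties used
// by the two methods above. The URIs are hypothetical and only illustrate the expected form.
static void exampleAddCacheEntries(Configuration conf) throws URISyntaxException {
  addCacheFile(new URI("hdfs:///user/data/lookup.txt"), conf);  // appended to CACHE_FILES
  addCacheArchive(new URI("hdfs:///user/data/dict.zip"), conf); // appended to CACHE_ARCHIVES
}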
/**
 * Add an archive that has been localized to the conf. Used by internal DistributedCache code.
 *
 * @param conf The conf to modify to contain the localized caches
 * @param str a comma-separated list of local archives
 */
public static void addLocalArchives(Configuration conf, String str) {
  String archives = conf.get(CACHE_LOCALARCHIVES);
  conf.set(CACHE_LOCALARCHIVES, archives == null ? str : archives + "," + str);
}
/**
 * Add an archive to be localized to the conf.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addSharedCacheArchive(URI uri, Configuration conf) {
  String archives = conf.get("mapred.cache.shared.archives");
  conf.set(
      "mapred.cache.shared.archives",
      archives == null ? uri.toString() : archives + "," + uri.toString());
}
/**
 * Add a file to be localized to the conf.
 *
 * @param uri The uri of the cache to be localized
 * @param conf Configuration to add the cache to
 */
public static void addCacheFile(URI uri, Configuration conf) {
  String files = conf.get("mapred.cache.files");
  conf.set("mapred.cache.files", files == null ? uri.toString() : files + "," + uri.toString());
}
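// Hedged usage sketch for the two property-keyed variants above ("mapred.cache.files" and
// "mapred.cache.shared.archives"); the URIs are hypothetical.
static void exampleAddRawCacheEntries(Configuration conf) throws URISyntaxException {
  addCacheFile(new URI("hdfs:///user/data/terms.txt"), conf);        // appends to mapred.cache.files
  addSharedCacheArchive(new URI("hdfs:///shared/common.jar"), conf); // appends to mapred.cache.shared.archives
}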