private static void setMRFrameworkClasspath(Map<String, String> environment)
    throws IOException {
  // Get the yarn mapreduce-app classpath from the generated classpath file.
  // Works only if the compile-time environment matches the runtime one; mainly tests.
  ClassLoader thisClassLoader = Thread.currentThread().getContextClassLoader();
  String mrAppGeneratedClasspathFile = "mrapp-generated-classpath";
  InputStream classpathFileStream =
      thisClassLoader.getResourceAsStream(mrAppGeneratedClasspathFile);
  if (classpathFileStream == null) {
    // Guard against an NPE: getResourceAsStream returns null when the file is absent.
    throw new IOException(
        "Could not find " + mrAppGeneratedClasspathFile + " on the classpath");
  }
  // try-with-resources closes the reader, which closes the underlying stream.
  try (BufferedReader reader =
      new BufferedReader(new InputStreamReader(classpathFileStream))) {
    String cp = reader.readLine();
    if (cp != null) {
      Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), cp.trim());
    }
    // Put the file itself on the classpath for tasks.
    Apps.addToEnvironment(
        environment,
        Environment.CLASSPATH.name(),
        thisClassLoader.getResource(mrAppGeneratedClasspathFile).getFile().split("!")[0]);
    // Add standard Hadoop classes.
    for (String c : ApplicationConstants.APPLICATION_CLASSPATH) {
      Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c);
    }
  }
  // TODO: Remove duplicates.
}
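// Usage sketch (assumed, for illustration; not from the source): Apps.addToEnvironment
// appends to an existing variable with the platform path separator, so repeated calls
// accumulate a CLASSPATH rather than overwrite it.
Map<String, String> env = new HashMap<>();
Apps.addToEnvironment(env, Environment.CLASSPATH.name(), "./job.jar");
Apps.addToEnvironment(env, Environment.CLASSPATH.name(), "./*");
// env.get("CLASSPATH") is now "./job.jar" + File.pathSeparator + "./*"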
void setupWorkerEnv(Map<String, String> workerEnv) {
  for (String c : configuration.getStrings(
      YarnConfiguration.YARN_APPLICATION_CLASSPATH,
      YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
    addToEnv(workerEnv, Environment.CLASSPATH.name(), c.trim());
  }
  addToEnv(workerEnv, Environment.CLASSPATH.name(),
      Environment.PWD.$() + File.separator + "*");
}
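// Hypothetical call site (assumed names, for illustration): the populated map is applied
// to the worker's ContainerLaunchContext before the container is started.
Map<String, String> workerEnv = new HashMap<>();
setupWorkerEnv(workerEnv);
ContainerLaunchContext workerCtx = Records.newRecord(ContainerLaunchContext.class);
workerCtx.setEnvironment(workerEnv);
nmClient.startContainer(container, workerCtx); // nmClient and container are assumed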
private void setupAppMasterEnv(Map<String, String> appMasterEnv) {
  for (String c : conf.getStrings(
      YarnConfiguration.YARN_APPLICATION_CLASSPATH,
      YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
    Apps.addToEnvironment(appMasterEnv, Environment.CLASSPATH.name(), c.trim());
  }
  Apps.addToEnvironment(appMasterEnv, Environment.CLASSPATH.name(),
      Environment.PWD.$() + File.separator + "*");
}
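// Hypothetical continuation (assumed, modeled on the standard YARN client flow): the env
// map is attached to the application master's launch context before submission.
ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
amContainer.setCommands(Collections.singletonList(
    Environment.JAVA_HOME.$() + "/bin/java MyAppMaster" // MyAppMaster is a placeholder
        + " 1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout"
        + " 2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr"));
Map<String, String> appMasterEnv = new HashMap<>();
setupAppMasterEnv(appMasterEnv);
amContainer.setEnvironment(appMasterEnv);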
public static void setupYarnClassPath(Configuration conf, Map<String, String> appMasterEnv) {
  addToEnvironment(
      appMasterEnv,
      Environment.CLASSPATH.name(),
      appMasterEnv.get(YarnConfigKeys.ENV_FLINK_CLASSPATH));
  String[] applicationClassPathEntries = conf.getStrings(
      YarnConfiguration.YARN_APPLICATION_CLASSPATH,
      YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH);
  for (String c : applicationClassPathEntries) {
    addToEnvironment(appMasterEnv, Environment.CLASSPATH.name(), c.trim());
  }
}
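// Hypothetical call site (assumed, for illustration): the caller stores the assembled
// Flink classpath under ENV_FLINK_CLASSPATH, which this method prepends before
// appending YARN's application classpath.
Map<String, String> appMasterEnv = new HashMap<>();
appMasterEnv.put(YarnConfigKeys.ENV_FLINK_CLASSPATH, "flink-dist.jar"); // value is assumed
setupYarnClassPath(yarnConfiguration, appMasterEnv); // yarnConfiguration is assumed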
/**
 * Connects to the CM, sets up the container launch context for the shell command, and
 * eventually dispatches the container start request to the CM.
 */
@Override
public void run() {
  System.out.println(
      "Setting up container launch context for containerId=" + container.getId());
  ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);

  // Set the environment. Use the platform path separator consistently; the original
  // appended both ':' and File.pathSeparatorChar, producing a doubled separator.
  StringBuilder classPathEnv =
      new StringBuilder(Environment.CLASSPATH.$())
          .append(File.pathSeparatorChar)
          .append("./*");
  for (String c : am.getConf()
      .getStrings(
          YarnConfiguration.YARN_APPLICATION_CLASSPATH,
          YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
    classPathEnv.append(File.pathSeparatorChar);
    classPathEnv.append(c.trim());
  }
  if (am.getConf().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
    classPathEnv.append(File.pathSeparatorChar);
    classPathEnv.append(System.getProperty("java.class.path"));
  }
  am.getShellEnv().put("CLASSPATH", classPathEnv.toString());

  // Apply the environment to the launch context.
  ctx.setEnvironment(am.getShellEnv());
  ctx.setLocalResources(task.buildScriptsAndSetResources(container));

  // Set the necessary command to execute on the allocated container.
  Vector<CharSequence> vargs = new Vector<>(5);
  vargs.add(Environment.JAVA_HOME.$() + "/bin/java");
  // Set Xmx based on the AM memory size.
  vargs.add("-Xmx" + am.getContainerMemory() + "m");
  // Set the class name.
  vargs.add(HiWayConfiguration.HIWAY_WORKER_CLASS);
  vargs.add("--appId " + am.getAppId().toString());
  vargs.add("--containerId " + container.getId().toString());
  vargs.add("--workflowId " + task.getWorkflowId());
  vargs.add("--taskId " + task.getTaskId());
  vargs.add("--taskName " + task.getTaskName());
  vargs.add("--langLabel " + task.getLanguageLabel());
  vargs.add("--id " + task.getId());
  for (Data inputData : task.getInputData()) {
    vargs.add("--input " + inputData.getLocalPath() + ","
        + inputData.isInput() + "," + inputData.getContainerId());
  }
  for (Data outputData : task.getOutputData()) {
    vargs.add("--output " + outputData.getLocalPath());
  }
  if (am.isDetermineFileSizes()) {
    vargs.add("--size");
  }
  String invocScript = task.getInvocScript();
  if (invocScript.length() > 0) {
    vargs.add("--invocScript " + invocScript);
  }
  vargs.add(">> " + Invocation.STDOUT_FILENAME);
  vargs.add("2>> " + Invocation.STDERR_FILENAME);

  // Get the final command.
  StringBuilder command = new StringBuilder();
  for (CharSequence str : vargs) {
    command.append(str).append(" ");
  }
  List<String> commands = new ArrayList<>();
  commands.add(command.toString());
  ctx.setCommands(commands);

  /* Set up tokens for the container. For normal shell commands, the container in
   * distributed shell doesn't need any tokens. We populate them mainly so NodeManagers
   * can download any files in the distributed file system. The tokens are also useful
   * in cases when, e.g., one runs a "hadoop dfs" command inside the distributed shell. */
  ctx.setTokens(am.getAllTokens().duplicate());

  containerListener.addContainer(container.getId(), container);
  am.getNmClientAsync().startContainerAsync(container, ctx);
}
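// Portability note (a sketch, not from the source): instead of the client-side
// File.pathSeparatorChar, newer YARN code uses ApplicationConstants.CLASS_PATH_SEPARATOR,
// a token the NodeManager expands to the target platform's separator at launch time,
// paired with the Environment.$$() expansion form:
StringBuilder portableCp = new StringBuilder(Environment.CLASSPATH.$$())
    .append(ApplicationConstants.CLASS_PATH_SEPARATOR)
    .append("./*");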
public static void setClasspath(Map<String, String> environment) throws IOException {
  Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), MRJobConfig.JOB_JAR);
  Apps.addToEnvironment(
      environment,
      Environment.CLASSPATH.name(),
      Environment.PWD.$() + Path.SEPARATOR + "*");
  MRApps.setMRFrameworkClasspath(environment);
}
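// Usage sketch (assumed, for illustration; the call to MRApps.setMRFrameworkClasspath
// suggests this method lives on MRApps): the resulting CLASSPATH lists the job jar entry
// first, then the container working directory wildcard, then the framework entries.
Map<String, String> environment = new HashMap<>();
MRApps.setClasspath(environment);
// environment is later attached to a task's ContainerLaunchContext via setEnvironment().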