static {
  specialClasses.put("org.apache.spark.repl.Main", "spark-shell");
  specialClasses.put(
    "org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver",
    "spark-internal");
  specialClasses.put(
    "org.apache.spark.sql.hive.thriftserver.HiveThriftServer2",
    "spark-internal");
}
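// A minimal sketch (hypothetical helper, not in the original source): the map above lets
// the builder substitute an internal placeholder resource when the user's main class is
// one of the special shells. The helper name below is illustrative.
private static String resolveSpecialResourceSketch(String mainClass, String defaultResource) {
  // Special classes run with a placeholder instead of a user-supplied app resource.
  return specialClasses.getOrDefault(mainClass, defaultResource);
}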
private void constructEnvVarArgs(
    Map<String, String> env,
    String submitArgsEnvVariable) throws IOException {
  mergeEnvPathList(env, getLibPathEnvName(),
    getEffectiveConfig().get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));

  StringBuilder submitArgs = new StringBuilder();
  for (String arg : buildSparkSubmitArgs()) {
    if (submitArgs.length() > 0) {
      submitArgs.append(" ");
    }
    submitArgs.append(quoteForCommandString(arg));
  }
  env.put(submitArgsEnvVariable, submitArgs.toString());
}
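// Illustrative note (an assumption about quoteForCommandString, which is defined elsewhere
// in the launcher): each argument appears to be wrapped in double quotes with embedded
// quotes and backslashes escaped, so for buildSparkSubmitArgs() returning
// ["--master", "local[2]"], the environment variable would hold:
//   "--master" "local[2]"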
private List<String> buildSparkRCommand(Map<String, String> env) throws IOException {
  if (!appArgs.isEmpty() && appArgs.get(0).endsWith(".R")) {
    System.err.println(
      "Running R applications through 'sparkR' is not supported as of Spark 2.0.\n" +
      "Use ./bin/spark-submit <R file>");
    System.exit(-1);
  }
  // When launching the SparkR shell, store the spark-submit arguments in the SPARKR_SUBMIT_ARGS
  // env variable.
  constructEnvVarArgs(env, "SPARKR_SUBMIT_ARGS");

  // Set shell.R as R_PROFILE_USER to load the SparkR package when the shell comes up.
  String sparkHome = System.getenv("SPARK_HOME");
  env.put("R_PROFILE_USER",
    join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));

  List<String> args = new ArrayList<>();
  args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
  return args;
}
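// Hypothetical usage sketch (not part of the original file): the returned command is just
// the R executable; the spark-submit arguments travel through the environment map. A caller
// might wire the two together roughly like this (the method name is illustrative, and
// java.util.HashMap is assumed to be imported):
private Process launchSparkRShellSketch() throws IOException {
  Map<String, String> env = new HashMap<>();
  List<String> cmd = buildSparkRCommand(env);
  ProcessBuilder pb = new ProcessBuilder(cmd);
  // SPARKR_SUBMIT_ARGS and R_PROFILE_USER set above must reach the child process.
  pb.environment().putAll(env);
  pb.inheritIO();
  return pb.start();
}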