Example #1
 static {
   specialClasses.put("org.apache.spark.repl.Main", "spark-shell");
   specialClasses.put(
       "org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver", "spark-internal");
   specialClasses.put(
       "org.apache.spark.sql.hive.thriftserver.HiveThriftServer2", "spark-internal");
 }
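A minimal sketch (hypothetical helper, not part of the excerpt above) of how this table can be consulted: when the requested main class is one of the special shell/server entry points, the launcher can substitute the mapped internal resource name instead of requiring a user-supplied jar.

 // Hypothetical illustration only: fall back to the user-provided resource when the
 // main class is not one of the special entry points registered above.
 static String resolveAppResource(String mainClass, String userResource) {
   String special = specialClasses.get(mainClass);
   return special != null ? special : userResource;
 }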
Example #2
 private boolean isClientMode(Map<String, String> userProps) {
   String userMaster = firstNonEmpty(master, userProps.get(SparkLauncher.SPARK_MASTER));
   String userDeployMode = firstNonEmpty(deployMode, userProps.get(SparkLauncher.DEPLOY_MODE));
   // Default master is "local[*]", so assume client mode in that case
   return userMaster == null
       || "client".equals(userDeployMode)
       || (!userMaster.equals("yarn-cluster") && userDeployMode == null);
 }
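A short illustration (hypothetical calls, assuming the builder's master and deployMode fields are unset) of how the three clauses combine:

   // Hypothetical usage, assuming this.master and this.deployMode are null:
   Map<String, String> props = new HashMap<>();
   isClientMode(props);                                   // true: no master, defaults to local[*]

   props.put(SparkLauncher.DEPLOY_MODE, "client");
   isClientMode(props);                                   // true: explicit client deploy mode

   props.clear();
   props.put(SparkLauncher.SPARK_MASTER, "yarn-cluster");
   isClientMode(props);                                   // false: cluster master, no deploy mode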
Example #3
  private List<String> buildSparkSubmitCommand(Map<String, String> env) throws IOException {
    // Load the properties file and check whether spark-submit will be running the app's driver
    // or just launching a cluster app. When running the driver, the JVM's arguments will be
    // modified to cover the driver's configuration.
    Map<String, String> config = getEffectiveConfig();
    boolean isClientMode = isClientMode(config);
    String extraClassPath = isClientMode ? config.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH) : null;

    List<String> cmd = buildJavaCommand(extraClassPath);
    // The Thrift Server runs as a daemon, so apply SPARK_DAEMON_JAVA_OPTS.
    if (isThriftServer(mainClass)) {
      addOptionString(cmd, System.getenv("SPARK_DAEMON_JAVA_OPTS"));
    }
    addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
    addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));

    if (isClientMode) {
      // Figuring out where the memory value comes from is a little tricky due to precedence.
      // Precedence is observed in the following order:
      // - explicit configuration (setConf()), which also covers the --driver-memory CLI argument.
      // - properties file.
      // - SPARK_DRIVER_MEMORY env variable.
      // - SPARK_MEM env variable.
      // - default value (1g).
      // The Thrift Server runs as a daemon, so its memory comes from SPARK_DAEMON_MEMORY.
      String tsMemory = isThriftServer(mainClass) ? System.getenv("SPARK_DAEMON_MEMORY") : null;
      String memory =
          firstNonEmpty(
              tsMemory,
              config.get(SparkLauncher.DRIVER_MEMORY),
              System.getenv("SPARK_DRIVER_MEMORY"),
              System.getenv("SPARK_MEM"),
              DEFAULT_MEM);
      cmd.add("-Xms" + memory);
      cmd.add("-Xmx" + memory);
      addOptionString(cmd, config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS));
      mergeEnvPathList(
          env, getLibPathEnvName(), config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
    }

    addPermGenSizeOpt(cmd);
    cmd.add("org.apache.spark.deploy.SparkSubmit");
    cmd.addAll(buildSparkSubmitArgs());
    return cmd;
  }
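A minimal sketch (hypothetical values) of how the memory precedence resolves, assuming firstNonEmpty returns its first argument that is neither null nor empty:

     // Hypothetical values: not the Thrift Server, spark.driver.memory set to 4g,
     // SPARK_DRIVER_MEMORY set to 2g, SPARK_MEM unset.
     String memory = firstNonEmpty(
         null,          // tsMemory: SPARK_DAEMON_MEMORY applies only to the Thrift Server
         "4g",          // spark.driver.memory from setConf() or the properties file
         "2g",          // SPARK_DRIVER_MEMORY env variable (shadowed by the entry above)
         null,          // SPARK_MEM env variable
         DEFAULT_MEM);  // fallback "1g"
     // memory == "4g", so the command gets "-Xms4g" and "-Xmx4g".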
Example #4
  private void constructEnvVarArgs(Map<String, String> env, String submitArgsEnvVariable)
      throws IOException {
    mergeEnvPathList(
        env,
        getLibPathEnvName(),
        getEffectiveConfig().get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));

    StringBuilder submitArgs = new StringBuilder();
    for (String arg : buildSparkSubmitArgs()) {
      if (submitArgs.length() > 0) {
        submitArgs.append(" ");
      }
      submitArgs.append(quoteForCommandString(arg));
    }
    env.put(submitArgsEnvVariable, submitArgs.toString());
  }
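A hypothetical illustration of the value this produces, assuming quoteForCommandString wraps each token in double quotes and escapes embedded quotes and backslashes:

     // If buildSparkSubmitArgs() returned ["--master", "yarn", "--name", "my app"],
     // the loop above would set the environment variable to the single string:
     //   "--master" "yarn" "--name" "my app"
     // e.g. under SPARKR_SUBMIT_ARGS for the SparkR case in the next example.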
Example #5
  private List<String> buildSparkRCommand(Map<String, String> env) throws IOException {
    if (!appArgs.isEmpty() && appArgs.get(0).endsWith(".R")) {
      System.err.println(
          "Running R applications through 'sparkR' is not supported as of Spark 2.0.\n"
              + "Use ./bin/spark-submit <R file>");
      System.exit(-1);
    }
    // When launching the SparkR shell, store the spark-submit arguments in the SPARKR_SUBMIT_ARGS
    // env variable.
    constructEnvVarArgs(env, "SPARKR_SUBMIT_ARGS");

    // Set shell.R as R_PROFILE_USER to load the SparkR package when the shell comes up.
    String sparkHome = System.getenv("SPARK_HOME");
    env.put(
        "R_PROFILE_USER",
        join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));

    List<String> args = new ArrayList<>();
    args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
    return args;
  }
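For instance (hypothetical values), with SPARK_HOME=/opt/spark on a Unix-like system and SPARKR_DRIVER_R unset, this method would set:

     //   SPARKR_SUBMIT_ARGS -> the quoted spark-submit arguments from constructEnvVarArgs()
     //   R_PROFILE_USER     -> /opt/spark/R/lib/SparkR/profile/shell.R
     // and return ["R"] as the command to launch.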