@Test
public void emptyConfigurationVariablesOnlyForNonSparkProperties() {
  Properties intpProperty = repl.getProperty();
  SparkConf sparkConf = repl.getSparkContext().getConf();
  for (Object oKey : intpProperty.keySet()) {
    String key = (String) oKey;
    String value = (String) intpProperty.get(key);
    LOGGER.debug(String.format("[%s]: [%s]", key, value));
    if (key.startsWith("spark.") && value.isEmpty()) {
      // an empty interpreter property starting with "spark." must not leak
      // into the SparkConf as an empty value.
      assertTrue(
          String.format("configuration starting from 'spark.' should not be empty. [%s]", key),
          !sparkConf.contains(key) || !sparkConf.get(key).isEmpty());
    }
  }
}
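// A minimal sketch of the fixture the test above assumes: `repl` and `LOGGER`
// are fields of the surrounding test class. The setup shown here is a
// hypothetical illustration of how such a fixture could be built, not the
// project's actual @BeforeClass code.
import java.util.Properties;

import org.apache.zeppelin.spark.SparkInterpreter;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SparkInterpreterTest {
  private static SparkInterpreter repl;
  private static final Logger LOGGER = LoggerFactory.getLogger(SparkInterpreterTest.class);

  @BeforeClass
  public static void setUp() {
    // open one interpreter for the test class so tests can inspect its SparkContext.
    repl = new SparkInterpreter(new Properties());
    repl.open();
  }
}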
private static JavaSparkContext createSparkContext(SparkContextOptions contextOptions) {
  if (contextOptions.getUsesProvidedSparkContext()) {
    LOG.info("Using a provided Spark Context");
    JavaSparkContext jsc = contextOptions.getProvidedSparkContext();
    if (jsc == null || jsc.sc().isStopped()) {
      LOG.error("The provided Spark context " + jsc + " was not created or was stopped");
      throw new RuntimeException("The provided Spark context was not created or was stopped");
    }
    return jsc;
  } else {
    LOG.info("Creating a brand new Spark Context.");
    SparkConf conf = new SparkConf();
    if (!conf.contains("spark.master")) {
      // set master if not set.
      conf.setMaster(contextOptions.getSparkMaster());
    }
    conf.setAppName(contextOptions.getAppName());
    // register immutable collections serializers because the SDK uses them.
    conf.set("spark.kryo.registrator", BeamSparkRunnerRegistrator.class.getName());
    conf.set("spark.serializer", KryoSerializer.class.getName());
    return new JavaSparkContext(conf);
  }
}
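// A minimal usage sketch for the factory above, assuming it sits in the same
// class as createSparkContext (the method is private). SparkContextOptions and
// PipelineOptionsFactory are Beam's real types; the option values below are
// illustrative assumptions, not verified against a particular Beam release.
import org.apache.beam.runners.spark.SparkContextOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.spark.api.java.JavaSparkContext;

static void demoCreateSparkContext() {
  SparkContextOptions options = PipelineOptionsFactory.as(SparkContextOptions.class);
  options.setAppName("beam-on-spark-example");  // becomes the Spark application name
  options.setSparkMaster("local[2]");           // used only when spark.master is not already set
  // usesProvidedSparkContext defaults to false, so the branch above builds a fresh context.
  JavaSparkContext jsc = createSparkContext(options);
  jsc.stop();
}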