  /**
   * Returns a {@link JavaSparkContext} for the given options. When test reuse is enabled and no
   * externally provided context is in use, a cached context is reused across calls.
   */
  public static synchronized JavaSparkContext getSparkContext(SparkPipelineOptions options) {
    SparkContextOptions contextOptions = options.as(SparkContextOptions.class);
    // context reuse is ignored when an external context is provided.
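    // note: Boolean.getBoolean(String) is true only when the JVM system property named by
    // TEST_REUSE_SPARK_CONTEXT exists and equals "true".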
   if (Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT)
       && !contextOptions.getUsesProvidedSparkContext()) {
     // if the context is null or stopped for some reason, re-create it.
     if (sparkContext == null || sparkContext.sc().isStopped()) {
       sparkContext = createSparkContext(contextOptions);
       sparkMaster = options.getSparkMaster();
     } else if (!options.getSparkMaster().equals(sparkMaster)) {
       throw new IllegalArgumentException(
           String.format(
                "Cannot reuse Spark context "
                    + "with a different Spark master URL. Existing: %s, requested: %s.",
               sparkMaster, options.getSparkMaster()));
     }
     return sparkContext;
   } else {
     return createSparkContext(contextOptions);
   }
 }
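
  /**
   * Creates a new {@link JavaSparkContext}, or validates and returns the externally provided
   * context when {@code usesProvidedSparkContext} is set.
   */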
 private static JavaSparkContext createSparkContext(SparkContextOptions contextOptions) {
   if (contextOptions.getUsesProvidedSparkContext()) {
     LOG.info("Using a provided Spark Context");
     JavaSparkContext jsc = contextOptions.getProvidedSparkContext();
     if (jsc == null || jsc.sc().isStopped()) {
       LOG.error("The provided Spark context " + jsc + " was not created or was stopped");
       throw new RuntimeException("The provided Spark context was not created or was stopped");
     }
     return jsc;
   } else {
     LOG.info("Creating a brand new Spark Context.");
     SparkConf conf = new SparkConf();
     if (!conf.contains("spark.master")) {
       // set master if not set.
       conf.setMaster(contextOptions.getSparkMaster());
     }
     conf.setAppName(contextOptions.getAppName());
      // register Kryo serializers for the immutable collections the SDK uses.
     conf.set("spark.kryo.registrator", BeamSparkRunnerRegistrator.class.getName());
     conf.set("spark.serializer", KryoSerializer.class.getName());
     return new JavaSparkContext(conf);
   }
 }
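
  // A minimal usage sketch (illustrative only; assumes this factory class is named
  // SparkContextFactory and that Beam's PipelineOptionsFactory is on the classpath):
  //
  //   SparkPipelineOptions opts = PipelineOptionsFactory.as(SparkPipelineOptions.class);
  //   opts.setSparkMaster("local[2]");
  //   JavaSparkContext jsc = SparkContextFactory.getSparkContext(opts);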