@After
public void baseTeardown() throws Exception {
  for (final HeliosClient client : clients) {
    client.close();
  }
  clients.clear();

  for (final Service service : services) {
    try {
      service.stopAsync();
    } catch (Exception e) {
      log.error("Uncaught exception", e);
    }
  }
  for (final Service service : services) {
    try {
      service.awaitTerminated();
    } catch (Exception e) {
      log.error("Service failed", e);
    }
  }
  services.clear();

  // Clean up docker containers created by this test run
  try (final DockerClient dockerClient = getNewDockerClient()) {
    final List<Container> containers = dockerClient.listContainers();
    for (final Container container : containers) {
      for (final String name : container.names()) {
        if (name.contains(testTag)) {
          try {
            dockerClient.killContainer(container.id());
          } catch (DockerException e) {
            log.error("Failed to kill container {}", container.id(), e);
          }
          break;
        }
      }
    }
  } catch (Exception e) {
    log.error("Docker client exception", e);
  }

  if (zk != null) {
    zk.close();
  }

  listThreads();
}
/** Blocks until the given service terminates, failing if it has not done so within {@code timeout} milliseconds. */
public static void awaitTermination(Service service, long timeout) {
  final long begin = System.currentTimeMillis();
  while (true) {
    State state = service.state();
    if (state == State.TERMINATED) {
      break;
    }
    if (state == State.FAILED) {
      throw new IllegalStateException("Service '" + service + "' execution unexpectedly failed");
    }
    TimeUtils.sleepMillis(500);
    long diff = System.currentTimeMillis() - begin;
    if (diff > timeout) {
      throw new RuntimeException(
          String.format("Timed out waiting for service %s to terminate after %d ms", service, diff));
    }
  }
}
@Override
public ProgramController run(Program program, ProgramOptions options) {
  // Extract and verify parameters
  final ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");

  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type.");
  Preconditions.checkArgument(processorType == ProgramType.SPARK,
                              "Only Spark process type is supported.");

  final SparkSpecification spec = appSpec.getSpark().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());

  // Get the runId. If this Spark program was started by another program (e.g. a Workflow),
  // it inherits that program's runId.
  Arguments arguments = options.getArguments();
  RunId runId = RunIds.fromString(arguments.getOption(ProgramOptionConstants.RUN_ID));

  long logicalStartTime = arguments.hasOption(ProgramOptionConstants.LOGICAL_START_TIME)
      ? Long.parseLong(arguments.getOption(ProgramOptionConstants.LOGICAL_START_TIME))
      : System.currentTimeMillis();

  WorkflowToken workflowToken = null;
  if (arguments.hasOption(ProgramOptionConstants.WORKFLOW_TOKEN)) {
    workflowToken = GSON.fromJson(arguments.getOption(ProgramOptionConstants.WORKFLOW_TOKEN),
                                  BasicWorkflowToken.class);
  }

  ClientSparkContext context = new ClientSparkContext(program, runId, logicalStartTime,
                                                      options.getUserArguments().asMap(),
                                                      new TransactionContext(txSystemClient),
                                                      datasetFramework, discoveryServiceClient,
                                                      metricsCollectionService, workflowToken);

  Spark spark;
  try {
    spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();

    // Fields injection
    Reflections.visit(spark, TypeToken.of(spark.getClass()),
                      new PropertyFieldSetter(spec.getProperties()),
                      new DataSetFieldSetter(context),
                      new MetricsFieldSetter(context.getMetrics()));
  } catch (Exception e) {
    LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
    throw Throwables.propagate(e);
  }

  Service sparkRuntimeService = new SparkRuntimeService(cConf, hConf, spark,
                                                        new SparkContextFactory(hConf, context,
                                                                                datasetFramework, streamAdmin),
                                                        program.getJarLocation(), txSystemClient);

  sparkRuntimeService.addListener(createRuntimeServiceListener(program.getId(), runId, arguments),
                                  Threads.SAME_THREAD_EXECUTOR);
  ProgramController controller = new SparkProgramController(sparkRuntimeService, context);

  LOG.info("Starting Spark Job: {}", context);
  sparkRuntimeService.start();
  return controller;
}
@Override
public void dispose() {
  super.dispose();
  service.stopAsync();
}
@Override
protected void doStop() {
  // Stop the decorated service, then shut down the callback executor
  decoratedService.stop();
  callbackExecutor.shutdownNow();
}