/** * Parameter {@code uploadedJarLocation} is actually used to point to the local jar, because Flink * does not support uploading a jar file before hand. Jar files are always uploaded directly when * a program is submitted. */ public void submitTopologyWithOpts( final String name, final String uploadedJarLocation, final FlinkTopology topology) throws AlreadyAliveException, InvalidTopologyException { if (this.getTopologyJobId(name) != null) { throw new AlreadyAliveException(); } final URI uploadedJarUri; final URL uploadedJarUrl; try { uploadedJarUri = new File(uploadedJarLocation).getAbsoluteFile().toURI(); uploadedJarUrl = uploadedJarUri.toURL(); JobWithJars.checkJarFile(uploadedJarUrl); } catch (final IOException e) { throw new RuntimeException("Problem with jar file " + uploadedJarLocation, e); } try { FlinkClient.addStormConfigToTopology(topology, conf); } catch (ClassNotFoundException e) { LOG.error("Could not register class for Kryo serialization.", e); throw new InvalidTopologyException("Could not register class for Kryo serialization."); } final StreamGraph streamGraph = topology.getExecutionEnvironment().getStreamGraph(); streamGraph.setJobName(name); final JobGraph jobGraph = streamGraph.getJobGraph(); jobGraph.addJar(new Path(uploadedJarUri)); final Configuration configuration = jobGraph.getJobConfiguration(); configuration.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, jobManagerHost); configuration.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, jobManagerPort); final Client client; try { client = new Client(configuration); } catch (IOException e) { throw new RuntimeException("Could not establish a connection to the job manager", e); } try { ClassLoader classLoader = JobWithJars.buildUserCodeClassLoader( Lists.newArrayList(uploadedJarUrl), Collections.<URL>emptyList(), this.getClass().getClassLoader()); client.runDetached(jobGraph, classLoader); } catch (final ProgramInvocationException e) { throw new RuntimeException("Cannot execute job 
due to ProgramInvocationException", e); } }
/**
 * Compiles the flow step into a Flink {@link JobGraph} and submits it asynchronously.
 *
 * <p>In local mode a local cluster is started and a fresh {@code Client} is created for it;
 * in cluster mode the client is taken from the surrounding {@code ContextEnvironment} and the
 * jar containing this class is shipped so the cluster can load the job driver. The blocking
 * submission call is handed to {@code executorService} so this method returns immediately.
 *
 * @throws IOException if the submission jar cannot be added as a dependency
 */
protected void internalNonBlockingStart() throws IOException {
    // Translate the program plan into an optimized JobGraph.
    Plan plan = env.createProgramPlan();
    Optimizer optimizer =
            new Optimizer(new DataStatistics(), new org.apache.flink.configuration.Configuration());
    OptimizedPlan optimizedPlan = optimizer.compile(plan);

    final JobGraph jobGraph = new JobGraphGenerator().compileJobGraph(optimizedPlan);
    // Ship every user jar with the job.
    for (String jarPath : classPath) {
        jobGraph.addJar(new Path(jarPath));
    }

    jobID = jobGraph.getJobID();
    accumulatorCache.setJobID(jobID);

    if (isLocalExecution()) {
        flowStep.logInfo("Executing in local mode.");
        startLocalCluster();
        org.apache.flink.configuration.Configuration config =
                new org.apache.flink.configuration.Configuration();
        config.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, localCluster.hostname());
        client = new Client(config);
        client.setPrintStatusDuringExecution(env.getConfig().isSysoutLoggingEnabled());
    } else {
        flowStep.logInfo("Executing in cluster mode.");
        try {
            // Also ship the jar that contains this class itself.
            String path = this.getClass()
                    .getProtectionDomain()
                    .getCodeSource()
                    .getLocation()
                    .toURI()
                    .getPath();
            jobGraph.addJar(new Path(path));
            classPath.add(path);
        } catch (URISyntaxException e) {
            // FIX: preserve the original cause instead of discarding it, so the
            // underlying URI problem remains visible in the stack trace.
            throw new IOException("Could not add the submission JAR as a dependency.", e);
        }
        client = ((ContextEnvironment) env).getClient();
    }

    // Build the user-code class loader from all class path entries; entries that are not
    // already URLs are treated as plain file-system paths.
    List<URL> fileList = new ArrayList<URL>(classPath.size());
    for (String path : classPath) {
        URL url;
        try {
            url = new URL(path);
        } catch (MalformedURLException e) {
            url = new URL("file://" + path);
        }
        fileList.add(url);
    }
    final ClassLoader loader = JobWithJars.buildUserCodeClassLoader(
            fileList, Collections.<URL>emptyList(), getClass().getClassLoader());

    accumulatorCache.setClient(client);

    // runBlocking() blocks until the job finishes, so submit it on the executor
    // service to keep this method non-blocking.
    final Callable<JobSubmissionResult> callable = new Callable<JobSubmissionResult>() {
        @Override
        public JobSubmissionResult call() throws Exception {
            return client.runBlocking(jobGraph, loader);
        }
    };
    jobSubmission = executorService.submit(callable);

    flowStep.logInfo("submitted Flink job: " + jobID);
}