public void deployVerticle(String verticleName, DeploymentOptions options,
                           Handler<AsyncResult<String>> completionHandler) {
  ContextImpl currentContext = vertx.getOrCreateContext();
  ClassLoader cl = getClassLoader(options.getIsolationGroup());
  // Verticle names have the form "<prefix>:<actual name>"; the prefix selects the VerticleFactory
  int pos = verticleName.indexOf(':');
  if (pos == -1) {
    throw new IllegalArgumentException("verticleName must start with prefix");
  }
  String prefix = verticleName.substring(0, pos);
  if (pos + 1 >= verticleName.length()) {
    throw new IllegalArgumentException("Invalid name: " + verticleName);
  }
  String actualName = verticleName.substring(pos + 1);
  VerticleFactory verticleFactory = verticleFactories.get(prefix);
  if (verticleFactory == null) {
    // Use default Java verticle factory
    verticleFactory = DEFAULT_VERTICLE_FACTORY;
  }
  try {
    Verticle verticle = verticleFactory.createVerticle(actualName, cl);
    if (verticle == null) {
      reportFailure(new NullPointerException("VerticleFactory::createVerticle returned null"),
          currentContext, completionHandler);
    } else {
      doDeploy(verticle, options, currentContext, completionHandler);
    }
  } catch (Exception e) {
    reportFailure(e, currentContext, completionHandler);
  }
}
public static void main(String... args) throws FileNotFoundException {
  DeploymentOptions deploymentOptions = new DeploymentOptions().setInstances(1);
  deploymentOptions.setConfig(new JsonObject().put("http.port", 8082));
  Vertx vertx = Vertx.vertx();
  vertx.deployVerticle("com.stepan.verticles.ClientVerticle", deploymentOptions, event -> {
    System.out.println("Client Verticle successfully deployed: " + event.succeeded());
  });
  vertx.deployVerticle("com.stepan.verticles.ServerVerticle", deploymentOptions, event -> {
    System.out.println("Server Verticle successfully deployed: " + event.succeeded());
  });
}
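// A minimal sketch of what a verticle deployed this way might look like. The class body below is an
// assumption, not the real com.stepan.verticles.ServerVerticle; only the fully qualified name and
// the "http.port" config key come from the snippet above.
package com.stepan.verticles;

import io.vertx.core.AbstractVerticle;

public class ServerVerticle extends AbstractVerticle {

  @Override
  public void start() {
    // Read the port passed in via DeploymentOptions#setConfig, with a fallback default
    int port = config().getInteger("http.port", 8080);
    vertx.createHttpServer()
        .requestHandler(req -> req.response().end("Hello from ServerVerticle"))
        .listen(port);
  }
}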
@Before
public void runEnvironment(TestContext context) {
  PropertiesReader propertiesReader = new PropertiesReader();
  propertiesReader.read();
  DeploymentOptions dopts = new DeploymentOptions();
  dopts.setConfig(propertiesReader.getAsJson());
  PotatoVerticle potatoVerticle = new PotatoVerticle();
  rule.vertx().deployVerticle(potatoVerticle, dopts, context.asyncAssertSuccess());
  Guice.createInjector(
          new GuiceModuleForTests(propertiesReader.getAsProperties()),
          new DbModule(),
          new GuiceModule(rule.vertx()))
      .injectMembers(this);
  // Clear DB before running tests
  dBCleaner.apply();
}
// Add some information on a deployment in the cluster so other nodes know about it
private void addToHA(String deploymentID, String verticleName, DeploymentOptions deploymentOptions) {
  String encoded;
  synchronized (haInfo) {
    JsonObject verticleConf = new JsonObject().put("dep_id", deploymentID);
    verticleConf.put("verticle_name", verticleName);
    verticleConf.put("options", deploymentOptions.toJson());
    JsonArray haMods = haInfo.getJsonArray("verticles");
    haMods.add(verticleConf);
    encoded = haInfo.encode();
    clusterMap.put(nodeID, encoded);
  }
}
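// A minimal sketch of how another cluster node could read back the entry written by addToHA above.
// The field names ("verticles", "dep_id", "verticle_name") come from the code; remoteNodeID and
// treating clusterMap as a Map<String, String> of node ID to encoded JSON are assumptions made
// here for illustration.
String encodedInfo = clusterMap.get(remoteNodeID);
JsonObject remoteHaInfo = new JsonObject(encodedInfo);
for (Object o : remoteHaInfo.getJsonArray("verticles")) {
  JsonObject verticleConf = (JsonObject) o;
  System.out.println(verticleConf.getString("verticle_name")
      + " -> " + verticleConf.getString("dep_id"));
}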
private void doDeploy(Verticle verticle, DeploymentOptions options, ContextImpl currentContext,
                      Handler<AsyncResult<String>> completionHandler) {
  if (options.isMultiThreaded() && !options.isWorker()) {
    throw new IllegalArgumentException("If multi-threaded then must be worker too");
  }
  // Worker verticles run on a worker context; everything else runs on an event loop context
  ContextImpl context = options.isWorker()
      ? vertx.createWorkerContext(options.isMultiThreaded())
      : vertx.createEventLoopContext();
  String deploymentID = UUID.randomUUID().toString();
  DeploymentImpl deployment = new DeploymentImpl(deploymentID, context, verticle);
  context.setDeployment(deployment);
  // Track this deployment as a child of the deploying verticle's own deployment, if any
  Deployment parent = currentContext.getDeployment();
  if (parent != null) {
    parent.addChild(deployment);
  }
  JsonObject conf = options.getConfig() == null ? null : options.getConfig().copy(); // Copy it
  context.runOnContext(v -> {
    try {
      verticle.setVertx(vertx);
      verticle.setConfig(conf);
      verticle.setDeploymentID(deploymentID);
      Future<Void> startFuture = new FutureResultImpl<>();
      verticle.start(startFuture);
      startFuture.setHandler(ar -> {
        if (ar.succeeded()) {
          deployments.put(deploymentID, deployment);
          reportSuccess(deploymentID, currentContext, completionHandler);
        } else {
          reportFailure(ar.cause(), currentContext, completionHandler);
        }
      });
    } catch (Throwable t) {
      reportFailure(t, currentContext, completionHandler);
    }
  });
}
/** Iterate and deploy verticles */
private void deployVerticle(final Message<JsonObject> event) {
  // iterate over all candidates to be deployed
  Set<String> candidates = this.workingCopy.fieldNames();
  // detach from underlying json
  Map<String, JsonObject> initiants = new HashMap<>();
  candidates.forEach(id -> {
    JsonObject info = this.workingCopy.getJsonObject(id);
    JsonArray dependsOn = info.getJsonArray("dependsOn");
    // deployable now if it has no dependencies, or all of them are already deployed
    if (dependsOn == null || dependsOn.isEmpty()
        || deployed.getList().containsAll(dependsOn.getList())) {
      initiants.put(id, info);
    }
  });
  // remove the initiants
  initiants.keySet().forEach(id -> this.workingCopy.remove(id));
  // setup latch for the reply
  CountDownLatch latch = new CountDownLatch(initiants.size());
  if (initiants.isEmpty()) {
    event.reply(Boolean.TRUE);
    return;
  }
  // run over all dependencies
  initiants.forEach((id, info) -> {
    // get the name of the verticle
    String name = info.getString("name");
    final JsonObject localConfig = new JsonObject();
    localConfig.mergeIn(globalConfig);
    localConfig.mergeIn(info.getJsonObject("config", new JsonObject()));
    Handler<AsyncResult<String>> handler = innerEvent -> {
      if (innerEvent.succeeded()) {
        // add service to deployed-list
        deployed.add(id);
        // re-emit
        vertx.eventBus().send(LOOPBACK, workingCopy,
            (AsyncResult<Message<Boolean>> recursiveReply) -> {
              // always decrease latch
              latch.countDown();
              if (recursiveReply.succeeded() && recursiveReply.result().body()) {
                if (latch.getCount() == 0) {
                  event.reply(recursiveReply.result().body());
                }
              } else {
                event.fail(500, this.getFailure(id, recursiveReply));
              }
            });
      } else {
        event.fail(500, id + " >> " + innerEvent.cause().getMessage());
      }
    };
    LOG.log(Level.INFO, "Deploying: ''{0}''", new Object[] {id});
    DeploymentOptions deploymentOptions = new DeploymentOptions(info);
    vertx.deployVerticle(name, deploymentOptions.setConfig(localConfig), handler);
  });
}
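// A minimal sketch of the JSON this deployer walks. Each top-level field name is a deployment ID
// mapping to an object with the verticle "name", an optional "dependsOn" array of other IDs, an
// optional per-verticle "config" merged over the global config, and any further DeploymentOptions
// fields such as "instances". The IDs and class names below are invented for illustration.
JsonObject deployments = new JsonObject()
    .put("db", new JsonObject()
        .put("name", "com.example.DbVerticle")
        .put("config", new JsonObject().put("db.pool.size", 5)))
    .put("web", new JsonObject()
        .put("name", "com.example.WebVerticle")
        .put("instances", 2)
        .put("dependsOn", new JsonArray().add("db")));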