/**
 * Uninstalls the named Hive cluster and reports the tracked operation result.
 *
 * @param clusterName name of the Hive cluster to remove; must not be null
 * @return 500 with a "not found" message when no such cluster exists,
 *         otherwise the response produced from the finished operation state
 */
@Override
public Response uninstallCluster( final String clusterName )
{
    Preconditions.checkNotNull( clusterName );

    // Guard: nothing to uninstall if the cluster is unknown.
    HiveConfig cluster = hiveManager.getCluster( clusterName );
    if ( cluster == null )
    {
        return Response.status( Response.Status.INTERNAL_SERVER_ERROR )
                       .entity( clusterName + " cluster not found." )
                       .build();
    }

    UUID operationId = hiveManager.uninstallCluster( clusterName );
    OperationState finalState = waitUntilOperationFinish( operationId );
    return createResponse( operationId, finalState );
}
/**
 * Installs a new Hive cluster over an existing Hadoop cluster.
 *
 * @param clusterName       name for the new Hive cluster; must not be null
 * @param hadoopClusterName underlying Hadoop cluster name; must not be null
 * @param server            host id of the Hive server node; must not be null
 * @param namenode          host id of the Hadoop namenode; must not be null
 * @param clients           JSON array of client host ids; must not be null
 * @return the response produced from the finished install operation state
 */
@Override
public Response installCluster( final String clusterName, final String hadoopClusterName, final String server,
                                final String namenode, final String clients )
{
    Preconditions.checkNotNull( clusterName );
    Preconditions.checkNotNull( hadoopClusterName );
    Preconditions.checkNotNull( server );
    // Fix: namenode was the only parameter without a null check, yet it is
    // dereferenced below via config.setNamenode().
    Preconditions.checkNotNull( namenode );
    Preconditions.checkNotNull( clients );

    HiveConfig config = new HiveConfig();
    config.setClusterName( validateInput( clusterName, true ) );
    config.setHadoopClusterName( hadoopClusterName );
    config.setServer( server );
    config.setNamenode( namenode );

    List<String> hosts = JsonUtil.fromJson( clients, new TypeToken<List<String>>() {}.getType() );
    // Copy into a set to de-duplicate client host ids (replaces manual copy loop).
    Set<String> clientSet = Sets.newHashSet( hosts );
    config.setClients( clientSet );

    UUID uuid = hiveManager.installCluster( config );
    OperationState state = waitUntilOperationFinish( uuid );
    return createResponse( uuid, state );
}
@Override public Response getAvailableNodes(final String clusterName) { Set<String> hostsName = Sets.newHashSet(); HiveConfig hiveConfig = hiveManager.getCluster(clusterName); HadoopClusterConfig hadoopConfig = hadoopManager.getCluster(hiveConfig.getHadoopClusterName()); Set<String> nodes = new HashSet<>(hadoopConfig.getAllNodes()); nodes.removeAll(hiveConfig.getAllNodes()); if (!nodes.isEmpty()) { Set<EnvironmentContainerHost> hosts; try { hosts = environmentManager .loadEnvironment(hadoopConfig.getEnvironmentId()) .getContainerHostsByIds(nodes); for (final EnvironmentContainerHost host : hosts) { hostsName.add(host.getHostname()); } } catch (ContainerHostNotFoundException | EnvironmentNotFoundException e) { e.printStackTrace(); } } else { LOG.info("All nodes in corresponding Hadoop cluster have Nutch installed"); // return Response.status( Response.Status.NOT_FOUND ).build(); } String hosts = JsonUtil.GSON.toJson(hostsName); return Response.status(Response.Status.OK).entity(hosts).build(); }
/**
 * Returns the given Hive cluster's configuration as a JSON-serialized pojo.
 *
 * @param clusterName name of the Hive cluster to look up
 * @return 500 with a "not found" message when no such cluster exists,
 *         otherwise 200 with the serialized {@code HivePojo}
 */
@Override
public Response getCluster( final String clusterName )
{
    HiveConfig config = hiveManager.getCluster( clusterName );
    if ( config == null )
    {
        // Fix: the message was missing the space before "cluster"
        // ("foocluster not found"); now matches uninstallCluster's wording.
        return Response.status( Response.Status.INTERNAL_SERVER_ERROR )
                       .entity( clusterName + " cluster not found" )
                       .build();
    }

    String cluster = JsonUtil.GSON.toJson( parsePojo( config ) );
    return Response.status( Response.Status.OK ).entity( cluster ).build();
}
/**
 * Lists the names of all registered Hive clusters.
 *
 * @return 200 with a JSON array of cluster names (empty array when none exist)
 */
@Override
public Response getClusters()
{
    List<String> clusterNames = Lists.newArrayList();
    for ( HiveConfig config : hiveManager.getClusters() )
    {
        clusterNames.add( config.getClusterName() );
    }
    return Response.status( Response.Status.OK ).entity( JsonUtil.GSON.toJson( clusterNames ) ).build();
}
/**
 * Converts a {@link HiveConfig} into a UI-facing {@code HivePojo}, resolving
 * node hostnames through the cluster's environment and triggering a server
 * status check.
 *
 * @param config persisted Hive cluster configuration
 * @return populated pojo; partially populated if the environment is missing
 */
private HivePojo parsePojo( HiveConfig config )
{
    HivePojo pojo = new HivePojo();
    try
    {
        Environment env = environmentManager.loadEnvironment( config.getEnvironmentId() );

        pojo.setClusterName( config.getClusterName() );
        pojo.setEnvironmentId( config.getEnvironmentId() );
        pojo.setHadoopClusterName( config.getHadoopClusterName() );

        // Resolve the server node and attach its live status from the tracker.
        pojo.setServer( new NodePojo( config.getServer(), env ) );
        UUID uuid = hiveManager.statusCheck( config.getClusterName(), pojo.getServer().getHostname() );
        pojo.getServer().setStatus( checkStatus( tracker, uuid ) );

        Set<NodePojo> clients = new HashSet<>();
        for ( String slave : config.getClients() )
        {
            clients.add( new NodePojo( slave, env ) );
        }
        pojo.setClients( clients );
    }
    catch ( EnvironmentNotFoundException e )
    {
        // Fix: log through LOG with context instead of printStackTrace();
        // the partially built pojo is still returned, as before.
        LOG.error( "Environment not found for cluster " + config.getClusterName(), e );
    }
    return pojo;
}
/**
 * Exposes the Angular dependency list supplied by the plugin's web module.
 *
 * @return 200 with the web module's Angular dependency list as the entity
 */
@Override
public Response getAngularConfig()
{
    final Object angularDependencies = hiveManager.getWebModule().getAngularDependecyList();
    return Response.ok( angularDependencies ).build();
}