@SuppressWarnings("unchecked") public static void main(String[] args) { final Cli.CliBuilder<Runnable> builder = Cli.builder("druid"); builder .withDescription("Druid command-line runner.") .withDefaultCommand(Help.class) .withCommands(Help.class, Version.class); builder .withGroup("server") .withDescription("Run one of the Druid server types.") .withDefaultCommand(Help.class) .withCommands( CliCoordinator.class, CliHistorical.class, CliBroker.class, CliRealtime.class, CliOverlord.class, CliMiddleManager.class, CliBridge.class, CliRouter.class); builder .withGroup("example") .withDescription("Run an example") .withDefaultCommand(Help.class) .withCommands(CliRealtimeExample.class); builder .withGroup("tools") .withDescription("Various tools for working with Druid") .withDefaultCommand(Help.class) .withCommands(ConvertProperties.class, DruidJsonValidator.class, PullDependencies.class); builder .withGroup("index") .withDescription("Run indexing for druid") .withDefaultCommand(Help.class) .withCommands(CliHadoopIndexer.class); builder .withGroup("internal") .withDescription( "Processes that Druid runs \"internally\", you should rarely use these directly") .withDefaultCommand(Help.class) .withCommands(CliPeon.class, CliInternalHadoopIndexer.class); final Injector injector = GuiceInjectors.makeStartupInjector(); final ExtensionsConfig config = injector.getInstance(ExtensionsConfig.class); final Collection<CliCommandCreator> extensionCommands = Initialization.getFromExtensions(config, CliCommandCreator.class); for (CliCommandCreator creator : extensionCommands) { creator.addCommands(builder); } final Cli<Runnable> cli = builder.build(); try { final Runnable command = cli.parse(args); if (!(command instanceof Help)) { // Hack to work around Help not liking being injected injector.injectMembers(command); } command.run(); } catch (ParseException e) { System.out.println("ERROR!!!!"); System.out.println(e.getMessage()); System.out.println("==="); cli.parse(new String[] {"help"}).run(); } }
@SuppressWarnings("unchecked") @Override public TaskStatus run(TaskToolbox toolbox) throws Exception { final List<String> finalHadoopDependencyCoordinates = hadoopDependencyCoordinates != null ? hadoopDependencyCoordinates : toolbox.getConfig().getDefaultHadoopCoordinates(); final DefaultTeslaAether aetherClient = Initialization.getAetherClient(extensionsConfig); final List<URL> extensionURLs = Lists.newArrayList(); for (String coordinate : extensionsConfig.getCoordinates()) { final ClassLoader coordinateLoader = Initialization.getClassLoaderForCoordinates(aetherClient, coordinate); extensionURLs.addAll(Arrays.asList(((URLClassLoader) coordinateLoader).getURLs())); } final List<URL> nonHadoopURLs = Lists.newArrayList(); nonHadoopURLs.addAll( Arrays.asList(((URLClassLoader) HadoopIndexTask.class.getClassLoader()).getURLs())); final List<URL> driverURLs = Lists.newArrayList(); driverURLs.addAll(nonHadoopURLs); // put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts for (String hadoopDependencyCoordinate : finalHadoopDependencyCoordinates) { final ClassLoader hadoopLoader = Initialization.getClassLoaderForCoordinates(aetherClient, hadoopDependencyCoordinate); driverURLs.addAll(Arrays.asList(((URLClassLoader) hadoopLoader).getURLs())); } final URLClassLoader loader = new URLClassLoader(driverURLs.toArray(new URL[driverURLs.size()]), null); Thread.currentThread().setContextClassLoader(loader); final List<URL> jobUrls = Lists.newArrayList(); jobUrls.addAll(nonHadoopURLs); jobUrls.addAll(extensionURLs); System.setProperty( "druid.hadoop.internal.classpath", Joiner.on(File.pathSeparator).join(jobUrls)); boolean determineIntervals = !spec.getDataSchema().getGranularitySpec().bucketIntervals().isPresent(); final Class<?> determineConfigurationMainClass = loader.loadClass(HadoopDetermineConfigInnerProcessing.class.getName()); final Method determineConfigurationMainMethod = determineConfigurationMainClass.getMethod("runTask", String[].class); String[] determineConfigArgs = new String[] { toolbox.getObjectMapper().writeValueAsString(spec), toolbox.getConfig().getHadoopWorkingPath(), toolbox.getSegmentPusher().getPathForHadoop(getDataSource()) }; String config = (String) determineConfigurationMainMethod.invoke(null, new Object[] {determineConfigArgs}); HadoopIngestionSpec indexerSchema = toolbox.getObjectMapper().readValue(config, HadoopIngestionSpec.class); // We should have a lock from before we started running only if interval was specified final String version; if (determineIntervals) { Interval interval = JodaUtils.umbrellaInterval( JodaUtils.condenseIntervals( indexerSchema.getDataSchema().getGranularitySpec().bucketIntervals().get())); TaskLock lock = toolbox.getTaskActionClient().submit(new LockAcquireAction(interval)); version = lock.getVersion(); } else { Iterable<TaskLock> locks = getTaskLocks(toolbox); final TaskLock myLock = Iterables.getOnlyElement(locks); version = myLock.getVersion(); } log.info("Setting version to: %s", version); final Class<?> indexGeneratorMainClass = loader.loadClass(HadoopIndexGeneratorInnerProcessing.class.getName()); final Method indexGeneratorMainMethod = indexGeneratorMainClass.getMethod("runTask", String[].class); String[] indexGeneratorArgs = new String[] {toolbox.getObjectMapper().writeValueAsString(indexerSchema), version}; String segments = (String) indexGeneratorMainMethod.invoke(null, new Object[] {indexGeneratorArgs}); if (segments != null) { List<DataSegment> publishedSegments = toolbox.getObjectMapper().readValue(segments, 
new TypeReference<List<DataSegment>>() {}); toolbox.pushSegments(publishedSegments); return TaskStatus.success(getId()); } else { return TaskStatus.failure(getId()); } }
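// A minimal sketch of the static entry point that run() invokes through the isolated
// classloader, assuming each *InnerProcessing class exposes a runTask(String[]) method
// returning a JSON String, as getMethod("runTask", String[].class) and the String casts
// above imply. The body here is illustrative only; the real class would configure and
// run the Hadoop determine-configuration job before returning the updated spec.
public static class HadoopDetermineConfigInnerProcessingSketch {
  public static String runTask(String[] args) throws Exception {
    final String schemaJson = args[0];        // serialized HadoopIngestionSpec
    final String workingPath = args[1];       // hadoop working path from the task config
    final String segmentOutputPath = args[2]; // segment output path for this datasource

    // ... build and run the Hadoop job against the deserialized spec ...

    // Return the (possibly updated) spec as JSON so the caller can read it back
    // with the ObjectMapper.
    return schemaJson;
  }
}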