/**
 * This is the starting point of the compiler.
 *
 * <p>Delegates all real work to {@code run(args)} and maps each failure category to a
 * distinct process exit code so scripts can distinguish error causes.
 *
 * @param args Command line arguments to control the compiler
 */
public static void main(String[] args) {
  try {
    run(args);
  } catch (FileNotFoundException e) {
    // A source file named on the command line could not be located.
    System.err.println("FILE NOT FOUND: " + e.getLocalizedMessage());
    System.exit(FILE_NOT_FOUND_ERROR);
  } catch (ParseException e) {
    // Syntax error in a source file (this is the compiler's own ParseException,
    // distinct from the commons-cli one caught below).
    System.err.println("PARSE ERROR: " + e.getLocalizedMessage());
    System.exit(PARSE_ERROR);
  } catch (ShadowException e) {
    // General compiler-internal failure; stack trace printed to aid debugging.
    // NOTE(review): this clause precedes the TypeCheckException clause below —
    // if TypeCheckException extends ShadowException, the later clause is
    // unreachable; confirm against the exception hierarchy.
    System.err.println("ERROR IN FILE: " + e.getLocalizedMessage());
    e.printStackTrace();
    System.exit(TYPE_CHECK_ERROR);
  } catch (IOException e) {
    // I/O failure while reading a dependency; reuses the type-check exit code.
    System.err.println("FILE DEPENDENCY ERROR: " + e.getLocalizedMessage());
    e.printStackTrace();
    System.exit(TYPE_CHECK_ERROR);
  } catch (org.apache.commons.cli.ParseException e) {
    // Invalid command-line options: report and show usage help.
    System.err.println("COMMAND LINE ERROR: " + e.getLocalizedMessage());
    Arguments.printHelp();
    System.exit(COMMAND_LINE_ERROR);
  } catch (ConfigurationException e) {
    // Bad compiler configuration: report and show usage help.
    System.err.println("CONFIGURATION ERROR: " + e.getLocalizedMessage());
    Arguments.printHelp();
    System.exit(CONFIGURATION_ERROR);
  } catch (TypeCheckException e) {
    System.err.println("TYPE CHECK ERROR: " + e.getLocalizedMessage());
    System.exit(TYPE_CHECK_ERROR);
  } catch (CompileException e) {
    System.err.println("COMPILATION ERROR: " + e.getLocalizedMessage());
    System.exit(COMPILE_ERROR);
  }
}
/** * How this class is executed from the command line. It will create an instance of an {@link * org.pentaho.platform.plugin.services.importexport.ImportProcessor} and initialize it base on * the options provided on the command line. * * @param args */ public static void main(String[] args) throws Exception { try { // reset the exception information exception = null; final CommandLineProcessor commandLineProcessor = new CommandLineProcessor(args); String legacy = commandLineProcessor.getOptionValue( Messages.getInstance().getString("CommandLineProcessor.INFO_OPTION_LEGACY_KEY"), Messages.getInstance().getString("CommandLineProcessor.INFO_OPTION_LEGACY_NAME"), false, true); useRestService = "false".equals(legacy) ? false : true; // default to new REST version if not provided // new service only switch (commandLineProcessor.getRequestType()) { case HELP: printHelp(); break; case IMPORT: commandLineProcessor.performImport(); break; case EXPORT: commandLineProcessor.performExport(); break; case REST: commandLineProcessor.performREST(); break; } } catch (ParseException parseException) { exception = parseException; System.err.println(parseException.getLocalizedMessage()); printHelp(); } catch (Exception e) { exception = e; e.printStackTrace(); log.error(e.getMessage(), e); } }
/**
 * Main loop of the command client.
 *
 * <p>Parses the command-line options, starts the client, then either runs a single
 * command supplied via {@code --exec} or drops into the interactive prompt.
 *
 * @param args raw command-line arguments
 * @throws Exception if startup or either loop fails
 */
private void mainLoop(String[] args) throws Exception {
  CommandLineParser cmdLineParser = new BasicParser();
  try {
    m_options = cmdLineParser.parse(m_optionsDef, args);
  } catch (ParseException parseError) {
    // Unparseable arguments: show usage before aborting.
    showHelp();
    throw new RuntimeException("Invalid arguments: " + parseError.getLocalizedMessage());
  }
  start();
  // No --exec option means the user wants the interactive prompt.
  if (!m_options.hasOption(OPT_EXEC)) {
    interactiveLoop();
    return;
  }
  String command = m_options.getOptionValue(OPT_EXEC);
  boolean blank = (command == null) || command.trim().isEmpty();
  if (blank) {
    // --exec was given but carries no usable command text.
    showHelp();
    throw new RuntimeException("Invalid value for --exec (-e) option: '" + command + "'");
  }
  executionLoop(command);
}
public static void main(String[] arguments) { final CommandLineParser parser = new PosixParser(); final Options opts = new Options(); CommandLine cmd = null; Pipeline.addLogHelpAndInputOptions(opts); Pipeline.addTikaOptions(opts); Pipeline.addOutputOptions(opts); Pipeline.addJdbcResourceOptions( opts, DEFAULT_JDBC_DRIVER, DEFAULT_DB_PROVIDER, DEFAULT_DATABASE); // entity annotator options setup opts.addOption("Q", "query-file", true, "file with SQL SELECT queries"); OptionBuilder.withLongOpt("query"); OptionBuilder.withArgName("SELECT"); OptionBuilder.hasArgs(); OptionBuilder.withDescription("one or more SQL SELECT queries"); opts.addOption(OptionBuilder.create('q')); opts.addOption( "m", "entity-map", true, "name of the entity map file [" + DEFAULT_MAPPING_FILE + "]"); opts.addOption( "n", "namespace", true, "namespace of the entity annotations [" + DEFAULT_NAMESPACE + "]"); try { cmd = parser.parse(opts, arguments); } catch (final ParseException e) { System.err.println(e.getLocalizedMessage()); System.exit(1); // == exit == } final Logger l = Pipeline.loggingSetup(cmd, opts, "txtfnnl entities [options] <directory|files...>\n"); // output options XmiWriter.Builder writer = Pipeline.configureWriter(cmd, XmiWriter.configure(Pipeline.ensureOutputDirectory(cmd))); // DB resource ExternalResourceDescription jdbcResource = null; try { jdbcResource = Pipeline.getJdbcConnectionResource( cmd, l, DEFAULT_JDBC_DRIVER, DEFAULT_DB_PROVIDER, DEFAULT_DATABASE); } catch (final ClassNotFoundException e) { System.err.println("JDBC resoruce setup failed:"); System.err.println(e.toString()); System.exit(1); // == EXIT == } catch (ResourceInitializationException e) { System.err.println("JDBC resoruce setup failed:"); System.err.println(e.toString()); System.exit(1); // == EXIT == } /* BEGIN entity annotator */ final String queryFileName = cmd.getOptionValue('Q'); final String entityMapPath = cmd.getOptionValue('m', DEFAULT_MAPPING_FILE); final String namespace = 
cmd.getOptionValue('n', DEFAULT_NAMESPACE); String[] queries = cmd.getOptionValues('q'); File entityMap; // m if (queryFileName != null) { final File queryFile = new File(queryFileName); if (!queryFile.isFile() || !queryFile.canRead()) { System.err.print("cannot read query file "); System.err.println(queryFile); System.exit(1); // == EXIT == } String[] fileQueries = null; try { fileQueries = IOUtils.read(new FileInputStream(queryFile), Pipeline.inputEncoding(cmd)).split("\n"); } catch (final Exception e) { System.err.print("cannot read query file "); System.err.print(queryFile); System.err.print(":"); System.err.println(e.getLocalizedMessage()); System.exit(1); // == EXIT == } if (queries == null || queries.length == 0) { queries = fileQueries; } else { final String[] tmp = new String[queries.length + fileQueries.length]; System.arraycopy(queries, 0, tmp, 0, queries.length); System.arraycopy(fileQueries, 0, tmp, queries.length, fileQueries.length); queries = tmp; } } entityMap = new File(entityMapPath); if (!entityMap.isFile() || !entityMap.canRead()) { System.err.print("cannot read entity map file "); System.err.println(entityMapPath); System.exit(1); // == EXIT == } if (queries == null || queries.length == 0) { queries = DEFAULT_SQL_QUERIES; } /* END entity annotator */ try { final Pipeline pipeline = new Pipeline(2); // tika and known entity annotator KnownEntityAnnotator.Builder builder = KnownEntityAnnotator.configure(namespace, queries, entityMap, jdbcResource); pipeline.setReader(cmd); pipeline.configureTika(cmd); pipeline.set(1, Pipeline.multiviewEngine(builder.create())); pipeline.setConsumer(Pipeline.textEngine(writer.create())); pipeline.run(); pipeline.destroy(); } catch (final UIMAException e) { l.severe(e.toString()); System.err.println(e.getLocalizedMessage()); System.exit(1); // == EXIT == } catch (final IOException e) { l.severe(e.toString()); System.err.println(e.getLocalizedMessage()); System.exit(1); // == EXIT == } System.exit(0); }
/**
 * Command-line entry point for the Hive schema tool.
 *
 * <p>Parses the options, validates the required {@code dbType}, then performs exactly one
 * schema operation: {@code info}, {@code upgradeSchema[From]}, or {@code initSchema[To]}.
 * Exits with status 1 on any {@link HiveMetaException}.
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
  CommandLineParser parser = new GnuParser();
  CommandLine line = null;
  String dbType = null;
  String schemaVer = null;
  Options cmdLineOptions = new Options();
  // Argument handling
  initOptions(cmdLineOptions);
  try {
    line = parser.parse(cmdLineOptions, args);
  } catch (ParseException e) {
    System.err.println("HiveSchemaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
    printAndExit(cmdLineOptions);
  }
  if (line.hasOption("help")) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("schemaTool", cmdLineOptions);
    return;
  }
  // dbType is mandatory and must name one of the supported databases.
  if (line.hasOption("dbType")) {
    dbType = line.getOptionValue("dbType");
    // NOTE(review): DB_POSTGRACE looks like a misspelled constant for PostgreSQL;
    // it is declared in HiveSchemaHelper, so it cannot be renamed from here.
    if ((!dbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY)
        && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MSSQL)
        && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL)
        && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE)
        && !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_ORACLE))) {
      System.err.println("Unsupported dbType " + dbType);
      printAndExit(cmdLineOptions);
    }
  } else {
    System.err.println("no dbType supplied");
    printAndExit(cmdLineOptions);
  }
  // Force metastore schema verification on for all tool operations.
  System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "true");
  try {
    HiveSchemaTool schemaTool = new HiveSchemaTool(dbType);
    // Optional connection credentials and behavior flags.
    if (line.hasOption("userName")) {
      schemaTool.setUserName(line.getOptionValue("userName"));
    }
    if (line.hasOption("passWord")) {
      schemaTool.setPassWord(line.getOptionValue("passWord"));
    }
    if (line.hasOption("dryRun")) {
      schemaTool.setDryRun(true);
    }
    if (line.hasOption("verbose")) {
      schemaTool.setVerbose(true);
    }
    if (line.hasOption("dbOpts")) {
      schemaTool.setDbOpts(line.getOptionValue("dbOpts"));
    }
    // Exactly one action option is honored, checked in priority order.
    if (line.hasOption("info")) {
      schemaTool.showInfo();
    } else if (line.hasOption("upgradeSchema")) {
      schemaTool.doUpgrade();
    } else if (line.hasOption("upgradeSchemaFrom")) {
      schemaVer = line.getOptionValue("upgradeSchemaFrom");
      schemaTool.doUpgrade(schemaVer);
    } else if (line.hasOption("initSchema")) {
      schemaTool.doInit();
    } else if (line.hasOption("initSchemaTo")) {
      schemaVer = line.getOptionValue("initSchemaTo");
      schemaTool.doInit(schemaVer);
    } else {
      System.err.println("no valid option supplied");
      printAndExit(cmdLineOptions);
    }
  } catch (HiveMetaException e) {
    System.err.println(e);
    // Full stack trace only when the user asked for verbose output.
    if (line.hasOption("verbose")) {
      e.printStackTrace();
    }
    System.err.println("*** schemaTool failed ***");
    System.exit(1);
  }
  System.out.println("schemaTool completed");
}
/**
 * Builds a federated transit data bundle from the command line.
 *
 * <p>Expects at least two positional arguments: one or more inputs (GTFS directories/zips
 * or Spring context XML files) followed by the output bundle path. Assembles the Spring
 * bean definitions for the bundle creator, applies optional overrides, and runs the
 * creator. Exits 0 on success, -1 on usage or build failure.
 *
 * @param args raw command-line arguments
 * @throws Exception declared for the CLI contract; fatal errors call {@code System.exit}
 */
public void run(String[] args) throws Exception {
  try {
    Parser parser = new GnuParser();
    Options options = new Options();
    buildOptions(options);
    CommandLine commandLine = parser.parse(options, args);
    String[] remainingArgs = commandLine.getArgs();
    // Need at least one input and the trailing output path.
    if (remainingArgs.length < 2) {
      printUsage();
      System.exit(-1);
    }
    FederatedTransitDataBundleCreator creator = new FederatedTransitDataBundleCreator();
    Map<String, BeanDefinition> beans = new HashMap<String, BeanDefinition>();
    creator.setContextBeans(beans);
    List<GtfsBundle> gtfsBundles = new ArrayList<GtfsBundle>();
    List<String> contextPaths = new ArrayList<String>();
    // All args except the last are inputs: directories/zips become GTFS bundles,
    // anything else is treated as a Spring context file.
    for (int i = 0; i < remainingArgs.length - 1; i++) {
      File path = new File(remainingArgs[i]);
      if (path.isDirectory() || path.getName().endsWith(".zip")) {
        GtfsBundle gtfsBundle = new GtfsBundle();
        gtfsBundle.setPath(path);
        gtfsBundles.add(gtfsBundle);
      } else {
        contextPaths.add("file:" + path);
      }
    }
    if (!gtfsBundles.isEmpty()) {
      BeanDefinitionBuilder bean = BeanDefinitionBuilder.genericBeanDefinition(GtfsBundles.class);
      bean.addPropertyValue("bundles", gtfsBundles);
      beans.put("gtfs-bundles", bean.getBeanDefinition());
    }
    // GTFS storage: database-backed context vs. in-memory relational DAO.
    if (commandLine.hasOption(ARG_USE_DATABASE_FOR_GTFS)) {
      contextPaths.add("classpath:org/onebusaway/gtfs/application-context.xml");
    } else {
      BeanDefinitionBuilder bean =
          BeanDefinitionBuilder.genericBeanDefinition(GtfsRelationalDaoImpl.class);
      beans.put("gtfsRelationalDaoImpl", bean.getBeanDefinition());
    }
    // Optional JDBC data source wiring (url, driver, credentials).
    if (commandLine.hasOption(ARG_DATASOURCE_URL)) {
      String dataSourceUrl = commandLine.getOptionValue(ARG_DATASOURCE_URL);
      BeanDefinitionBuilder bean =
          BeanDefinitionBuilder.genericBeanDefinition(DriverManagerDataSource.class);
      bean.addPropertyValue("url", dataSourceUrl);
      if (commandLine.hasOption(ARG_DATASOURCE_DRIVER_CLASS_NAME))
        bean.addPropertyValue(
            "driverClassName", commandLine.getOptionValue(ARG_DATASOURCE_DRIVER_CLASS_NAME));
      if (commandLine.hasOption(ARG_DATASOURCE_USERNAME))
        bean.addPropertyValue("username", commandLine.getOptionValue(ARG_DATASOURCE_USERNAME));
      if (commandLine.hasOption(ARG_DATASOURCE_PASSWORD))
        bean.addPropertyValue("password", commandLine.getOptionValue(ARG_DATASOURCE_PASSWORD));
      beans.put("dataSource", bean.getBeanDefinition());
    }
    // Optional OpenStreetMap data provider.
    if (commandLine.hasOption(ARG_OSM)) {
      File osmPath = new File(commandLine.getOptionValue(ARG_OSM));
      BeanDefinitionBuilder bean =
          BeanDefinitionBuilder.genericBeanDefinition(FileBasedOpenStreetMapProviderImpl.class);
      bean.addPropertyValue("path", osmPath);
      beans.put("osmProvider", bean.getBeanDefinition());
    }
    File outputPath = new File(remainingArgs[remainingArgs.length - 1]);
    // "Only if does-not-exist" mode: skip the build when the bundle already exists.
    if (commandLine.hasOption(ARG_ONLY_IF_DNE) && outputPath.exists()) {
      System.err.println("Bundle path already exists. Exiting...");
      System.exit(0);
    }
    if (commandLine.hasOption(ARG_RANDOMIZE_CACHE_DIR)) creator.setRandomizeCacheDir(true);
    if (commandLine.hasOption(ARG_BUNDLE_KEY)) {
      String key = commandLine.getOptionValue(ARG_BUNDLE_KEY);
      creator.setBundleKey(key);
    }
    /**
     * Optionally override any system properties (ok this duplicates existing functionality, yes,
     * but it allows for -D arguments after the main class)
     */
    if (commandLine.hasOption("D")) {
      Properties props = commandLine.getOptionProperties("D");
      for (Object key : props.keySet()) {
        String propName = (String) key;
        String propValue = props.getProperty(propName);
        System.setProperty(propName, propValue);
      }
    }
    /**
     * Optionally pass additional bean property overrides (-P arguments) straight through to
     * the bundle creator, unlike -D above which sets JVM system properties.
     */
    if (commandLine.hasOption("P")) {
      Properties props = commandLine.getOptionProperties("P");
      creator.setAdditionalBeanPropertyOverrides(props);
    }
    setStagesToSkip(commandLine, creator);
    creator.setOutputPath(outputPath);
    creator.setContextPaths(contextPaths);
    try {
      // Copy any extra resources into the output bundle before building.
      if (commandLine.hasOption(ARG_ADDITIONAL_RESOURCES_DIRECTORY)) {
        File additionalResourceDirectory =
            new File(commandLine.getOptionValue(ARG_ADDITIONAL_RESOURCES_DIRECTORY));
        copyFiles(additionalResourceDirectory, outputPath);
      }
      creator.run();
    } catch (Exception ex) {
      _log.error("error building transit data bundle", ex);
      System.exit(-1);
    }
  } catch (ParseException ex) {
    System.err.println(ex.getLocalizedMessage());
    printUsage();
    System.exit(-1);
  }
  System.exit(0);
}
/** Hadoop {@link Tool} implementation */ @Override public int run(String[] args) throws Exception { Options options = new Options(); configureOptions(options); CommandLineParser parser = new GnuParser(); try { CommandLine commandLine = parser.parse(options, args); if (commandLine.hasOption(VERBOSE)) { Logger.getGlobal().setLevel(Level.FINEST); } if (commandLine.hasOption(QUIET)) { Logger.getGlobal().setLevel(Level.OFF); } String transformationLocation = commandLine.getOptionValue(TRANSFORMATION); String sourcemmLocation = commandLine.getOptionValue(SOURCE_PACKAGE); String targetmmLocation = commandLine.getOptionValue(TARGET_PACKAGE); String recordsLocation = commandLine.getOptionValue(RECORDS_FILE); String inputLocation = commandLine.getOptionValue(INPUT_MODEL); String outputLocation = commandLine.getOptionValue( OUTPUT_MODEL, new Path(inputLocation).suffix(".out.xmi").toString()); int recommendedMappers = 1; if (commandLine.hasOption(RECOMMENDED_MAPPERS)) { recommendedMappers = ((Number) commandLine.getParsedOptionValue(RECOMMENDED_MAPPERS)).intValue(); } Configuration conf = this.getConf(); Job job = Job.getInstance(conf, JOB_NAME); // Configure classes job.setJarByClass(ATLMRMaster.class); job.setMapperClass(ATLMRMapper.class); job.setReducerClass(ATLMRReducer.class); job.setInputFormatClass(NLineInputFormat.class); job.setOutputFormatClass(SequenceFileOutputFormat.class); job.setMapOutputKeyClass(LongWritable.class); job.setMapOutputValueClass(Text.class); job.setNumReduceTasks(1); // Configure MapReduce input/outputs Path recordsPath = new Path(recordsLocation); FileInputFormat.setInputPaths(job, recordsPath); String timestamp = new SimpleDateFormat("yyyyMMddhhmm").format(new Date()); String outDirName = "atlmr-out-" + timestamp + "-" + UUID.randomUUID(); FileOutputFormat.setOutputPath( job, new Path(job.getWorkingDirectory().suffix(Path.SEPARATOR + outDirName).toUri())); // Configure records per map FileSystem fileSystem = FileSystem.get(recordsPath.toUri(), 
conf); InputStream inputStream = fileSystem.open(recordsPath); long linesPerMap = (long) Math.ceil((double) countLines(inputStream) / (double) recommendedMappers); job.getConfiguration().setLong(NLineInputFormat.LINES_PER_MAP, linesPerMap); // Configure ATL related inputs/outputs job.getConfiguration().set(TRANSFORMATION, transformationLocation); job.getConfiguration().set(SOURCE_PACKAGE, sourcemmLocation); job.getConfiguration().set(TARGET_PACKAGE, targetmmLocation); job.getConfiguration().set(INPUT_MODEL, inputLocation); job.getConfiguration().set(OUTPUT_MODEL, outputLocation); Logger.getGlobal().log(Level.INFO, "Starting Job execution"); long begin = System.currentTimeMillis(); int returnValue = job.waitForCompletion(true) ? STATUS_OK : STATUS_ERROR; long end = System.currentTimeMillis(); Logger.getGlobal() .log( Level.INFO, MessageFormat.format( "Job execution ended in {0}s with status code {1}", (end - begin) / 1000, returnValue)); return returnValue; } catch (ParseException e) { System.err.println(e.getLocalizedMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.setOptionComparator(new OptionComarator<>()); try { formatter.setWidth(Math.max(Terminal.getTerminal().getTerminalWidth(), 80)); } catch (Throwable t) { // Nothing to do... } ; formatter.printHelp("yarn jar <this-file.jar>", options, true); return STATUS_ERROR; } }
@Override public void run(String[] args) throws Exception { CommandLineParser parser = new GnuParser(); CommandLine line = null; String dbType = null; String schemaVer = null; Options cmdLineOptions = new Options(); String configFileName = null; // Argument handling initOptions(cmdLineOptions); try { line = parser.parse(cmdLineOptions, args); } catch (ParseException e) { System.err.println("SentrySchemaTool:Parsing failed. Reason: " + e.getLocalizedMessage()); printAndExit(cmdLineOptions); } if (line.hasOption("help")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("schemaTool", cmdLineOptions); return; } if (line.hasOption("dbType")) { dbType = line.getOptionValue("dbType"); if ((!dbType.equalsIgnoreCase(SentrySchemaHelper.DB_DERBY) && !dbType.equalsIgnoreCase(SentrySchemaHelper.DB_MYSQL) && !dbType.equalsIgnoreCase(SentrySchemaHelper.DB_POSTGRACE) && !dbType.equalsIgnoreCase(SentrySchemaHelper.DB_ORACLE) && !dbType.equalsIgnoreCase(SentrySchemaHelper.DB_DB2))) { System.err.println("Unsupported dbType " + dbType); printAndExit(cmdLineOptions); } } else { System.err.println("no dbType supplied"); printAndExit(cmdLineOptions); } if (line.hasOption(ServiceConstants.ServiceArgs.CONFIG_FILE_LONG)) { configFileName = line.getOptionValue(ServiceConstants.ServiceArgs.CONFIG_FILE_LONG); } else { System.err.println("no config file specified"); printAndExit(cmdLineOptions); } try { SentrySchemaTool schemaTool = new SentrySchemaTool(SentryService.loadConfig(configFileName), dbType); if (line.hasOption("userName")) { schemaTool.setUserName(line.getOptionValue("userName")); } if (line.hasOption("passWord")) { schemaTool.setPassWord(line.getOptionValue("passWord")); } if (line.hasOption("dryRun")) { schemaTool.setDryRun(true); } if (line.hasOption("verbose")) { schemaTool.setVerbose(true); } if (line.hasOption("dbOpts")) { schemaTool.setDbOpts(line.getOptionValue("dbOpts")); } if (line.hasOption("info")) { schemaTool.showInfo(); } else if 
(line.hasOption("upgradeSchema")) { schemaTool.doUpgrade(); } else if (line.hasOption("upgradeSchemaFrom")) { schemaVer = line.getOptionValue("upgradeSchemaFrom"); schemaTool.doUpgrade(schemaVer); } else if (line.hasOption("initSchema")) { schemaTool.doInit(); } else if (line.hasOption("initSchemaTo")) { schemaVer = line.getOptionValue("initSchemaTo"); schemaTool.doInit(schemaVer); } else { System.err.println("no valid option supplied"); printAndExit(cmdLineOptions); } } catch (SentryUserException e) { System.err.println(e); if (line.hasOption("verbose")) { e.printStackTrace(); } System.err.println("*** Sentry schemaTool failed ***"); System.exit(1); } catch (MalformedURLException e) { System.err.println(e); if (line.hasOption("verbose")) { e.printStackTrace(); } System.err.println("*** Sentry schemaTool failed ***"); System.exit(1); } System.out.println("Sentry schemaTool completed"); }