private static JSONObject runCmd(List<String> cmd) throws Exception {
  ProcessBuilder builder = new ProcessBuilder(cmd);
  StopWatch stopWatch = new StopWatch();
  stopWatch.start();
  Process process = builder.start();
  int exitCode = process.waitFor();
  stopWatch.stop();
  String stdout = streamToString(process.getInputStream());
  String stderr = streamToString(process.getErrorStream());
  log.warn(String.format("stdout:%n%s", stdout));
  log.warn(String.format("stderr:%n%s", stderr));
  String message = createOutputMessage(stdout, stderr);
  if (exitCode == 0) {
    log.info(
        String.format(
            "Command succeeded in %dms: %s", stopWatch.getTime(), StringUtils.join(cmd, " ")));
  } else {
    log.warn(
        String.format(
            "Command failed with code=%d in %dms: %s",
            exitCode, stopWatch.getTime(), StringUtils.join(cmd, " ")));
  }
  JSONObject obj = new JSONObject();
  obj.put("message", message);
  return obj;
}

@Test
public void testTimeFor_FiftyThousand() {
  Choir testChoir = new Choir();
  Address choirAddress = new Address();
  choirAddress.setCity("Omaha");
  choirAddress.setState("NE");
  testChoir.setChoirName("Omaha Children's Choir");
  testChoir.setAddress(choirAddress);
  StopWatch timer = new StopWatch();
  String result = null;
  timer.start();
  for (int i = 0; i < 50000; i++) {
    result = TestUtility.post(timeTestBase + "names", testChoir.toJson(false));
  }
  timer.stop();
  assertNotNull(result);
  say("\n\nTime taken = " + timer.getTime());
  say("Time taken (nano) = " + timer.getNanoTime());
}

/**
 * @param pCmd the command and its arguments
 * @param dir the working directory for the process
 * @param pOut writer that receives the process's standard output
 * @param pErr writer that receives the process's standard error
 * @param env environment variables as {@code name=value} pairs
 * @return the command result, including the exit code and captured output
 */
private CommandResult exec(
    String[] pCmd, File dir, StringWriter pOut, StringWriter pErr, String[] env)
    throws IOException, InterruptedException {
  int out = 0;
  String pCmdString = ArrayUtils.toString(pCmd);
  if (_log.isDebugEnabled())
    _log.debug(
        "Executing '" + pCmdString + "' with Environment '" + ArrayUtils.toString(env) + "'");
  StopWatch clock = new StopWatch();
  clock.start();
  try {
    process = Runtime.getRuntime().exec(pCmd, env, dir);
    out = handleProcess(process, pCmdString, pOut, pErr, _outputList, sig_interrupt);
  } finally {
    this.cleanUpProcess();
    clock.stop();
    if (_log.isInfoEnabled())
      _log.info("'" + pCmdString + "' completed in " + clock.getTime() + " ms");
  }
  if (sig_interrupt.getValue()) {
    out = -9999;
  }
  CommandResult result = new CommandResult(pCmdString, out, pOut.toString(), pErr.toString());
  return result;
}

public static void main(String[] args) throws Exception {
  List<Book> books = new ArrayList<Book>();
  books.add(new Book("The Hitchhiker's Guide to the Galaxy", 5.70, true, "0"));
  books.add(new Book("Life, the Universe and Everything", 5.60, false, "N"));
  books.add(new Book("The > Restaurant at the < End of the Universe & all", 5.40, true, "Yes"));
  Map<String, Object> model = new HashMap<String, Object>();
  model.put("books", books);
  model.put("pageName", "My Bookshelf");
  JadeConfiguration jadeConfiguration = new JadeConfiguration();
  jadeConfiguration.setPrettyPrint(true);
  jadeConfiguration.setMode(Jade4J.Mode.XML);
  JadeTemplate template = jadeConfiguration.getTemplate("src/main/java/jade2j/index.jade");
  StopWatch stopWatch = new StopWatch();
  stopWatch.start();
  String result = jadeConfiguration.renderTemplate(template, model);
  stopWatch.stop();
  System.out.println(result);
  System.out.println(stopWatch.toString());
}

public T execute() throws Exception {
  stopWatch.start();
  T result = executeImpl();
  stopWatch.stop();
  logger.debug("Message={} - Elapsed time: {}", message, stopWatch.toString());
  return result;
}

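/*
 * Illustrative sketch only (not from the original source): one way a timed template method like
 * the execute() above might be packaged, assuming org.apache.commons.lang3.time.StopWatch and
 * SLF4J are on the classpath. The class and field names (TimedCommand, stopWatch, logger,
 * message) are assumptions, not taken from the original code base.
 */
import org.apache.commons.lang3.time.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class TimedCommand<T> {
  protected final StopWatch stopWatch = new StopWatch();
  protected final Logger logger = LoggerFactory.getLogger(getClass());
  protected final String message;

  protected TimedCommand(String message) {
    this.message = message;
  }

  /** Subclasses supply the work to be timed. */
  protected abstract T executeImpl() throws Exception;

  public T execute() throws Exception {
    stopWatch.start();
    T result = executeImpl();
    stopWatch.stop();
    logger.debug("Message={} - Elapsed time: {}", message, stopWatch.toString());
    return result;
  }
}
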
public CommandResult exec(
    String[] pCmd, Map<String, String> pEnv, boolean useSysEnv, Writer pOut, Writer pErr)
    throws IOException, InterruptedException {
  int out = 0;
  String pCmdString = ArrayUtils.toString(pCmd);
  ProcessBuilder builder = new ProcessBuilder();
  builder.command(pCmd);
  Map<String, String> env = builder.environment();
  if (!useSysEnv) env.clear();
  env.putAll(pEnv);
  logExec(pCmdString, env);
  StopWatch clock = new StopWatch();
  clock.start();
  try {
    process = builder.start();
    out = handleProcess(process, pCmdString, pOut, pErr, _outputList, sig_interrupt);
  } finally {
    this.cleanUpProcess();
    clock.stop();
    if (_log.isInfoEnabled())
      _log.info("'" + pCmdString + "' completed in " + clock.getTime() + " ms");
  }
  if (sig_interrupt.getValue()) {
    out = -9999;
  }
  CommandResult result = new CommandResult(pCmdString, out, pOut.toString(), pErr.toString());
  return result;
}

public CommandResult exec(String[] pCmd, File dir, Writer pOut, Writer pErr)
    throws IOException, InterruptedException {
  ProcessBuilder builder = new ProcessBuilder();
  Map<String, String> env = builder.environment();
  int out = 0;
  String pCmdString = ArrayUtils.toString(pCmd);
  logExec(pCmdString, env);
  StopWatch clock = new StopWatch();
  clock.start();
  try {
    process = Runtime.getRuntime().exec(pCmd, null, dir);
    out = handleProcess(process, pCmdString, pOut, pErr, _outputList, sig_interrupt);
  } finally {
    this.cleanUpProcess();
    clock.stop();
    if (_log.isInfoEnabled())
      _log.info("'" + pCmdString + "' completed in " + clock.getTime() + " ms");
  }
  if (sig_interrupt.getValue()) {
    out = -9999;
  }
  CommandResult result = new CommandResult(pCmdString, out, pOut.toString(), pErr.toString());
  return result;
}

@Override
public void start(Stage primaryStage) {
  this.primaryStage = primaryStage;
  ApplicationContextHolder.getContext();
  startLogAppender();
  Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionLogger());
  primaryStage.setScene(initScene());
  primaryStage
      .getIcons()
      .addAll(ApplicationContextHolder.getContext().getBeansOfType(Image.class).values());
  primaryStage.setTitle(ApplicationContextHolder.getContext().getBean(Pdfsam.class).name());
  primaryStage.setOnCloseRequest(e -> Platform.exit());
  initWindowsStatusController(primaryStage);
  initOverwriteDialogController(primaryStage);
  initActiveModule();
  primaryStage.show();
  requestCheckForUpdateIfNecessary();
  requestLatestNewsPanelDisplay();
  eventStudio().addAnnotatedListeners(this);
  closeSplash();
  STOPWATCH.stop();
  LOG.info(
      DefaultI18nContext.getInstance()
          .i18n(
              "Started in {0}",
              DurationFormatUtils.formatDurationWords(STOPWATCH.getTime(), true, true)));
}

/**
 * The main method to check the mappabilities of the given read file names.
 *
 * @param btOutputFileName Bowtie output file name
 * @param fileTileLength Tile length
 * @param outputFileName Output file name which the result will be saved into
 * @param readFileName Reads file name
 * @param chr1FileLines Lines of the chromosome file
 * @throws IOException
 */
public void checkMappability(
    String btOutputFileName,
    int fileTileLength,
    String outputFileName,
    String readFileName,
    List<String> chr1FileLines)
    throws IOException {
  System.out.println("Creating [" + outputFileName + "]...");
  StopWatch stopWatch = new StopWatch();
  stopWatch.start();
  System.out.println("Reading [" + btOutputFileName + "]");
  List<String> btOutputFileLines = FileUtils.getInstance().readFile(btOutputFileName);
  System.out.println("Done reading [" + btOutputFileName + "]");
  System.out.println("Reading [" + readFileName + "]");
  List<String> readFileLines = FileUtils.getInstance().readFile(readFileName);
  System.out.println("Done reading [" + readFileName + "]");
  Map<String, Mappability> result =
      createMappability(btOutputFileLines, readFileLines, chr1FileLines);
  FileUtils.getInstance().writeFile(result, outputFileName, fileTileLength);
  stopWatch.stop();
  System.out.println(
      "["
          + outputFileName
          + "] is done in ["
          + TimeUnit.MILLISECONDS.convert(stopWatch.getNanoTime(), TimeUnit.NANOSECONDS)
          + "] MILLISECONDS");
}

/**
 * Performs an inverse discrete Fourier transform, with the result having the given number of
 * rows and columns. See MATLAB's ifft2 for more examples.
 *
 * @param input the input to transform
 * @param rows the number of rows for the transform
 * @param cols the number of columns for the transform
 * @return the 2d inverse discrete Fourier transform
 */
public static ComplexFloatMatrix complexInverseDisceteFourierTransform(
    FloatMatrix input, int rows, int cols) {
  ComplexFloatMatrix base = null;
  StopWatch watch = new StopWatch();
  watch.start();
  // pad
  if (input.rows < rows || input.columns < cols)
    base = MatrixUtil.complexPadWithZeros(input, rows, cols);
  // truncation
  else if (input.rows > rows || input.columns > cols) {
    base = new ComplexFloatMatrix(input);
    base =
        base.get(
            MatrixUtil.toIndices(RangeUtils.interval(0, rows)),
            MatrixUtil.toIndices(RangeUtils.interval(0, cols)));
  } else base = new ComplexFloatMatrix(input);
  ComplexFloatMatrix temp = new ComplexFloatMatrix(base.rows, base.columns);
  ComplexFloatMatrix ret = new ComplexFloatMatrix(base.rows, base.columns);
  for (int i = 0; i < base.columns; i++) {
    ComplexFloatMatrix column = base.getColumn(i);
    temp.putColumn(i, complexInverseDisceteFourierTransform1d(column));
  }
  for (int i = 0; i < ret.rows; i++) {
    ComplexFloatMatrix row = temp.getRow(i);
    ret.putRow(i, complexInverseDisceteFourierTransform1d(row));
  }
  watch.stop();
  return ret;
}

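/*
 * Illustrative usage only (not from the original source): assumes jblas is on the classpath and
 * that the transform above lives in some utility class, referred to here as the hypothetical
 * "FFT". The 4x4 input and the 8x8 target size are made-up values. Because the input is smaller
 * than the requested size, it is zero-padded before the column-wise and row-wise 1d passes.
 */
import org.jblas.ComplexFloatMatrix;
import org.jblas.FloatMatrix;

public class InverseDftDemo {
  public static void main(String[] args) {
    FloatMatrix input = FloatMatrix.randn(4, 4);
    // "FFT" is a placeholder for whatever class hosts the method shown above.
    ComplexFloatMatrix spectrum = FFT.complexInverseDisceteFourierTransform(input, 8, 8);
    System.out.println("Result dimensions: " + spectrum.rows + " x " + spectrum.columns);
  }
}
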
private void method2() {
  if (stopWatch2.isStarted()) {
    stopWatch2.resume();
  } else {
    stopWatch2.start();
  }
  try {
    Thread.sleep(1000);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  }
  stopWatch2.suspend();
}

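/*
 * Self-contained sketch (not from the original source) of the suspend/resume pattern used in
 * method2() above, assuming org.apache.commons.lang3.time.StopWatch; the class name and the
 * sleep durations are illustrative. It shows that the watch accumulates only the time spent
 * between resume and suspend, not the idle time between invocations.
 */
import org.apache.commons.lang3.time.StopWatch;

public class SuspendResumeDemo {
  private final StopWatch stopWatch2 = new StopWatch();

  private void method2() throws InterruptedException {
    if (stopWatch2.isStarted()) {
      stopWatch2.resume();
    } else {
      stopWatch2.start();
    }
    Thread.sleep(50); // timed work
    stopWatch2.suspend();
  }

  public static void main(String[] args) throws InterruptedException {
    SuspendResumeDemo demo = new SuspendResumeDemo();
    for (int i = 0; i < 3; i++) {
      demo.method2();
      Thread.sleep(100); // idle time, excluded from the measurement
    }
    // Expect roughly 150 ms accumulated, not ~450 ms of wall-clock time.
    System.out.println("Accumulated: " + demo.stopWatch2.getTime() + " ms");
  }
}
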
@Test
@Ignore
public void encode() {
  StopWatch stopWatch = new StopWatch();
  Coder coder = new JsonSmartCoder();
  stopWatch.start();
  byte[] bytes = coder.encode(new Bean());
  stopWatch.stop();
  System.out.printf("Encoded %d bytes in %s:%n%s%n", bytes.length, stopWatch, new String(bytes));
  Bean bean = coder.decode(bytes);
  System.out.println(bean);
}

/** Unpauses the collection of mouse points. */
public void unpause() {
  if (pause) {
    System.out.println("unpause");
    stopWatch.stop();
    // Adds a mouse event that includes the new mouse location and also the number of
    // milliseconds that the collection has been paused.
    mouseEvents.add(
        new SimpleMouseEvent(
            MouseInfo.getPointerInfo().getLocation().x,
            MouseInfo.getPointerInfo().getLocation().y,
            (int) stopWatch.getTime()));
    System.out.println(mouseEvents.getLast().toString());
    stopWatch.reset();
    pause = false;
  }
}

public static void main(String args[]) {
  Connection c = null;
  PreparedStatement stmt = null;
  try {
    Class.forName("org.postgresql.Driver");
    c = DriverManager.getConnection("jdbc:postgresql://localhost:5432/test", "pdv", "pdv");
    System.out.println("Opened database successfully");
    List<String> results = new ArrayList<String>();
    File dir = new File("D:/jboss/eclipse/workspace/LTF");
    Iterator<File> files =
        FileUtils.iterateFilesAndDirs(dir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE);
    StopWatch timerAll = new StopWatch();
    timerAll.start();
    while (files.hasNext()) {
      File file = files.next();
      if (file.isFile()) {
        StopWatch timeForAFile = new StopWatch();
        timeForAFile.start();
        String toDB = encodeFileToBase64Binary(file);
        String sql =
            "INSERT INTO test (file_id,file_name,file_data,file_date) VALUES (nextval('test_file_id_seq'),?,?,?)";
        stmt = c.prepareStatement(sql);
        stmt.setString(1, file.getAbsolutePath());
        stmt.setString(2, toDB);
        stmt.setDate(3, new java.sql.Date(Calendar.getInstance().getTimeInMillis()));
        stmt.executeUpdate();
        timeForAFile.stop();
        System.out.println(timeForAFile.toString());
      }
    }
    timerAll.stop();
    System.out.println("=================================================");
    System.out.println(timerAll.toString());
    stmt.close();
    /*
     * Statement statement = null; statement = c.createStatement();
     * ResultSet rs = statement.executeQuery("select file_data from test limit 1");
     * while (rs.next()) { String encoded = rs.getString("file_data");
     * byte[] decoded = Base64.getDecoder().decode(encoded);
     * FileOutputStream fos = new FileOutputStream("c:/temp/2.jpg");
     * fos.write(decoded); fos.close(); }
     */
    c.close();
  } catch (Exception e) {
    System.err.println(e.getClass().getName() + ": " + e.getMessage());
    System.exit(0);
  }
  System.out.println("All files inserted successfully");
}

public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
    throws IOException, ServletException {
  HttpServletRequest httpRequest = (HttpServletRequest) request;
  // Skip timing if the request path matches one of the excluded paths.
  if (StringUtils.isNotBlank(
      UrlUtils.urlMatch(excludePathSet, RequestUtils.getReqPath(httpRequest)))) {
    chain.doFilter(request, response);
    return;
  }
  StopWatch stopWatch = new StopWatch();
  stopWatch.start();
  request.setAttribute(START_TIME, stopWatch);
  chain.doFilter(request, response);
  stopWatch.stop();
  if (null != request.getAttribute(START_TIME)) {
    logger.info(
        "URL[" + httpRequest.getRequestURI() + "]executeTime[" + stopWatch.toString() + "]");
  }
}

/** Starts adding points to the mouseEvents list every 2 milliseconds. */
private void collect() {
  int same = 0; // The number of times that the mouse has been in the same position.
  pause = false; // Whether or not the mouse is currently idle.
  while (record) {
    if (!pause) {
      // If the mouse is not currently idle, a new point (x and y coordinates) is added to the
      // mouseEvents list.
      mouseEvents.add(
          new SimpleMouseEvent(
              MouseInfo.getPointerInfo().getLocation().x,
              MouseInfo.getPointerInfo().getLocation().y,
              0));
    }
    // If the mouse is currently marked as idle but movement has been detected, i.e. the current
    // position differs from where the mouse was when it was marked as idle...
    if (pause
        && ((MouseInfo.getPointerInfo().getLocation().x) != mouseEvents.getLast().gx()
            || MouseInfo.getPointerInfo().getLocation().y != mouseEvents.getLast().gy())) {
      unpause();
    }
    // Detects if the current position of the mouse is the same as the last time it was checked.
    if (mouseEvents.size() > 5
        && (mouseEvents
            .getLast()
            .toString()
            .equals(mouseEvents.get(mouseEvents.size() - 2).toString()))) {
      // If the position is the same, the "same" count is increased.
      same++;
      // Once the mouse has been in the same place for 10 cycles, it is considered idle.
      if (same == 10) {
        pause = true;
        // The stopwatch counts how long the mouse stays idle.
        stopWatch.start();
        System.out.println("Pause");
      }
    } else {
      // If the mouse has moved since the last cycle, reset the "same" counter.
      same = 0;
    }
    try {
      Thread.sleep(2); // Delay between cycles.
    } catch (InterruptedException ex) {
      Thread.currentThread().interrupt();
    }
    if (!record) {
      gui.log("Recording Stopped.");
      gui.refreshActionList();
    }
  }
}

public void ingestPairs(int vert) {
  StopWatch timer = new StopWatch();
  timer.start();
  for (int i = 0; i < vert; i++) {
    Vertex person = graph.addVertex("p" + i);
    person.setProperty("type", "person");
    person.setProperty("age", i);
    person.setProperty("firstname", "John" + i);
    person.setProperty("lastname", "Doe" + i);
    Vertex location = graph.addVertex("l" + i);
    location.setProperty("type", "location");
    location.setProperty("address", "address" + i);
    Edge edge = graph.addEdge(i, person, location, "e");
    edge.setProperty("type", "lived at");
    edge.setProperty("from", "199" + i);
    edge.setProperty("to", "201" + i);
  }
  timer.stop();
  System.out.println("Created graph in " + timer.getTime() + " ms\n");
}

@Override
public GrepResults execute(List<GrepRequest> grepRequests) {
  GrepResults results = new GrepResults();
  ExecutorService executorService = null;
  StackSessionPool.getInstance().startPool();
  try {
    clock.start();
    executorService =
        Executors.newFixedThreadPool(
            maxGrepTaskThreads(this.optionsDecorator, grepRequests.size()));
    List<GrepTask> grepTasks = new ArrayList<GrepTask>();
    for (GrepRequest grepRequest : grepRequests) {
      grepTasks.add(new GrepTask(grepRequest));
    }
    List<Future<List<GrepResult>>> grepTaskFutures = executorService.invokeAll(grepTasks);
    for (Future<List<GrepResult>> future : grepTaskFutures) {
      for (GrepResult singleGrepResult : future.get()) {
        results.add(singleGrepResult);
      }
    }
  } catch (Exception e) {
    throw new RuntimeException("Error when executing the GrepTask", e);
  } finally {
    clock.stop();
    results.setExecutionTime(clock.getTime());
    if (executorService != null) {
      executorService.shutdownNow();
    }
    try {
      StackSessionPool.getInstance().getPool().close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  return results;
}

@Override
protected List<RssItem> doInBackground(Void... urls) {
  DatabaseConnectionOrm dbConn = new DatabaseConnectionOrm(context);
  SharedPreferences mPrefs = PreferenceManager.getDefaultSharedPreferences(context);
  boolean onlyUnreadItems = mPrefs.getBoolean(SettingsActivity.CB_SHOWONLYUNREAD_STRING, false);
  boolean onlyStarredItems = false;
  if (idFolder != null
      && idFolder
          == SubscriptionExpandableListAdapter.SPECIAL_FOLDERS.ALL_STARRED_ITEMS.getValue()) {
    onlyStarredItems = true;
  }
  String sqlSelectStatement = null;
  if (idFeed != null) {
    sqlSelectStatement =
        dbConn.getAllItemsIdsForFeedSQL(idFeed, onlyUnreadItems, onlyStarredItems, sortDirection);
  } else if (idFolder != null) {
    if (idFolder
        == SubscriptionExpandableListAdapter.SPECIAL_FOLDERS.ALL_STARRED_ITEMS.getValue()) {
      onlyUnreadItems = false;
    }
    sqlSelectStatement =
        dbConn.getAllItemsIdsForFolderSQL(idFolder, onlyUnreadItems, sortDirection);
  }
  if (sqlSelectStatement != null) {
    dbConn.insertIntoRssCurrentViewTable(sqlSelectStatement);
  }
  StopWatch sw = new StopWatch();
  sw.start();
  List<RssItem> items = dbConn.getCurrentRssItemView(0);
  sw.stop();
  Log.v(TAG, "Time needed (init loading): " + sw.toString());
  return items;
}

public static void main(String[] args) {
  LOGGER.info("in main");
  StopWatch stopWatch = new StopWatch();
  stopWatch.start();
  try {
    Thread.sleep(3000);
  } catch (InterruptedException e) {
    e.printStackTrace();
  }
  stopWatch.split();
  try {
    Thread.sleep(3000);
  } catch (InterruptedException e) {
    e.printStackTrace();
  }
  stopWatch.stop();
  // System.out.println("split time " + stopWatch.toSplitString());
  System.out.println("time elapsed is " + stopWatch.getTime());
}

@Override
public void init() {
  STOPWATCH.start();
  UserContext userContext = new DefaultUserContext();
  System.setProperty(
      EventStudio.MAX_QUEUE_SIZE_PROP, Integer.toString(userContext.getNumberOfLogRows()));
  LOG.info("Starting PDFsam");
  System.setProperty(Sejda.UNETHICAL_READ_PROPERTY_NAME, "true");
  cleanUserContextIfNeeded(userContext);
  String localeString = userContext.getLocale();
  if (isNotBlank(localeString)) {
    eventStudio().broadcast(new SetLocaleEvent(localeString));
  }
  String defaultworkingPath = userContext.getDefaultWorkingPath();
  if (isNotBlank(defaultworkingPath)) {
    try {
      if (Files.isDirectory(Paths.get(defaultworkingPath))) {
        eventStudio().broadcast(new SetLatestDirectoryEvent(new File(defaultworkingPath)));
      }
    } catch (InvalidPathException e) {
      LOG.warn("Unable to set initial directory, default path is invalid.", e);
    }
  }
}

@Override
public ExecutionResult execute(ExecutionContext executionContext)
    throws ProcessExecutionException, InterruptedException {
  try {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    log.info("Starting Kmer Counting on all Reads");

    // Create shortcut to args for convenience
    Args args = this.getArgs();

    // Force run parallel to false if not using a scheduler
    if (!executionContext.usingScheduler() && args.isRunParallel()) {
      log.warn("Forcing linear execution due to lack of job scheduler");
      args.setRunParallel(false);
    }

    // Create the output directory
    args.getOutputDir().mkdirs();

    JobOutputMap jfCountOutputs = new JobOutputMap();
    List<ExecutionResult> jobResults = new ArrayList<>();
    List<ExecutionResult> allJobResults = new ArrayList<>();

    // Create the output directory for the RAW datasets
    File rawOutputDir = new File(args.getOutputDir(), "raw");
    if (!rawOutputDir.exists()) {
      rawOutputDir.mkdirs();
    }

    // Start jellyfish on all RAW datasets
    for (Library lib : args.getAllLibraries()) {
      // Execute jellyfish and add id to list of job ids
      JobOutput jfOut = this.executeJellyfishCount(args, "raw", args.getOutputDir(), lib);
      jobResults.add(jfOut.getResult());
      allJobResults.add(jfOut.getResult());
      jfCountOutputs.updateTracker("raw", jfOut.getOutputFile());
    }

    // Also start jellyfish on all the pre-processed libraries from MECQ
    if (args.getAllMecqs() != null) {
      for (Mecq.EcqArgs ecqArgs : args.getAllMecqs()) {
        // Create the output directory for this ECQ dataset
        File ecqOutputDir = new File(args.getOutputDir(), ecqArgs.getName());
        if (!ecqOutputDir.exists()) {
          ecqOutputDir.mkdirs();
        }
        for (Library lib : ecqArgs.getOutputLibraries()) {
          // Add jellyfish id to list of job ids
          JobOutput jfOut =
              this.executeJellyfishCount(args, ecqArgs.getName(), args.getOutputDir(), lib);
          jobResults.add(jfOut.getResult());
          allJobResults.add(jfOut.getResult());
          jfCountOutputs.updateTracker(ecqArgs.getName(), jfOut.getOutputFile());
        }
      }
    }

    // If we're using a scheduler and we have been asked to run each job in parallel,
    // then we should wait for all those to complete before continuing.
    if (executionContext.usingScheduler() && args.isRunParallel()) {
      log.info("Kmer counting all ECQ groups in parallel, waiting for completion");
      this.conanExecutorService.executeScheduledWait(
          jobResults,
          args.getJobPrefix() + "-count-*",
          ExitStatus.Type.COMPLETED_ANY,
          args.getJobPrefix() + "-kmer-count-wait",
          args.getOutputDir());
    }

    // Waiting point... clear job ids.
    jobResults.clear();

    JobOutputMap mergedOutputs = new JobOutputMap();

    // Now execute merge jobs if required
    for (Map.Entry<String, Set<File>> entry : jfCountOutputs.entrySet()) {
      String ecqName = entry.getKey();
      Set<File> fileSet = entry.getValue();

      // Only merge if there's more than one library
      if (fileSet.size() > 1) {
        JobOutput jfOut =
            this.executeJellyfishMerger(
                args, ecqName, fileSet, new File(args.getOutputDir(), ecqName));
        jobResults.add(jfOut.getResult());
        allJobResults.add(jfOut.getResult());
        mergedOutputs.updateTracker(ecqName, jfOut.getOutputFile());
      }
    }

    // If we're using a scheduler and we have been asked to run each job in parallel,
    // then we should wait for all those to complete before continuing.
    if (executionContext.usingScheduler() && args.isRunParallel()) {
      log.info(
          "Creating merged kmer counts for all ECQ groups in parallel, waiting for completion");
      this.conanExecutorService.executeScheduledWait(
          jobResults,
          args.getJobPrefix() + "-merge-*",
          ExitStatus.Type.COMPLETED_ANY,
          args.getJobPrefix() + "-kmer-merge-wait",
          args.getOutputDir());
    }

    // Waiting point... clear job ids.
    jobResults.clear();

    // Combine all jellyfish out maps
    jfCountOutputs.combine(mergedOutputs);

    String katGcpJobPrefix = args.getJobPrefix() + "-kat-gcp";

    // Run KAT GCP on everything
    List<ExecutionResult> katGcpResults =
        this.executeKatGcp(
            jfCountOutputs,
            katGcpJobPrefix,
            args.getThreadsPerProcess(),
            args.getMemoryPerProcess(),
            args.isRunParallel());

    for (ExecutionResult result : katGcpResults) {
      result.setName(result.getName().substring(args.getJobPrefix().length() + 1));
      jobResults.add(result);
      allJobResults.add(result);
    }

    // If we're using a scheduler and we have been asked to run each job in parallel,
    // then we should wait for all those to complete before continuing.
    if (executionContext.usingScheduler() && args.isRunParallel()) {
      log.info("Running \"kat gcp\" for all ECQ groups in parallel, waiting for completion");
      this.conanExecutorService.executeScheduledWait(
          jobResults,
          katGcpJobPrefix + "*",
          ExitStatus.Type.COMPLETED_ANY,
          args.getJobPrefix() + "-kat-gcp-wait",
          args.getOutputDir());
    }

    // Waiting point... clear job ids.
    jobResults.clear();

    log.info("Kmer counting of all reads finished.");

    stopWatch.stop();

    TaskResult taskResult =
        new DefaultTaskResult(
            "rampart-read_analysis-kmer", true, allJobResults, stopWatch.getTime() / 1000L);

    // Output the resource usage to file
    FileUtils.writeLines(
        new File(args.getOutputDir(), args.getJobPrefix() + ".summary"), taskResult.getOutput());

    return new DefaultExecutionResult(
        taskResult.getTaskName(),
        0,
        new String[] {},
        null,
        -1,
        new ResourceUsage(
            taskResult.getMaxMemUsage(),
            taskResult.getActualTotalRuntime(),
            taskResult.getTotalExternalCputime()));
  } catch (ConanParameterException | IOException e) {
    throw new ProcessExecutionException(-1, e);
  }
}

@PostConstruct
@Scheduled(cron = "${msisdn_generator.scheduling}")
public void generate() {
  logger.info("Start parsing MSISDN from {} ...", fileName);
  File file;
  InputStream in;
  try {
    file = new File(getClass().getClassLoader().getResource(fileName).toURI());
    in = new FileInputStream(file);
  } catch (FileNotFoundException e) {
    throw new RuntimeException(fileName + " not found.", e);
  } catch (URISyntaxException e) {
    throw new RuntimeException(fileName + " not found.", e);
  }
  StopWatch sw = new StopWatch();
  sw.start();
  BufferedReader reader = new BufferedReader(new InputStreamReader(in));
  String line;
  int lineNo = 0;
  String shortFileName = file.getName();
  try {
    int count = 0;
    while (!stop && (line = reader.readLine()) != null) {
      lineNo++;
      line = line.trim();
      logger.info("Processing line {}: {}", lineNo, line);
      if (line.isEmpty()) {
        continue;
      }
      int index = line.indexOf('-');
      if (index == -1) {
        // Single MSISDN on this line.
        if (!MSISDNUtils.isValid(line)) {
          logger.error(
              "Invalid MSISDN: {} (Line {} of {})", new Object[] {line, lineNo, shortFileName});
        } else {
          msisdnSender.send(line);
          count++;
        }
        continue;
      }
      // Range of MSISDNs in "start-end" form.
      String startMSISDN = line.substring(0, index);
      if (!MSISDNUtils.isValid(startMSISDN)) {
        logger.error(
            "Invalid start MSISDN: {} (Line {} of {})",
            new Object[] {line, lineNo, shortFileName});
        continue;
      }
      String endMSISDN = line.substring(index + 1);
      if (!MSISDNUtils.isValid(endMSISDN)) {
        logger.error(
            "Invalid end MSISDN: {} (Line {} of {})", new Object[] {line, lineNo, shortFileName});
        continue;
      }
      long start = MSISDNUtils.sanitizeToLong(startMSISDN);
      long end = MSISDNUtils.sanitizeToLong(endMSISDN);
      // Swap the bounds if the range was given in reverse order.
      if (start > end) {
        long tmp = start;
        start = end;
        end = tmp;
      }
      for (long l = start; l <= end && !stop; l++) {
        msisdnSender.send(l + "");
        count++;
      }
    }
    logger.info(
        "{} parse completed, obtained {} MSISDN. ({} ms)",
        new Object[] {shortFileName, count, sw.getTime()});
  } catch (IOException e) {
    logger.error("Parse MSISDN from " + fileName + " failed", e);
  } finally {
    try {
      reader.close();
    } catch (IOException e) {
      // Ignore errors on close.
    }
    try {
      in.close();
    } catch (IOException e) {
      // Ignore errors on close.
    }
  }
}

@Override
public ExecutionResult execute(ExecutionContext executionContext)
    throws ProcessExecutionException, InterruptedException {
  try {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    RampartProcessArgs args = this.getRampartArgs();

    // Force run parallel to false if not using a scheduler
    if (!executionContext.usingScheduler()) {
      if (args.isRunParallel()) {
        log.warn("Forcing linear execution due to lack of job scheduler");
      }
      args.setRunParallel(false);
    }

    log.info("Starting " + this.getName() + " Process");

    this.results = new ArrayList<>();

    // Loop through all samples to process
    for (Mecq.Sample sample : args.samples) {
      if (sample.failedAtStage == -1) {
        File stageDir = args.getStageDir(sample);

        // Ensure sample output directory exists
        if (!stageDir.exists()) {
          stageDir.mkdirs();
        }

        // Do sample-specific work
        TaskResult sampleResults = this.executeSample(sample, executionContext);

        // Collect results
        for (ExecutionResult res : sampleResults.getProcessResults()) {
          results.add(res);
        }
      }
    }

    // Ensure wait log directory exists
    File logDir = new File(args.outputDir, "wait_logs");
    if (!logDir.exists()) {
      logDir.mkdirs();
    }

    // If we're using a scheduler and we have been asked to run jobs in parallel,
    // then we should wait for all those to complete before finishing this stage.
    if (executionContext.usingScheduler() && args.runParallel) {
      log.info("Running all " + this.getName() + " jobs in parallel, waiting for completion");
      MultiWaitResult mrw =
          this.conanExecutorService.executeScheduledWait(
              results,
              args.jobPrefix + "-*",
              ExitStatus.Type.COMPLETED_ANY,
              args.jobPrefix + "-wait",
              logDir);
    }

    // Check all the required output files are in place (delegated to child class)
    // Loop through all samples to process
    for (int i = 0; i < args.samples.size(); i++) {
      Mecq.Sample sample = args.samples.get(i);
      if (sample.failedAtStage == -1) {
        boolean valid = this.validateOutput(sample);
        if (!valid) {
          sample.failedAtStage = args.getStage().ordinal();
          if (args.samples.size() == 1) {
            throw new IOException(
                "Stage " + args.getStage().name() + " failed to produce valid output.");
          } else {
            log.error(
                "Sample "
                    + sample.name
                    + " failed to produce valid output for stage "
                    + args.getStage().name()
                    + "; discontinuing pipeline for this sample.");
          }
        }
      }
    }

    this.finalise();

    log.info("Finished " + this.getName() + " Process");

    stopWatch.stop();

    this.taskResult =
        new DefaultTaskResult(
            "rampart-" + this.getName(), true, results, stopWatch.getTime() / 1000L);

    // Output the resource usage to file
    FileUtils.writeLines(
        new File(logDir, args.jobPrefix + ".summary"), this.taskResult.getOutput());

    return new DefaultExecutionResult(
        this.taskResult.getTaskName(),
        0,
        new String[] {},
        null,
        -1,
        new ResourceUsage(
            this.taskResult.getMaxMemUsage(),
            this.taskResult.getActualTotalRuntime(),
            this.taskResult.getTotalExternalCputime()));
  } catch (IOException e) {
    throw new ProcessExecutionException(2, e);
  }
}