/**
 * Binds the server socket on {@code listeningPortNumber} and accepts client
 * connections until the calling thread is interrupted. Each accepted
 * connection (re)assigns the shared fields {@code s}, {@code is}, {@code br}
 * and starts a worker thread running {@code this}.
 *
 * <p>NOTE(review): the connection fields are shared instance state, so a new
 * connection overwrites the previous one while its worker thread may still be
 * using it — confirm at most one client connects at a time.
 *
 * <p>Fixes over the previous revision: the loop condition used
 * {@code Thread.interrupted()}, which clears the interrupt flag as a side
 * effect; and {@code s.close()}/{@code serverSocket.close()} were only reached
 * on a clean loop exit (and NPE'd if no client had ever connected).
 *
 * @throws Exception if binding or accepting a connection fails
 */
public void connectAndFormStreams() throws Exception {
  serverSocket = new ServerSocket();
  serverSocket.setReuseAddress(true);
  serverSocket.bind(new InetSocketAddress(listeningPortNumber));
  try {
    // isInterrupted() observes the flag without clearing it, so callers can
    // still see that we were interrupted after this method returns.
    while (!Thread.currentThread().isInterrupted()) {
      Log.i("Connect", "Before accept()");
      s = serverSocket.accept();
      Log.i("Connect", "after accept");
      is = s.getInputStream();
      InputStreamReader isr = new InputStreamReader(is);
      br = new BufferedReader(isr);
      Thread serverThread = new Thread(this);
      serverThread.start();
    }
  } finally {
    // Close the last accepted socket (if any) and the listener even when
    // accept() throws.
    if (s != null) {
      s.close();
    }
    serverSocket.close();
  }
}
/**
 * Reads one page of {@code pageSizeBytes} bytes from {@code file} at
 * {@code position} into the backing array of {@code pageBuffer}.
 *
 * @return the same {@code pageBuffer}, filled with the page contents
 * @throws IOException on read failure (EOF is logged at SEVERE before rethrow)
 * @throws InterruptedException if the calling thread was interrupted
 */
@Override
public ByteBuffer readPage(File file, long position, ByteBuffer pageBuffer)
    throws IOException, InterruptedException {
  final long begin = System.currentTimeMillis();
  RandomAccessFile raf = randomAccessFile(file);
  try {
    raf.seek(position);
    raf.readFully(pageBuffer.array(), pageBuffer.arrayOffset(), pageSizeBytes);
    // Surface a pending interrupt as an exception (this also clears the flag).
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    if (LOG.isLoggable(Level.FINE)) {
      long elapsed = System.currentTimeMillis() - begin;
      LOG.log(
          Level.FINE,
          "Read page at {0} of {1}: {2} msec",
          new Object[] {position, file, elapsed});
    }
  } catch (EOFException e) {
    LOG.log(
        Level.SEVERE,
        "Caught EOFException while reading {0}, position {1}",
        new Object[] {file, position});
    LOG.log(Level.SEVERE, "stack", e);
    throw e;
  } finally {
    raf.close();
  }
  return pageBuffer;
}
/** Stops Jetty. */
private void stopJetty() {
    // Jetty does not really stop the server if port is busy.
    try {
        if (httpSrv != null) {
            // If server was successfully started, deregister ports.
            if (httpSrv.isStarted())
                ctx.ports().deregisterPorts(getClass());

            // Record current interrupted status of calling thread.
            // stop() below may internally wait; we must not let a pending
            // interrupt be lost, but we also must not abort the stop.
            boolean interrupted = Thread.interrupted();

            try {
                httpSrv.stop();
            }
            finally {
                // Reset interrupted flag on calling thread.
                if (interrupted)
                    Thread.currentThread().interrupt();
            }
        }
    }
    catch (InterruptedException ignored) {
        if (log.isDebugEnabled())
            log.debug("Thread has been interrupted.");

        // Restore the flag so upstream code can observe the interruption.
        Thread.currentThread().interrupt();
    }
    catch (Exception e) {
        U.error(log, "Failed to stop Jetty HTTP server.", e);
    }
}
/**
 * Runs the responder and reports the outcome via the optional result handler.
 *
 * NOTE(review): the trailing {@code Thread.interrupted()} both reads AND
 * clears this thread's interrupt flag before recording it in the result —
 * presumably intentional so a pooled thread is handed back clean; confirm.
 *
 * @return a ResponderResult capturing the responder, failure reason, success
 *     flag, and whether this thread had been interrupted
 * @throws Exception if runResponder() or the result handler fails
 */
public ResponderResult call() throws Exception {
  logger.debug("started responder thread");
  runResponder();
  // Skip the callback when we were interrupted (the flag is still set here).
  if (null != resultHandler && !Thread.currentThread().isInterrupted()) {
    resultHandler.handleResult(success, failureReason);
  }
  return new ResponderResult(responder, failureReason, success, Thread.interrupted());
}
/* * Fetch a file that is in a Hadoop file system. Return a local File. * Interruptible. */ private File hdfsFetch(Path fromPath, Reporter reporter) throws IOException, InterruptedException { UUID uniqueId = UUID.randomUUID(); File toFile = new File(tempDir, uniqueId.toString() + "/" + fromPath.getName()); File toDir = new File(toFile.getParent()); if (toDir.exists()) { FileUtils.deleteDirectory(toDir); } toDir.mkdirs(); Path toPath = new Path(toFile.getCanonicalPath()); FileSystem fS = fromPath.getFileSystem(hadoopConf); FileSystem tofS = FileSystem.getLocal(hadoopConf); Throttler throttler = new Throttler((double) bytesPerSecThrottle); try { for (FileStatus fStatus : fS.globStatus(fromPath)) { log.info("Copying " + fStatus.getPath() + " to " + toPath); long bytesSoFar = 0; FSDataInputStream iS = fS.open(fStatus.getPath()); FSDataOutputStream oS = tofS.create(toPath); byte[] buffer = new byte[downloadBufferSize]; int nRead; while ((nRead = iS.read(buffer, 0, buffer.length)) != -1) { // Needed to being able to be interrupted at any moment. if (Thread.interrupted()) { iS.close(); oS.close(); cleanDirNoExceptions(toDir); throw new InterruptedException(); } bytesSoFar += nRead; oS.write(buffer, 0, nRead); throttler.incrementAndThrottle(nRead); if (bytesSoFar >= bytesToReportProgress) { reporter.progress(bytesSoFar); bytesSoFar = 0l; } } if (reporter != null) { reporter.progress(bytesSoFar); } oS.close(); iS.close(); } return toDir; } catch (ClosedByInterruptException e) { // This can be thrown by the method read. cleanDirNoExceptions(toDir); throw new InterruptedIOException(); } }
/**
 * Application code intended to run on a separate thread: iterates the
 * optimizer until the terminator reports completion, checking for thread
 * interruption before each optimization step.
 *
 * @return "All Done" on normal completion, "Interrupted" if interrupted
 */
Object doWork() {
  try {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    for (; ; ) {
      if (this.state.terminator.isTerminated(this.state.optimizer.getPopulation())) {
        break;
      }
      // Interruption check between steps keeps long optimizations cancellable.
      if (Thread.interrupted()) {
        throw new InterruptedException();
      }
      this.state.optimizer.optimize();
    }
    System.gc();
  } catch (InterruptedException e) {
    updateStatus("Interrupted", 0);
    return "Interrupted";
  }
  updateStatus("All Done", 0);
  return "All Done";
}
/**
 * Blocks until the head section has been fully caught up. A no-op when there
 * is no head ({@code headLength == 0}).
 *
 * <p>Bug fix: the previous revision called {@code Thread.interrupted()} in the
 * catch block, which CLEARS the interrupt flag and silently swallows the
 * interruption. We now restore the flag with
 * {@code Thread.currentThread().interrupt()} so callers can observe it.
 */
public void waitForHead() {
  if (headLength == 0) {
    return;
  }
  assert headCatchedUpLatch != null;
  try {
    headCatchedUpLatch.await();
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  }
}
/**
 * Copy an input stream to an output stream.
 *
 * @param bufferSize the size of the buffer
 * @param progress the progress observer, may be null
 * @param in the input stream
 * @param out the output stream
 * @param canStop if true, the copy can be stopped by interrupting the thread
 * @return <code>true</code> if the copy was done, <code>false</code> if it was interrupted
 * @throws IOException If an I/O error occurs
 */
public static boolean copyStream(
    int bufferSize, ProgressObserver progress, InputStream in, OutputStream out, boolean canStop)
    throws IOException {
  final byte[] chunk = new byte[bufferSize];
  long total = 0L;
  for (int read = in.read(chunk); read != -1; read = in.read(chunk)) {
    out.write(chunk, 0, read);
    total += read;
    if (progress != null) {
      progress.setValue(total);
    }
    // Thread.interrupted() clears the flag; the stop is reported via the
    // boolean return value instead.
    if (canStop && Thread.interrupted()) {
      return false;
    }
  }
  return true;
}
/**
 * Persists the file-system state and releases the project descriptor,
 * preserving the caller's interrupt status across the (blocking) save.
 */
private void saveData(final BuildFSState fsState, File dataStorageRoot) {
  // Remember-and-clear the interrupt flag so the save is not aborted mid-way.
  final boolean wasInterrupted = Thread.interrupted();
  try {
    saveFsState(dataStorageRoot, fsState);
    final ProjectDescriptor descriptor = myProjectDescriptor;
    if (descriptor != null) {
      descriptor.release();
    }
  } finally {
    // Restore the flag for the caller.
    if (wasInterrupted) {
      Thread.currentThread().interrupt();
    }
  }
}
/** {@inheritDoc} */
@Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException {
    if (log.isDebugEnabled())
        log.debug("GC worker started.");

    File workTokDir = tokDir.getParentFile();

    assert workTokDir != null;

    // True while one more cleanup pass is still owed — including the single
    // final pass performed after cancellation.
    boolean lastRunNeeded = true;

    while (true) {
        try {
            // Sleep only if not cancelled.
            if (lastRunNeeded)
                Thread.sleep(GC_FREQ);
        }
        catch (InterruptedException ignored) {
            // No-op.
        }

        if (log.isDebugEnabled())
            log.debug("Starting GC iteration.");

        cleanupResources(workTokDir);

        // Process spaces created by this endpoint.
        if (log.isDebugEnabled())
            log.debug("Processing local spaces.");

        for (IpcSharedMemoryClientEndpoint e : endpoints) {
            if (log.isDebugEnabled())
                log.debug("Processing endpoint: " + e);

            if (!e.checkOtherPartyAlive()) {
                endpoints.remove(e);

                if (log.isDebugEnabled())
                    log.debug("Removed endpoint: " + e);
            }
        }

        // Cancellation protocol: run one last GC pass with the interrupt flag
        // cleared, then restore the flag and exit on the following iteration.
        if (isCancelled()) {
            if (lastRunNeeded) {
                lastRunNeeded = false;

                // Clear interrupted status.
                Thread.interrupted();
            }
            else {
                Thread.currentThread().interrupt();

                break;
            }
        }
    }
}
/** Given the process handle, waits for its completion and returns the exit code. */
public static int waitForExitProcess(Pointer hProcess) throws InterruptedException {
  // Poll once per second; the WaitForSingleObject timeout keeps the loop
  // responsive to thread interruption between native waits.
  for (; ; ) {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    Kernel32.INSTANCE.WaitForSingleObject(hProcess, 1000);
    IntByReference exitCode = new IntByReference();
    exitCode.setValue(-1);
    Kernel32.INSTANCE.GetExitCodeProcess(hProcess, exitCode);
    int code = exitCode.getValue();
    if (code != Kernel32.STILL_ACTIVE) {
      return code;
    }
  }
}
protected Exception postJsonToPipelineWithRetry( String endpoint, List docs, ArrayList<String> mutable, Exception lastExc, int requestId) throws Exception { Exception retryAfterException = null; try { postJsonToPipeline(endpoint, docs, requestId); if (lastExc != null) log.info( "Re-try request " + requestId + " to " + endpoint + " succeeded after seeing a " + lastExc.getMessage()); } catch (Exception exc) { log.warn("Failed to send request " + requestId + " to '" + endpoint + "' due to: " + exc); if (mutable.size() > 1) { // try another endpoint but update the cloned list to avoid re-hitting the one having an // error if (log.isDebugEnabled()) log.debug("Will re-try failed request " + requestId + " on next endpoint in the list"); mutable.remove(endpoint); retryAfterException = exc; } else { // no other endpoints to try ... brief wait and then retry log.warn( "No more endpoints available to try ... will retry to send request " + requestId + " to " + endpoint + " after waiting 1 sec"); try { Thread.sleep(1000); } catch (InterruptedException ignore) { Thread.interrupted(); } // note we want the exception to propagate from here up the stack since we re-tried and it // didn't work postJsonToPipeline(endpoint, docs, requestId); log.info("Re-try request " + requestId + " to " + endpoint + " succeeded"); retryAfterException = null; // return success condition } } return retryAfterException; }
/** {@inheritDoc} */
@Override public void close() {
    closed = true;

    // Best-effort close of the server socket; errors are suppressed.
    U.closeQuiet(srvSock);

    if (gcWorker != null) {
        U.cancel(gcWorker);

        // This method may be called from already interrupted thread.
        // Need to ensure cleaning on close.
        boolean interrupted = Thread.interrupted();

        try {
            U.join(gcWorker);
        }
        catch (IgniteInterruptedCheckedException e) {
            U.warn(log, "Interrupted when stopping GC worker.", e);
        }
        finally {
            // Restore the caller's interrupt status recorded above.
            if (interrupted)
                Thread.currentThread().interrupt();
        }
    }
}
/**
 * Writes the contents of {@code buffer} to {@code file} at byte offset
 * {@code position}, logging the elapsed time at FINE level.
 *
 * @throws IOException on write failure
 * @throws InterruptedException if the calling thread was interrupted
 */
@Override
public void write(File file, long position, ByteBuffer buffer)
    throws IOException, InterruptedException {
  final long begin = System.currentTimeMillis();
  RandomAccessFile raf = randomAccessFile(file);
  try {
    FileChannel channel = raf.getChannel();
    channel.position(position);
    channel.write(buffer);
    // Surface a pending interrupt as an exception (this also clears the flag).
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    if (LOG.isLoggable(Level.FINE)) {
      long elapsed = System.currentTimeMillis() - begin;
      LOG.log(
          Level.FINE,
          "Wrote page at {0} of {1}: {2} msec",
          new Object[] {position, file, elapsed});
    }
  } finally {
    // Closing the RandomAccessFile also closes its channel.
    raf.close();
  }
}
/**
 * Instantiates the applet class named by the "code" attribute via the given
 * loader. Returns null (after marking the applet for disposal and restoring
 * the interrupt flag) when the calling thread has been interrupted.
 *
 * NOTE(review): on the "nocode" error path the method still falls through to
 * findAppletJDKLevel(applet) with whatever value the applet field already
 * held (possibly null) — confirm this is intended.
 */
protected Applet createApplet(final AppletClassLoader loader)
    throws ClassNotFoundException, IllegalAccessException, IOException, InstantiationException,
        InterruptedException {
  String code = getCode();

  if (code != null) {
    applet = (Applet) loader.loadCode(code).newInstance();
  } else {
    // No "code" attribute: record the error and notify the user/log.
    String msg = "nocode";
    status = APPLET_ERROR;
    showAppletStatus(msg);
    showAppletLog(msg);
    repaint();
  }

  // Determine the JDK level that the applet targets.
  // This is critical for enabling certain backward
  // compatibility switch if an applet is a JDK 1.1
  // applet. [stanley.ho]
  findAppletJDKLevel(applet);

  if (Thread.interrupted()) {
    try {
      status = APPLET_DISPOSE; // APPLET_ERROR?
      applet = null;
      // REMIND: This may not be exactly the right thing: the
      // status is set by the stop button and not necessarily
      // here.
      showAppletStatus("death");
    } finally {
      Thread.currentThread().interrupt(); // resignal interrupt
    }
    return null;
  }

  return applet;
}
/**
 * Throws {@link InterruptedException} when the calling thread's interrupt
 * flag is set. The flag is cleared as a side effect of the check.
 */
public static void checkInterrupted() throws InterruptedException {
  if (!Thread.interrupted()) {
    return;
  }
  throw new InterruptedException();
}
/*
 * Fetch a file that is in a S3 file system. Return a local File. It accepts "s3://" and "s3n://" prefixes.
 * Interruptible.
 *
 * Fixes over the previous revision:
 * - reporter was dereferenced inside the download loop without the null check
 *   that the post-loop code performs, so a null reporter caused an NPE;
 * - per-object streams are now closed in a finally block so an IOException
 *   mid-download no longer leaks descriptors.
 */
private File s3Fetch(URI uri, Reporter reporter) throws IOException, InterruptedException {
  String bucketName = uri.getHost();
  String path = uri.getPath();
  UUID uniqueId = UUID.randomUUID();
  File destFolder = new File(tempDir, uniqueId.toString() + "/" + path);
  if (destFolder.exists()) {
    FileUtils.deleteDirectory(destFolder);
  }
  destFolder.mkdirs();
  Throttler throttler = new Throttler((double) bytesPerSecThrottle);
  boolean done = false;
  try {
    s3Service = new RestS3Service(getCredentials());
    if (s3Service.checkBucketStatus(bucketName) != RestS3Service.BUCKET_STATUS__MY_BUCKET) {
      throw new IOException("Bucket doesn't exist or is already claimed: " + bucketName);
    }
    if (path.startsWith("/")) {
      path = path.substring(1, path.length());
    }
    // NOTE(review): every listed object is written to the same fileName, so
    // multiple matches overwrite each other — confirm the prefix selects one.
    for (S3Object object : s3Service.listObjects(new S3Bucket(bucketName), path, "")) {
      long bytesSoFar = 0;
      String fileName = path;
      if (path.contains("/")) {
        fileName = path.substring(path.lastIndexOf("/") + 1, path.length());
      }
      File fileDest = new File(destFolder, fileName);
      log.info("Downloading " + object.getKey() + " to " + fileDest + " ...");
      if (fileDest.exists()) {
        fileDest.delete();
      }
      object = s3Service.getObject(new S3Bucket(bucketName), object.getKey());
      InputStream iS = object.getDataInputStream();
      FileOutputStream writer = new FileOutputStream(fileDest);
      try {
        byte[] buffer = new byte[downloadBufferSize];
        int nRead;
        while ((nRead = iS.read(buffer, 0, buffer.length)) != -1) {
          // Needed to being able to be interrupted at any moment.
          if (Thread.interrupted()) {
            cleanDirNoExceptions(destFolder);
            throw new InterruptedException();
          }
          bytesSoFar += nRead;
          writer.write(buffer, 0, nRead);
          throttler.incrementAndThrottle(nRead);
          if (bytesSoFar >= bytesToReportProgress) {
            if (reporter != null) {
              reporter.progress(bytesSoFar);
            }
            bytesSoFar = 0l;
          }
        }
        if (reporter != null) {
          reporter.progress(bytesSoFar);
        }
      } finally {
        writer.close();
        iS.close();
      }
      done = true;
    }
    if (!done) {
      throw new IOException("Bucket is empty! " + bucketName + " path: " + path);
    }
  } catch (S3ServiceException e) {
    throw new IOException(e);
  }
  return destFolder;
}
/**
 * This method represents the application code that we'd like to run on a separate thread. It
 * simulates slowly computing a value, in this case just a string 'All Done'. It updates the
 * progress bar every half second to remind the user that we're still busy.
 */
public Object doWork() {
  try {
    this.optimizationParameters.saveInstance();
    if (this.show) {
      this.statusField.setText("Optimizing...");
    }
    RNG.setRandomSeed(optimizationParameters.getRandomSeed());
    // opening output file...
    if (!this.outputFileName.equalsIgnoreCase("none")) {
      String name = "";
      SimpleDateFormat formatter = new SimpleDateFormat("E'_'yyyy.MM.dd'_'HH.mm.ss");
      String startDate = formatter.format(new Date());
      name = this.outputPath + this.outputFileName + "_" + this.experimentName + "_" + startDate + ".dat";
      try {
        this.outputFile = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(name)));
      } catch (FileNotFoundException e) {
        // Best-effort: optimization continues without a log file.
        System.out.println("Could not open output file! Filename: " + name);
      }
    } else {
      this.outputFile = null;
    }
    // initialize problem
    this.optimizationParameters.getProblem().initializeProblem();
    this.optimizationParameters
        .getOptimizer()
        .setProblem(this.optimizationParameters.getProblem());
    // int optimizer and population
    // this.optimizationParameters.getOptimizer().initialize();
    // initialize the log data
    ArrayList tmpMultiRun = new ArrayList();
    this.performedRuns.add(tmpMultiRun);
    // something to log file
    // if (outputFile != null)
    // this.writeToFile(this.optimizationParameters.getOptimizer().getStringRepresentation());
    // this.writeToFile("Here i'll write something characterizing the algorithm.");
    for (int j = 0; j < this.multiRuns; j++) {
      this.optimizationParameters
          .getProblem()
          .initializeProblem(); // in the loop as well, dynamic probs may need that (MK)
      this.tmpData = new ArrayList<>();
      this.currentRun = j;
      if (this.show) {
        this.statusField.setText(
            "Optimizing Run " + (j + 1) + " of " + this.multiRuns + " Multi Runs...");
      }
      // Cancellation point between runs (clears the interrupt flag).
      if (Thread.interrupted()) {
        throw new InterruptedException();
      }
      // write header to file
      this.writeToFile(
          " FitnessCalls\t Best\t Mean\t Worst \t"
              + BeanInspector.toString(
                  this.optimizationParameters.getProblem().getAdditionalDataHeader(),
                  '\t',
                  false,
                  ""));
      // Resume from a backed-up population when continuing, otherwise start fresh.
      if ((this.continueFlag) && (this.backupPopulation != null)) {
        this.recentFunctionCalls += this.backupPopulation.getFunctionCalls();
        this.optimizationParameters.getOptimizer().getProblem().initializeProblem();
        this.optimizationParameters.getOptimizer().addPopulationChangedEventListener(null);
        this.optimizationParameters.getOptimizer().setPopulation(this.backupPopulation);
        this.optimizationParameters
            .getOptimizer()
            .getProblem()
            .evaluate(this.optimizationParameters.getOptimizer().getPopulation());
        this.optimizationParameters
            .getOptimizer()
            .getProblem()
            .evaluate(this.optimizationParameters.getOptimizer().getPopulation().getArchive());
        this.optimizationParameters
            .getOptimizer()
            .initializeByPopulation(this.backupPopulation, false);
        this.optimizationParameters.getOptimizer().getPopulation().setFunctionCalls(0);
        this.optimizationParameters.addPopulationChangedEventListener(this);
      } else {
        this.recentFunctionCalls = 0;
        this.optimizationParameters.getOptimizer().initialize();
      }
      // while (this.optimizationParameters.getOptimizer().getPopulation().getFunctionCalls() <
      // this.functionCalls) {
      while (!this.optimizationParameters
          .getTerminator()
          .isTerminated(this.optimizationParameters.getOptimizer().getPopulation())) {
        // System.out.println("Simulated Function calls "+
        // this.optimizer.getPopulation().getFunctionCalls());
        // Cancellation point between optimization steps.
        if (Thread.interrupted()) {
          throw new InterruptedException();
        }
        optimizationParameters.getOptimizer().optimize();
      }
      System.gc();
      // @TODO if you want the final report include this
      // this.writeToFile(this.optimizationParameters.getProblem().getStringRepresentationForProblem(this.optimizationParameters.getOptimizer()));
      tmpMultiRun.add(this.tmpData);
    }
    if (this.show) {
      this.plot.setInfoString(this.currentExperiment, this.experimentName, 0.5f);
    }
    if (this.show) {
      this.draw();
    }
    this.experimentName =
        this.optimizationParameters.getOptimizer().getName() + "-" + this.performedRuns.size();
  } catch (InterruptedException e) {
    updateStatus(0);
    if (this.show) {
      this.statusField.setText("Interrupted...");
    }
    return "Interrupted";
  }
  if (this.outputFile != null) {
    try {
      this.outputFile.close();
    } catch (IOException e) {
      System.out.println("Failed to close output file!");
    }
  }
  if (this.show) {
    for (int i = 0; i < this.multiRuns; i++) {
      this.plot.clearGraph(1000 + i);
    }
  }
  updateStatus(0);
  if (this.show) {
    this.statusField.setText("Finished...");
  }
  return "All Done";
}
/** In case of interrupted, written file is not deleted. */ private void copyFile(File sourceFile, File destFile, Reporter reporter) throws IOException, InterruptedException { if (!destFile.exists()) { destFile.createNewFile(); } FileChannel source = null; FileChannel destination = null; Throttler throttler = new Throttler((double) bytesPerSecThrottle); FileInputStream iS = null; FileOutputStream oS = null; try { iS = new FileInputStream(sourceFile); oS = new FileOutputStream(destFile); source = iS.getChannel(); destination = oS.getChannel(); long bytesSoFar = 0; long reportingBytesSoFar = 0; long size = source.size(); int transferred = 0; while (bytesSoFar < size) { // Needed to being able to be interrupted at any moment. if (Thread.interrupted()) { throw new InterruptedException(); } // Casting to int here is safe since we will transfer at most "downloadBufferSize" bytes. // This is done on purpose for being able to implement Throttling. transferred = (int) destination.transferFrom(source, bytesSoFar, downloadBufferSize); bytesSoFar += transferred; reportingBytesSoFar += transferred; throttler.incrementAndThrottle(transferred); if (reportingBytesSoFar >= bytesToReportProgress) { reporter.progress(reportingBytesSoFar); reportingBytesSoFar = 0l; } } if (reporter != null) { reporter.progress(reportingBytesSoFar); } } catch (InterruptedException e) { e.printStackTrace(); } finally { if (iS != null) { iS.close(); } if (oS != null) { oS.close(); } if (source != null) { source.close(); } if (destination != null) { destination.close(); } } }
@Override
public void run() {
  try {
    Thread.currentThread().setName("ServerEngine");
    // Route stdout/stderr through timestamping loggers.
    System.setOut(new Misc.TimestampLogger(System.out));
    System.setErr(new Misc.TimestampLogger(System.err, "./data/err.log"));
    address = new InetSocketAddress(host, port);
    System.out.println("Starting " + Constants.SERVER_NAME + " on " + address + "...");
    // load shutdown hook
    Thread shutdownhook = new ShutDownHook();
    Runtime.getRuntime().addShutdownHook(shutdownhook);
    PacketManager.loadPackets();
    Cache.load();
    // load scripts
    Misc.loadScripts(new File("./data/ruby/"));
    GlobalVariables.patchNotes = Misc.loadPatchNotes();
    GlobalVariables.info = Misc.loadInfo();
    GlobalVariables.npcDump = Misc.getNpcDump();
    GlobalVariables.itemDump = Misc.getItemDump();
    // load all xstream related files.
    XStreamUtil.loadAllFiles();
    // item weights
    ItemDefinition.loadWeight();
    // interfaces
    RSInterface.load();
    // Load plugins
    PluginManager.loadPlugins();
    // Load regions
    ObjectDef.loadConfig();
    Region.load();
    Rangable.load();
    // Load objects
    ObjectLoader objectLoader = new ObjectLoader();
    objectLoader.load();
    GameObjectData.init();
    // load combat manager
    CombatManager.init();
    // Load minute timer
    startMinutesCounter();
    // global drops
    GlobalGroundItem.initialize();
    // load npc ls
    Npc.loadNpcDrops();
    // mage arena timers
    AlchemistPlayground.loadAlchemistPlayGround();
    EnchantingChamber.loadEnchantingChamber();
    CreatureGraveyard.loadCreatureGraveyard();
    // spawning world fishing spots
    FishingSpots.spawnFishingSpots();
    QuestHandler.init();
    NpcLoader.loadAutoSpawn("./data/npcs/spawn-config.cfg");
    HighscoresManager.load();
    // Start up and get a'rollin!
    startup();
    System.out.println("Online!");
    // Main game loop: runs until this thread is interrupted.
    // NOTE(review): Thread.interrupted() clears the flag when it fires, and a
    // failed cycle saves all players before continuing — presumably a
    // best-effort crash guard; confirm.
    while (!Thread.interrupted()) {
      try {
        cycle();
        sleep();
      } catch (Exception ex) {
        PlayerSave.saveAllPlayers();
        ex.printStackTrace();
      }
    }
    // After the loop exits, hand the cycle work to the scheduler task below.
    scheduler.schedule(
        new Task() {
          @Override
          protected void execute() {
            if (Thread.interrupted()) {
              PlayerSave.saveAllPlayers();
              stop();
              return;
            }
            try {
              cycle();
            } catch (Exception ex) {
              PlayerSave.saveAllPlayers();
              ex.printStackTrace();
              stop();
            }
          }
        });
  } catch (Exception ex) {
    ex.printStackTrace();
  }
  PluginManager.close();
}
public void postBatchToPipeline(List docs) throws Exception { int numDocs = docs.size(); int requestId = requestCounter.incrementAndGet(); ArrayList<String> mutable = null; synchronized (this) { mutable = new ArrayList<String>(sessions.keySet()); } if (mutable.isEmpty()) { // completely hosed ... try to re-establish all sessions synchronized (this) { try { Thread.sleep(2000); } catch (InterruptedException ie) { Thread.interrupted(); } sessions = establishSessions(originalEndpoints, fusionUser, fusionPass, fusionRealm); mutable = new ArrayList<String>(sessions.keySet()); } if (mutable.isEmpty()) throw new IllegalStateException( "No available endpoints! " + "Check log for previous errors as to why there are no more endpoints available. This is a fatal error."); } if (mutable.size() > 1) { Exception lastExc = null; // try all the endpoints until success is reached ... or we run out of endpoints to try ... while (!mutable.isEmpty()) { String endpoint = getLbEndpoint(mutable); if (endpoint == null) { // no more endpoints available ... fail if (lastExc != null) { log.error( "No more endpoints available to retry failed request (" + requestId + ")! raising last seen error: " + lastExc); throw lastExc; } else { throw new RuntimeException( "No Fusion pipeline endpoints available to process request " + requestId + "! Check logs for previous errors."); } } if (log.isDebugEnabled()) log.debug( "POSTing batch of " + numDocs + " input docs to " + endpoint + " as request " + requestId); Exception retryAfterException = postJsonToPipelineWithRetry(endpoint, docs, mutable, lastExc, requestId); if (retryAfterException == null) { lastExc = null; break; // request succeeded ... } lastExc = retryAfterException; // try next endpoint (if available) after seeing an exception } if (lastExc != null) { // request failed and we exhausted the list of endpoints to try ... 
log.error("Failing request " + requestId + " due to: " + lastExc); throw lastExc; } } else { String endpoint = getLbEndpoint(mutable); if (log.isDebugEnabled()) log.debug( "POSTing batch of " + numDocs + " input docs to " + endpoint + " as request " + requestId); Exception exc = postJsonToPipelineWithRetry(endpoint, docs, mutable, null, requestId); if (exc != null) throw exc; } }