/**
 * Test concurrent reader and writer (GH-458).
 *
 * <p><b>Test case:</b>
 *
 * <ol>
 *   <li>start a long running reader;
 *   <li>try to start a writer: it should time out;
 *   <li>stop the reader;
 *   <li>start the writer again: it should succeed.
 * </ol>
 *
 * @throws Exception error during request execution
 */
@Test
@Ignore("There is no way to stop a query on the server!")
public void testReaderWriter() throws Exception {
  final String readerQuery = "?query=(1%20to%20100000000000000)%5b.=1%5d";
  final String writerQuery = "/test.xml";
  final byte[] content = Token.token("<a/>");

  final Get readerAction = new Get(readerQuery);
  final Put writerAction = new Put(writerQuery, content);

  final ExecutorService exec = Executors.newFixedThreadPool(2);

  // start reader
  exec.submit(readerAction);
  Performance.sleep(TIMEOUT); // delay in order to be sure that the reader has started

  // start writer
  Future<HTTPResponse> writer = exec.submit(writerAction);

  try {
    final HTTPResponse result = writer.get(TIMEOUT, TimeUnit.MILLISECONDS);
    if (result.status.isSuccess()) fail("Database modified while a reader is running");
    throw new Exception(result.toString());
  } catch (final TimeoutException e) {
    // writer is blocked by the reader: stop it
    writerAction.stop = true;
  }

  // stop reader
  readerAction.stop = true;

  // start the writer again
  writer = exec.submit(writerAction);
  assertEquals(HTTPCode.CREATED, writer.get().status);
}
public NonBlockingIdentityHashMap<Long, TestKey> getMapMultithreaded()
    throws InterruptedException, ExecutionException {
  final int threadCount = _items.keySet().size();
  final NonBlockingIdentityHashMap<Long, TestKey> map =
      new NonBlockingIdentityHashMap<Long, TestKey>();

  // use a barrier to open the gate for all threads at once, avoiding a rolling start
  // that would give no actual concurrency
  final CyclicBarrier barrier = new CyclicBarrier(threadCount);
  final ExecutorService ex = Executors.newFixedThreadPool(threadCount);
  final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex);
  for (Integer type : _items.keySet()) {
    // a linked list of things to insert
    List<TestKey> items = _items.get(type);
    TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier);
    co.submit(feeder);
  }

  // wait for all threads to return
  int itemCount = 0;
  for (int retCount = 0; retCount < threadCount; retCount++) {
    final Future<Integer> result = co.take();
    itemCount += result.get();
  }
  ex.shutdown();
  return map;
}
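/*
 * A minimal sketch (not from the original source) of what a feeder task such as
 * TestKeyFeederThread could look like, assuming it is a Callable<Integer> that waits on the
 * shared CyclicBarrier and then inserts its items into the map, returning how many it
 * inserted. The field names and the TestKey.getId() accessor are assumptions made for
 * illustration only.
 */
class TestKeyFeederSketch implements Callable<Integer> {
  private final List<TestKey> items;
  private final NonBlockingIdentityHashMap<Long, TestKey> map;
  private final CyclicBarrier barrier;

  TestKeyFeederSketch(List<TestKey> items,
                      NonBlockingIdentityHashMap<Long, TestKey> map,
                      CyclicBarrier barrier) {
    this.items = items;
    this.map = map;
    this.barrier = barrier;
  }

  @Override
  public Integer call() throws Exception {
    barrier.await(); // all feeders start inserting at the same moment
    int inserted = 0;
    for (TestKey item : items) {
      map.put(item.getId(), item); // assumes TestKey exposes a Long id
      inserted++;
    }
    return inserted;
  }
}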
public Integer call() {
  count = 0;
  try {
    File[] files = directory.listFiles();
    // listFiles() returns null on an I/O error; treat that as an empty directory
    if (files == null) return count;

    ArrayList<Future<Integer>> results = new ArrayList<Future<Integer>>();
    for (File file : files)
      if (file.isDirectory()) {
        // search subdirectories concurrently, each in its own thread
        MatchCounter counter = new MatchCounter(file, keyword);
        FutureTask<Integer> task = new FutureTask<Integer>(counter);
        results.add(task);
        Thread t = new Thread(task);
        t.start();
      } else {
        if (search(file)) count++;
      }

    // add up the counts reported by the subdirectory tasks
    for (Future<Integer> result : results)
      try {
        count += result.get();
      } catch (ExecutionException e) {
        e.printStackTrace();
      }
  } catch (InterruptedException e) {
    // interrupted while waiting for a subtask: fall through and return the count so far
  }
  return count;
}
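/*
 * A minimal usage sketch (not part of the original class): start the root MatchCounter on its
 * own thread via a FutureTask and block on get() for the aggregate count. The directory path
 * and keyword are made-up example values.
 */
public static void main(String[] args) throws Exception {
  MatchCounter rootCounter = new MatchCounter(new File("/tmp/src"), "Future");
  FutureTask<Integer> rootTask = new FutureTask<Integer>(rootCounter);
  new Thread(rootTask).start();
  // get() blocks until the entire directory tree has been searched
  System.out.println(rootTask.get() + " matching files");
}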
@Override
public Boolean call() throws Exception {
  long timeoutMillis = 5000;

  try {
    getServersFile();
    getZkRunning();

    while (true) {
      while (!restartQueue.isEmpty()) {
        LOG.debug("Restart queue size [" + restartQueue.size() + "]");
        RestartHandler handler = restartQueue.poll();
        Future<ScriptContext> runner = pool.submit(handler);
        ScriptContext scriptContext = runner.get(); // blocking call
        if (scriptContext.getExitCode() != 0) restartQueue.add(handler);
      }
      try {
        Thread.sleep(timeoutMillis);
      } catch (InterruptedException e) {
      }
    }
  } catch (Exception e) {
    e.printStackTrace();
    LOG.error(e);
    pool.shutdown();
    throw e;
  }
}
public static void load(Collection<FileDesc> files, Path root, int blocSize, Pattern pattern)
    throws IOException {
  root = root.toAbsolutePath().normalize();
  Visitor visitor = new Visitor(root, blocSize, pattern);
  Files.walkFileTree(root, visitor);
  for (Future<FileDesc> future : visitor.futures()) {
    try {
      files.add(future.get());
    } catch (Exception e) {
      log.error("", e);
    }
  }
}
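/*
 * A minimal sketch (not the project's actual Visitor) of how the Visitor used above could be
 * structured: extend SimpleFileVisitor, match each regular file against the pattern, and
 * submit one FileDesc-producing task per match so the per-file work runs in parallel with the
 * directory walk. The FileDesc constructor and the thread-pool sizing are assumptions for
 * illustration only.
 */
class VisitorSketch extends SimpleFileVisitor<Path> {
  private final Path root;
  private final int blocSize;
  private final Pattern pattern;
  private final ExecutorService pool =
      Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
  private final List<Future<FileDesc>> futures = new ArrayList<Future<FileDesc>>();

  VisitorSketch(Path root, int blocSize, Pattern pattern) {
    this.root = root;
    this.blocSize = blocSize;
    this.pattern = pattern;
  }

  @Override
  public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
    // only hash regular files whose path (relative to the root) matches the pattern
    String relative = root.relativize(file).toString();
    if (attrs.isRegularFile() && pattern.matcher(relative).matches()) {
      futures.add(pool.submit(() -> new FileDesc(root, file, blocSize))); // hypothetical constructor
    }
    return FileVisitResult.CONTINUE;
  }

  List<Future<FileDesc>> futures() {
    pool.shutdown(); // the walk is finished, so no further tasks will be submitted
    return futures;
  }
}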
static String outputOf(final Process p) {
  try {
    Future<String> outputFuture = futureOutputOf(p.getInputStream());
    Future<String> errorFuture = futureOutputOf(p.getErrorStream());
    final String output = outputFuture.get();
    final String error = errorFuture.get();
    // Check for successful process completion
    equal(error, "");
    equal(p.waitFor(), 0);
    equal(p.exitValue(), 0);
    return output;
  } catch (Throwable t) {
    unexpected(t);
    throw new Error(t);
  }
}
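/*
 * A minimal sketch of futureOutputOf(InputStream), assuming it simply drains the stream on a
 * background executor so that stdout and stderr are consumed concurrently and the child
 * process cannot block on a full pipe buffer. The executor field and the UTF-8 decoding are
 * assumptions, not the original test library's implementation.
 */
private static final ExecutorService streamDrainer = Executors.newCachedThreadPool();

static Future<String> futureOutputOf(final InputStream is) {
  return streamDrainer.submit(() -> {
    // read the whole stream into a buffer, then decode it as text
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    byte[] chunk = new byte[8192];
    for (int n; (n = is.read(chunk)) != -1; ) {
      buf.write(chunk, 0, n);
    }
    return buf.toString("UTF-8");
  });
}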
/**
 * Test 2 concurrent readers (GH-458).
 *
 * <p><b>Test case:</b>
 *
 * <ol>
 *   <li>start a long running reader;
 *   <li>start a fast reader: it should succeed.
 * </ol>
 *
 * @throws Exception error during request execution
 */
@Test
public void testMultipleReaders() throws Exception {
  final String number = "63177";
  final String slowQuery = "?query=(1%20to%20100000000000000)%5b.=1%5d";
  final String fastQuery = "?query=" + number;

  final Get slowAction = new Get(slowQuery);
  final Get fastAction = new Get(fastQuery);

  final ExecutorService exec = Executors.newFixedThreadPool(2);

  exec.submit(slowAction);
  Performance.sleep(TIMEOUT); // delay in order to be sure that the reader has started
  final Future<HTTPResponse> fast = exec.submit(fastAction);

  try {
    final HTTPResponse result = fast.get(TIMEOUT, TimeUnit.MILLISECONDS);
    assertEquals(HTTPCode.OK, result.status);
    assertEquals(number, result.data);
  } finally {
    slowAction.stop = true;
  }
}
/**
 * Test concurrent writers (GH-458).
 *
 * <p><b>Test case:</b>
 *
 * <ol>
 *   <li>start several writers one after another;
 *   <li>all writers should succeed.
 * </ol>
 *
 * @throws Exception error during request execution
 */
@Test
public void testMultipleWriters() throws Exception {
  final int count = 10;
  final String template =
      "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
      + "<command xmlns=\"http://basex.org/rest\"><text><![CDATA["
      + "ADD TO %1$d <node id=\"%1$d\"/>"
      + "]]></text></command>";

  @SuppressWarnings("unchecked")
  final Future<HTTPResponse>[] tasks = new Future[count];
  final ExecutorService exec = Executors.newFixedThreadPool(count);

  // start all writers (not at the same time, but still in parallel)
  for (int i = 0; i < count; i++) {
    final String command = String.format(template, i);
    tasks[i] = exec.submit(new Post("", Token.token(command)));
  }

  // check if all have finished successfully
  for (final Future<HTTPResponse> task : tasks) {
    assertEquals(HTTPCode.OK, task.get(TIMEOUT, TimeUnit.MILLISECONDS).status);
  }
}
@Override public void handle(HttpExchange httpExchange) throws IOException { // Set common response headers httpExchange.getResponseHeaders().add("Access-Control-Allow-Origin", "*"); Future<String> json = corenlpExecutor.submit( () -> { try { // Get the document Properties props = new Properties() { { setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,depparse"); } }; Annotation doc = getDocument(props, httpExchange); if (!doc.containsKey(CoreAnnotations.SentencesAnnotation.class)) { StanfordCoreNLP pipeline = mkStanfordCoreNLP(props); pipeline.annotate(doc); } // Construct the matcher Map<String, String> params = getURLParams(httpExchange.getRequestURI()); // (get the pattern) if (!params.containsKey("pattern")) { respondError("Missing required parameter 'pattern'", httpExchange); return ""; } String pattern = params.get("pattern"); // (get whether to filter / find) String filterStr = params.getOrDefault("filter", "false"); final boolean filter = filterStr.trim().isEmpty() || "true".equalsIgnoreCase(filterStr.toLowerCase()); // (create the matcher) final SemgrexPattern regex = SemgrexPattern.compile(pattern); // Run TokensRegex return JSONOutputter.JSONWriter.objectToJSON( (docWriter) -> { if (filter) { // Case: just filter sentences docWriter.set( "sentences", doc.get(CoreAnnotations.SentencesAnnotation.class) .stream() .map( sentence -> regex .matcher( sentence.get( SemanticGraphCoreAnnotations .CollapsedCCProcessedDependenciesAnnotation .class)) .matches()) .collect(Collectors.toList())); } else { // Case: find matches docWriter.set( "sentences", doc.get(CoreAnnotations.SentencesAnnotation.class) .stream() .map( sentence -> (Consumer<JSONOutputter.Writer>) (JSONOutputter.Writer sentWriter) -> { SemgrexMatcher matcher = regex.matcher( sentence.get( SemanticGraphCoreAnnotations .CollapsedCCProcessedDependenciesAnnotation .class)); int i = 0; while (matcher.find()) { sentWriter.set( Integer.toString(i), (Consumer<JSONOutputter.Writer>) (JSONOutputter.Writer matchWriter) -> { IndexedWord match = matcher.getMatch(); matchWriter.set("text", match.word()); matchWriter.set( "begin", match.index() - 1); matchWriter.set("end", match.index()); for (String capture : matcher.getNodeNames()) { matchWriter.set( "$" + capture, (Consumer<JSONOutputter.Writer>) groupWriter -> { IndexedWord node = matcher.getNode( capture); groupWriter.set( "text", node.word()); groupWriter.set( "begin", node.index() - 1); groupWriter.set( "end", node.index()); }); } }); i += 1; } sentWriter.set("length", i); })); } }); } catch (Exception e) { e.printStackTrace(); try { respondError(e.getClass().getName() + ": " + e.getMessage(), httpExchange); } catch (IOException ignored) { } } return ""; }); // Send response byte[] response = new byte[0]; try { response = json.get(5, TimeUnit.SECONDS).getBytes(); } catch (InterruptedException | ExecutionException | TimeoutException e) { respondError("Timeout when executing Semgrex query", httpExchange); } if (response.length > 0) { httpExchange.getResponseHeaders().add("Content-Type", "text/json"); httpExchange.getResponseHeaders().add("Content-Length", Integer.toString(response.length)); httpExchange.sendResponseHeaders(HTTP_OK, response.length); httpExchange.getResponseBody().write(response); httpExchange.close(); } }
@Override public void handle(HttpExchange httpExchange) throws IOException { // Set common response headers httpExchange.getResponseHeaders().add("Access-Control-Allow-Origin", "*"); // Get sentence. Properties props; Annotation ann; StanfordCoreNLP.OutputFormat of; log("[" + httpExchange.getRemoteAddress() + "] Received message"); try { props = getProperties(httpExchange); ann = getDocument(props, httpExchange); of = StanfordCoreNLP.OutputFormat.valueOf( props.getProperty("outputFormat", "json").toUpperCase()); // Handle direct browser connections (i.e., not a POST request). if (ann.get(CoreAnnotations.TextAnnotation.class).length() == 0) { log("[" + httpExchange.getRemoteAddress() + "] Interactive connection"); staticPageHandle.handle(httpExchange); return; } log("[" + httpExchange.getRemoteAddress() + "] API call"); } catch (Exception e) { // Return error message. e.printStackTrace(); String response = e.getMessage(); httpExchange.getResponseHeaders().add("Content-Type", "text/plain"); httpExchange.sendResponseHeaders(HTTP_BAD_INPUT, response.length()); httpExchange.getResponseBody().write(response.getBytes()); httpExchange.close(); return; } try { // Annotate StanfordCoreNLP pipeline = mkStanfordCoreNLP(props); Future<Annotation> completedAnnotationFuture = corenlpExecutor.submit( () -> { pipeline.annotate(ann); return ann; }); Annotation completedAnnotation = completedAnnotationFuture.get(5, TimeUnit.SECONDS); // Get output ByteArrayOutputStream os = new ByteArrayOutputStream(); StanfordCoreNLP.createOutputter(props, AnnotationOutputter.getOptions(pipeline)) .accept(completedAnnotation, os); os.close(); byte[] response = os.toByteArray(); httpExchange.getResponseHeaders().add("Content-Type", getContentType(props, of)); httpExchange.getResponseHeaders().add("Content-Length", Integer.toString(response.length)); httpExchange.sendResponseHeaders(HTTP_OK, response.length); httpExchange.getResponseBody().write(response); httpExchange.close(); } catch (TimeoutException e) { respondError("CoreNLP request timed out", httpExchange); } catch (Exception e) { // Return error message. respondError(e.getClass().getName() + ": " + e.getMessage(), httpExchange); } }