public Collection<String> getArguments() {
  Collection<String> args = new ArrayList<>();
  Optional.ofNullable(username).ifPresent(s -> args.addAll(Arrays.asList("-u", s)));
  Optional.ofNullable(password).ifPresent(s -> args.addAll(Arrays.asList("-p", s)));
  Optional.ofNullable(from)
      .ifPresent(d -> args.addAll(Arrays.asList("-f", d.format(DateTimeFormatter.ISO_DATE))));
  return args;
}
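// A minimal, self-contained sketch of the same Optional-based argument-building
// pattern used above. The field values below are hypothetical stand-ins: absent
// (null) values simply contribute no flags.
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Optional;

class ArgBuilderDemo {
  public static void main(String[] args) {
    String username = "alice"; // pretend these came from configuration
    String password = null;    // absent value: no "-p" flag is emitted
    LocalDate from = LocalDate.of(2024, 1, 1);

    Collection<String> out = new ArrayList<>();
    Optional.ofNullable(username).ifPresent(s -> out.addAll(Arrays.asList("-u", s)));
    Optional.ofNullable(password).ifPresent(s -> out.addAll(Arrays.asList("-p", s)));
    Optional.ofNullable(from)
        .ifPresent(d -> out.addAll(Arrays.asList("-f", d.format(DateTimeFormatter.ISO_DATE))));

    System.out.println(out); // [-u, alice, -f, 2024-01-01]
  }
}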
/**
 * Core sentiment detection method ("nota" = none of the above).
 *
 * @param captions captions to detect; null/none means all captions
 */
public Map<String, Collection<Document>> getEmotions(
    Indexer indexer,
    Collection<Document> docs,
    boolean doNota,
    boolean originalContentOnly,
    String... captions) {
  Collection<Lexicon1Lang> lexicons = getRelevantLexicon1Langs(docs);
  Map<String, Collection<Document>> result = new LinkedHashMap<>();
  Set<Document> docsSet = Util.castOrCloneAsSet(docs);
  // aggregate results for each language into result
  for (Lexicon1Lang lex : lexicons) {
    Map<String, Collection<Document>> resultsForThisLang =
        doNota
            ? lex.getEmotionsWithNOTA(indexer, docsSet, originalContentOnly)
            : lex.getEmotions(indexer, docsSet, originalContentOnly, captions);
    if (resultsForThisLang == null) continue;

    for (Map.Entry<String, Collection<Document>> e : resultsForThisLang.entrySet()) {
      String caption = e.getKey();
      Collection<Document> resultDocsThisLang = e.getValue();
      Collection<Document> resultDocs = result.get(caption);
      // if the caption doesn't exist yet, create a new entry; otherwise add to the
      // existing set of docs that match this caption
      if (resultDocs == null) result.put(caption, resultDocsThisLang);
      else resultDocs.addAll(resultDocsThisLang);
    }
  }
  // TODO: the result can be cached at the server to avoid redundant computation
  // (by concurrent users, which are few for now)
  return result;
}
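// The per-caption merge above is a generic "multimap union". A self-contained
// sketch of the same pattern (String keys and Integer values are hypothetical
// stand-ins for captions and documents):
import java.util.*;

class MultimapMergeDemo {
  static <K, V> void mergeInto(Map<K, Collection<V>> result, Map<K, Collection<V>> part) {
    for (Map.Entry<K, Collection<V>> e : part.entrySet()) {
      Collection<V> existing = result.get(e.getKey());
      if (existing == null) result.put(e.getKey(), e.getValue());
      else existing.addAll(e.getValue());
    }
  }

  public static void main(String[] args) {
    Map<String, Collection<Integer>> result = new LinkedHashMap<>();
    mergeInto(result, Map.of("joy", new ArrayList<>(List.of(1, 2))));
    mergeInto(
        result, Map.of("joy", new ArrayList<>(List.of(3)), "anger", new ArrayList<>(List.of(4))));
    System.out.println(result); // {joy=[1, 2, 3], anger=[4]}
  }
}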
/**
 * JUnit.
 *
 * @throws Exception If failed.
 */
public void testGetAndIncrement() throws Exception {
  Collection<Long> res = new HashSet<>();

  String seqName = UUID.randomUUID().toString();

  for (int i = 0; i < GRID_CNT; i++) {
    Set<Long> retVal =
        compute(grid(i).cluster().forLocal()).call(new GetAndIncrementJob(seqName, RETRIES));

    for (Long l : retVal) assert !res.contains(l) : "Value already was used " + l;

    res.addAll(retVal);
  }

  assert res.size() == GRID_CNT * RETRIES;

  int gapSize = 0;

  for (long i = 0; i < GRID_CNT * RETRIES; i++) {
    if (!res.contains(i)) gapSize++;
    else gapSize = 0;

    assert gapSize <= BATCH_SIZE + 1
        : "Gap above id " + i + " is " + gapSize + ", more than batch size: " + (BATCH_SIZE + 1);
  }
}
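// Why the test tolerates gaps: each node reserves a batch of ids from the global
// sequence and hands them out locally, so up to roughly BATCH_SIZE ids can be
// momentarily unused. A minimal model of batched reservation (hypothetical, not
// the Ignite implementation):
import java.util.concurrent.atomic.AtomicLong;

class BatchSequenceDemo {
  static final int BATCH = 4;
  static final AtomicLong global = new AtomicLong();

  // Each "node" reserves [start, start + BATCH) and serves ids from its range.
  static class LocalSeq {
    long next, limit;

    long getAndIncrement() {
      if (next == limit) {
        next = global.getAndAdd(BATCH);
        limit = next + BATCH;
      }
      return next++;
    }
  }

  public static void main(String[] args) {
    LocalSeq a = new LocalSeq(), b = new LocalSeq();
    System.out.println(a.getAndIncrement()); // 0
    System.out.println(b.getAndIncrement()); // 4  (a still holds 1..3: a "gap")
    System.out.println(a.getAndIncrement()); // 1
  }
}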
// look up and apply coarts for given rels to each sign in result
private void applyCoarts(List<String> coartRels, Collection<Sign> result) {
  List<Sign> inputSigns = new ArrayList<Sign>(result);
  result.clear();
  List<Sign> outputSigns = new ArrayList<Sign>(inputSigns.size());
  // for each rel, look up coarts and apply to input signs, storing results in output signs
  for (String rel : coartRels) {
    Collection<String> preds = (Collection<String>) _coartRelsToPreds.get(rel);
    if (preds == null) continue; // not expected
    Collection<Sign> coartResult = getSignsFromRelAndPreds(rel, preds);
    if (coartResult == null) continue;
    for (Sign coartSign : coartResult) {
      // apply to each input sign
      for (Sign sign : inputSigns) {
        grammar.rules.applyCoart(sign, coartSign, outputSigns);
      }
    }
    // switch output to input for the next iteration
    inputSigns.clear();
    inputSigns.addAll(outputSigns);
    outputSigns.clear();
  }
  // add results back
  result.addAll(inputSigns);
}
private static void copyCollection(
    Object target,
    String[] effectiveProperties,
    String[] ignoreProperties,
    Boolean nullBeCopied,
    PropertyDescriptor targetPd,
    Object sourceValue,
    Method writeMethod)
    throws IllegalAccessException, InvocationTargetException, UtilException,
        ClassNotFoundException, InstantiationException, NoSuchFieldException {
  Method targetReadMethod = targetPd.getReadMethod();
  Collection targetValue = (Collection) targetReadMethod.invoke(target);
  List tempList = new ArrayList();
  if (sourceValue == null) {
    writeMethod.invoke(target, sourceValue);
    return;
  }
  if (targetValue == null) {
    if (Set.class.isAssignableFrom(targetPd.getPropertyType())) {
      targetValue = new HashSet();
    } else if (List.class.isAssignableFrom(targetPd.getPropertyType())) {
      targetValue = new ArrayList();
    } else {
      return;
    }
    // a freshly created collection must be written back, or it would be discarded
    writeMethod.invoke(target, targetValue);
  }
  Object[] sourceArray = ((Collection) sourceValue).toArray();
  Object[] targetArray = targetValue.toArray();
  for (int i = 0; i < sourceArray.length; i++) {
    if (targetValue.contains(sourceArray[i])) {
      // element already present: merge properties into the matching target element
      for (int j = 0; j < targetArray.length; j++) {
        if (sourceArray[i].equals(targetArray[j])) {
          copyProperties(
              sourceArray[i], targetArray[j], effectiveProperties, ignoreProperties, nullBeCopied);
          tempList.add(targetArray[j]);
          break;
        }
      }
    } else {
      Object tempTarget = Class.forName(sourceArray[i].getClass().getName()).newInstance();
      // primitives and strings are assigned directly
      if (sourceArray[i].getClass().isPrimitive() || sourceArray[i] instanceof String) {
        tempTarget = sourceArray[i];
      } else {
        copyProperties(
            sourceArray[i], tempTarget, effectiveProperties, ignoreProperties, nullBeCopied);
      }
      tempList.add(tempTarget);
    }
  }
  targetValue.clear();
  targetValue.addAll(tempList);
}
/** {@inheritDoc} */
@Override
public Collection<UUID> nodeIds() {
  Collection<UUID> ids = new GridLeanSet<UUID>();

  ids.add(cctx.nodeId());
  ids.addAll(mappings.keySet());

  return ids;
}
private static Collection<Node> getNodeMatching(Node body, String regexp) {
  final Collection<Node> nodes = new ArrayList<>();
  if (body.getNodeName().matches(regexp)) nodes.add(body);
  if (body.getChildNodes().getLength() == 0) return nodes;
  NodeList returnList = body.getChildNodes();
  for (int k = 0; k < returnList.getLength(); k++) {
    final Node node = returnList.item(k);
    nodes.addAll(getNodeMatching(node, regexp));
  }
  return nodes;
}
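// A usage sketch for the recursive matcher above (assumes getNodeMatching is
// visible to the caller; the XML payload and namespace prefix are hypothetical):
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

class NodeMatchDemo {
  public static void main(String[] args) throws Exception {
    String xml = "<root><d:XAddrs xmlns:d=\"urn:demo\">http://10.0.0.1/onvif</d:XAddrs></root>";
    Document doc =
        DocumentBuilderFactory.newInstance()
            .newDocumentBuilder()
            .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
    for (Node n : getNodeMatching(doc.getDocumentElement(), ".*:XAddrs")) {
      System.out.println(n.getTextContent()); // http://10.0.0.1/onvif
    }
  }
}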
private static Collection<PerformanceProbe> list(long period) {
  if (period == ALL_RECORDERS) {
    Collection<PerformanceProbe> result = new Vector<PerformanceProbe>();
    for (Collection<PerformanceProbe> probes : _perfProbes.values()) {
      result.addAll(probes);
    }
    return result;
  }
  Collection<PerformanceProbe> result = _perfProbes.get(period);
  if (result == null) {
    return Collections.emptySet();
  }
  return result;
}
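// A minimal sketch of the same lookup pattern: a sentinel key returns the union
// of all registered buckets, any other key returns its bucket or an empty set.
// (The map, sentinel, and String payload here are hypothetical stand-ins.)
import java.util.*;

class ProbeRegistryDemo {
  static final long ALL = -1L;
  static final Map<Long, Collection<String>> buckets = new HashMap<>();

  static Collection<String> list(long period) {
    if (period == ALL) {
      Collection<String> result = new ArrayList<>();
      for (Collection<String> c : buckets.values()) result.addAll(c);
      return result;
    }
    Collection<String> result = buckets.get(period);
    return result == null ? Collections.emptySet() : result;
  }

  public static void main(String[] args) {
    buckets.put(1000L, new ArrayList<>(List.of("p1")));
    buckets.put(5000L, new ArrayList<>(List.of("p2", "p3")));
    System.out.println(list(5000L)); // [p2, p3]
    System.out.println(list(ALL));   // union of all buckets (order unspecified)
    System.out.println(list(250L));  // []
  }
}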
private static Collection<String> parseSoapResponseForUrls(byte[] data)
    throws SOAPException, IOException {
  // System.out.println(new String(data));
  final Collection<String> urls = new ArrayList<>();
  MessageFactory factory = MessageFactory.newInstance(WS_DISCOVERY_SOAP_VERSION);
  final MimeHeaders headers = new MimeHeaders();
  headers.addHeader("Content-type", WS_DISCOVERY_CONTENT_TYPE);
  SOAPMessage message = factory.createMessage(headers, new ByteArrayInputStream(data));
  SOAPBody body = message.getSOAPBody();
  for (Node node : getNodeMatching(body, ".*:XAddrs")) {
    if (node.getTextContent().length() > 0) {
      urls.addAll(Arrays.asList(node.getTextContent().split(" ")));
    }
  }
  return urls;
}
/**
 * Determines all files and folders that belong to a given project and adds them to the supplied
 * collections.
 *
 * @param filteredFiles destination collection of files to process
 * @param rootFiles destination collection of versioned source roots
 * @param rootFilesExclusions destination collection of root children to exclude (e.g. nested
 *     projects)
 * @param project project to examine
 */
public static void addProjectFiles(
    Collection filteredFiles,
    Collection rootFiles,
    Collection rootFilesExclusions,
    Project project) {
  FileStatusCache cache = CvsVersioningSystem.getInstance().getStatusCache();
  Sources sources = ProjectUtils.getSources(project);
  SourceGroup[] sourceGroups = sources.getSourceGroups(Sources.TYPE_GENERIC);
  for (int j = 0; j < sourceGroups.length; j++) {
    SourceGroup sourceGroup = sourceGroups[j];
    FileObject srcRootFo = sourceGroup.getRootFolder();
    File rootFile = FileUtil.toFile(srcRootFo);
    try {
      getCVSRootFor(rootFile);
    } catch (IOException e) {
      // the folder is not under a versioned root
      continue;
    }
    rootFiles.add(rootFile);
    boolean containsSubprojects = false;
    FileObject[] rootChildren = srcRootFo.getChildren();
    Set projectFiles = new HashSet(rootChildren.length);
    for (int i = 0; i < rootChildren.length; i++) {
      FileObject rootChildFo = rootChildren[i];
      if (CvsVersioningSystem.FILENAME_CVS.equals(rootChildFo.getNameExt())) continue;
      File child = FileUtil.toFile(rootChildFo);
      // #67900 Added special treatment for .cvsignore files
      if (sourceGroup.contains(rootChildFo)
          || CvsVersioningSystem.FILENAME_CVSIGNORE.equals(rootChildFo.getNameExt())) {
        // TODO: #60516 deep scan is required here but not performed due to performance reasons
        projectFiles.add(child);
      } else {
        int status = cache.getStatus(child).getStatus();
        if (status != FileInformation.STATUS_NOTVERSIONED_EXCLUDED) {
          rootFilesExclusions.add(child);
          containsSubprojects = true;
        }
      }
    }
    if (containsSubprojects) {
      filteredFiles.addAll(projectFiles);
    } else {
      filteredFiles.add(rootFile);
    }
  }
}
protected void generateNFAs(Grammar g) {
  DOTGenerator dotGenerator = new DOTGenerator(g);
  Collection rules = g.getAllImportedRules();
  rules.addAll(g.getRules());
  for (Iterator itr = rules.iterator(); itr.hasNext(); ) {
    Rule r = (Rule) itr.next();
    try {
      String dot = dotGenerator.getDOT(r.startState);
      if (dot != null) {
        writeDOTFile(g, r, dot);
      }
    } catch (IOException ioe) {
      ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE, ioe);
    }
  }
}
/**
 * Decode file charset.
 *
 * @param f File to process.
 * @return File charset.
 * @throws IOException in case of error.
 */
public static Charset decode(File f) throws IOException {
  SortedMap<String, Charset> charsets = Charset.availableCharsets();

  String[] firstCharsets = {
    Charset.defaultCharset().name(), "US-ASCII", "UTF-8", "UTF-16BE", "UTF-16LE"
  };

  Collection<Charset> orderedCharsets = U.newLinkedHashSet(charsets.size());

  // probe the most likely charsets first, then fall back to all available ones
  for (String c : firstCharsets) if (charsets.containsKey(c)) orderedCharsets.add(charsets.get(c));

  orderedCharsets.addAll(charsets.values());

  try (RandomAccessFile raf = new RandomAccessFile(f, "r")) {
    FileChannel ch = raf.getChannel();

    ByteBuffer buf = ByteBuffer.allocate(4096);

    ch.read(buf);

    buf.flip();

    for (Charset charset : orderedCharsets) {
      CharsetDecoder decoder = charset.newDecoder();

      decoder.reset();

      try {
        // decode a duplicate so a failed attempt does not consume the shared buffer
        decoder.decode(buf.duplicate());

        return charset;
      } catch (CharacterCodingException ignored) {
      }
    }
  }

  return Charset.defaultCharset();
}
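// A usage sketch for decode(...) above (hypothetical temp file; assumes the
// method is visible to the caller). The exact result depends on the platform
// default charset, which is probed first:
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

class CharsetDecodeDemo {
  public static void main(String[] args) throws Exception {
    File f = File.createTempFile("charset-probe", ".txt");
    f.deleteOnExit();
    Files.write(f.toPath(), "héllo wörld".getBytes(StandardCharsets.UTF_8));
    // US-ASCII fails on the multibyte bytes, so a UTF-8-capable charset wins
    System.out.println(decode(f)); // typically UTF-8
  }
}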
private void analyzeTestDependencies(List<ISuite> suites) {
  for (ISuite suite : suites) {
    log("isuite: " + suite.getName());
    // FIXME why the implication that noColor = all tests were skipped?
    // in case of noColor all tests were probably skipped
    if (!noColor) {
      // FIXME will not work for more than one suite
      for (Map.Entry<String, ISuiteResult> entry : suite.getResults().entrySet()) {
        failedConfigurations =
            entry.getValue().getTestContext().getFailedConfigurations().getAllMethods();
        failedMethods = entry.getValue().getTestContext().getFailedTests().getAllMethods();
        skippedMethods = entry.getValue().getTestContext().getSkippedTests().getAllMethods();
      }
    } else {
      // TODO what happens with configs and noColor - does it work?
      failedConfigurations = new ArrayList<ITestNGMethod>();
      failedMethods = new ArrayList<ITestNGMethod>();
      skippedMethods = new ArrayList<ITestNGMethod>();
    }
    for (ITestNGMethod method : failedMethods) {
      log("failed method: " + getClassAndMethodString(method));
      for (String depMethod : method.getMethodsDependedUpon()) {
        log(" dep on: " + depMethod);
      }
    }
    for (ITestNGMethod method : failedConfigurations) {
      log("failed config: " + getClassAndMethodString(method));
      for (String depMethod : method.getMethodsDependedUpon()) {
        log(" dep on: " + depMethod);
      }
    }
    for (ITestNGMethod method : skippedMethods) {
      log("skipped method: " + getClassAndMethodString(method));
      for (String depMethod : method.getMethodsDependedUpon()) {
        log(" dep on: " + depMethod);
      }
      for (String depGroup : method.getGroupsDependedUpon()) {
        log(" dep on: " + depGroup);
      }
    }
    // the only way to learn which groups failed is to check the groups that the
    // failed methods belong to
    for (ITestNGMethod method : failedMethods) {
      failedGroups.addAll(Arrays.asList(method.getGroups()));
    }
    // FIXME to be removed? there is no such thing as a skipped group
    for (ITestNGMethod method : skippedMethods) {
      skippedGroups.addAll(Arrays.asList(method.getGroups()));
    }
    skippedGroups.removeAll(failedGroups);
    for (Map.Entry<String, Collection<ITestNGMethod>> entry :
        suite.getMethodsByGroups().entrySet()) {
      log("entry: " + entry.getKey());
      for (ITestNGMethod method : entry.getValue()) {
        TestMethod tempMet = new TestMethod(method.getMethodName());
        methods.add(tempMet);
        // log("method: " + method.getMethodName() + " added to methods");
        // log(method.getMethodName() + " groups: " + Arrays.deepToString(method.getGroups()));
        // log(method.getMethodName() + " dep groups: "
        //     + Arrays.deepToString(method.getGroupsDependedUpon()));
        for (String dependedUponMethod : method.getMethodsDependedUpon()) {
          tempMet.addMethod(dependedUponMethod.substring(dependedUponMethod.lastIndexOf(".") + 1));
          // log("method: " + method.getMethodName() + " deps on "
          //     + dependedUponMethod.substring(dependedUponMethod.lastIndexOf(".") + 1)
          //     + " added to methods");
          // log(method.getMethodName() + " depends upon "
          //     + dependedUponMethod.substring(dependedUponMethod.lastIndexOf(".") + 1));
          // log("dep upon method: " + dependedUponMethod);
          // String[] tokens = dependedUponMethod.split("\\.");
          // log(Arrays.deepToString(tokens));
          // log(method.getMethodName() + DEPENDS_UPON + tokens[tokens.length - 2] + "."
          //     + tokens[tokens.length - 1]);
        }
        for (String dependedUponGroup : method.getGroupsDependedUpon()) {
          tempMet.addGroup(dependedUponGroup);
          // log(method.getMethodName() + DEPENDS_UPON + dependedUponGroup);
          // log("dep upon group: " + dependedUponGroup);
        }
      }
    }
  }
  for (ITestNGMethod method : failedMethods) {
    log("failed method: " + getClassAndMethodString(method));
    for (String depMethod : method.getMethodsDependedUpon()) {
      log(" dep on: " + depMethod);
    }
  }
  for (ITestNGMethod method : failedConfigurations) {
    log("failed config: " + getClassAndMethodString(method));
    for (String depMethod : method.getMethodsDependedUpon()) {
      log(" dep on: " + depMethod);
    }
  }
  for (ITestNGMethod method : skippedMethods) {
    log("skipped method: " + getClassAndMethodString(method));
    for (String depMethod : method.getMethodsDependedUpon()) {
      log(" dep on: " + depMethod);
    }
    for (String depGroup : method.getGroupsDependedUpon()) {
      log(" dep on: " + depGroup);
    }
  }
  for (TestMethod method : methods) {
    // log("method: " + method.getName());
    uniqueGroups.addAll(method.getGroupsDepUpon());
  }
}
public void _jspService(HttpServletRequest request, HttpServletResponse response)
    throws java.io.IOException, ServletException {

  PageContext pageContext = null;
  HttpSession session = null;
  ServletContext application = null;
  ServletConfig config = null;
  JspWriter out = null;
  Object page = this;
  JspWriter _jspx_out = null;
  PageContext _jspx_page_context = null;

  try {
    response.setContentType("text/html; charset=UTF-8");
    pageContext = _jspxFactory.getPageContext(this, request, response, null, true, 8192, true);
    _jspx_page_context = pageContext;
    application = pageContext.getServletContext();
    config = pageContext.getServletConfig();
    session = pageContext.getSession();
    out = pageContext.getOut();
    _jspx_out = out;
    _jspx_resourceInjector =
        (org.apache.jasper.runtime.ResourceInjector)
            application.getAttribute("com.sun.appserv.jsp.resource.injector");

    out.write('\n');

    JobTracker tracker = (JobTracker) application.getAttribute("job.tracker");
    ClusterStatus status = tracker.getClusterStatus();
    String trackerName = StringUtils.simpleHostname(tracker.getJobTrackerMachine());

    out.write("\n<html>\n<head>\n<title>");
    out.print(trackerName);
    out.write(
        " Hadoop Locality Statistics</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hadoop.css\">\n</head>\n<body>\n<h1>");
    out.print(trackerName);
    out.write(" Hadoop Locality Statistics</h1>\n\n<b>State:</b> ");
    out.print(status.getJobTrackerState());
    out.write("<br>\n<b>Started:</b> ");
    out.print(new Date(tracker.getStartTime()));
    out.write("<br>\n<b>Version:</b> ");
    out.print(VersionInfo.getVersion());
    out.write(",\n r");
    out.print(VersionInfo.getRevision());
    out.write("<br>\n<b>Compiled:</b> ");
    out.print(VersionInfo.getDate());
    out.write(" by\n ");
    out.print(VersionInfo.getUser());
    out.write("<br>\n<b>Identifier:</b> ");
    out.print(tracker.getTrackerIdentifier());
    out.write("<br>\n\n<hr>\n\n");

    Collection<JobInProgress> jobs = new ArrayList<JobInProgress>();
    jobs.addAll(tracker.completedJobs());
    jobs.addAll(tracker.runningJobs());
    jobs.addAll(tracker.failedJobs());

    int dataLocalMaps = 0;
    int rackLocalMaps = 0;
    int totalMaps = 0;
    int totalReduces = 0;

    for (JobInProgress job : jobs) {
      Counters counters = job.getCounters();
      dataLocalMaps += counters.getCounter(JobInProgress.Counter.DATA_LOCAL_MAPS);
      rackLocalMaps += counters.getCounter(JobInProgress.Counter.RACK_LOCAL_MAPS);
      totalMaps += counters.getCounter(JobInProgress.Counter.TOTAL_LAUNCHED_MAPS);
      totalReduces += counters.getCounter(JobInProgress.Counter.TOTAL_LAUNCHED_REDUCES);
    }

    int dataLocalMapPct = totalMaps == 0 ? 0 : (100 * dataLocalMaps) / totalMaps;
    int rackLocalMapPct = totalMaps == 0 ? 0 : (100 * rackLocalMaps) / totalMaps;
    int dataRackLocalMapPct =
        totalMaps == 0 ? 0 : (100 * (dataLocalMaps + rackLocalMaps)) / totalMaps;

    out.write("\n<p>\n<b>Data Local Maps:</b> ");
    out.print(dataLocalMaps);
    out.write(' ');
    out.write('(');
    out.print(dataLocalMapPct);
    out.write("%) <br>\n<b>Rack Local Maps:</b> ");
    out.print(rackLocalMaps);
    out.write(' ');
    out.write('(');
    out.print(rackLocalMapPct);
    out.write("%) <br>\n<b>Data or Rack Local:</b> ");
    out.print(dataLocalMaps + rackLocalMaps);
    out.write(' ');
    out.write('(');
    out.print(dataRackLocalMapPct);
    out.write("%) <br>\n<b>Total Maps:</b> ");
    out.print(totalMaps);
    out.write(" <br>\n<b>Total Reduces:</b> ");
    out.print(totalReduces);
    out.write(" <br>\n</p>\n\n");
    out.println(ServletUtil.htmlFooter());
    out.write('\n');
  } catch (Throwable t) {
    if (!(t instanceof SkipPageException)) {
      out = _jspx_out;
      if (out != null && out.getBufferSize() != 0) out.clearBuffer();
      if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
    }
  } finally {
    _jspxFactory.releasePageContext(_jspx_page_context);
  }
}
public Collection<FieldEdge> getFieldEdges() {
  Collection<FieldEdge> cf = new ArrayList<FieldEdge>();
  for (Collection<FieldEdge> c : fedges.values()) cf.addAll(c);
  return cf;
}
public Collection<CallEdge> getCallEdges() {
  Collection<CallEdge> cf = new ArrayList<CallEdge>();
  for (Collection<CallEdge> c : cedges.values()) cf.addAll(c);
  return cf;
}
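// Both getters above flatten a Map whose values are collections into one list.
// A stream-based equivalent of the same pattern, as a sketch (the map name and
// element type below are hypothetical stand-ins):
import java.util.*;
import java.util.stream.Collectors;

class FlattenDemo {
  public static void main(String[] args) {
    Map<String, Collection<Integer>> edges = new HashMap<>();
    edges.put("a", List.of(1, 2));
    edges.put("b", List.of(3));
    List<Integer> flat =
        edges.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
    System.out.println(flat); // e.g. [1, 2, 3] (order follows map iteration order)
  }
}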
BuildStatus iterativeCompile(
    final ModuleChunk chunk,
    final Set<String> sources,
    final Set<String> outdated,
    final Set<String> removed,
    final Flags flags) {
  final Collection<String> filesToCompile =
      DefaultGroovyMethods.intersect(affectedFiles, sources);

  if (outdated != null) {
    for (String s : outdated) {
      assert (s != null);
    }
    filesToCompile.addAll(outdated);
  }

  filesToCompile.removeAll(compiledFiles);

  if (!filesToCompile.isEmpty() || removed != null) {
    // collect the class files produced by the files to be recompiled and by removed sources
    final Set<String> outputFiles = new HashSet<String>();

    for (String f : filesToCompile) {
      final Set<ClassRepr> classes = dependencyMapping.getClasses(f);
      if (classes != null) {
        for (ClassRepr cr : classes) {
          outputFiles.add(cr.getFileName());
        }
      }
    }

    if (removed != null) {
      for (String f : removed) {
        final Set<ClassRepr> classes = dependencyMapping.getClasses(f);
        if (classes != null) {
          for (ClassRepr cr : classes) {
            outputFiles.add(cr.getFileName());
          }
        }
      }
    }

    if (!outputFiles.isEmpty()) {
      new Logger(flags) {
        @Override
        public void log(PrintStream stream) {
          stream.println("Cleaning output files:");
          logFilePaths(stream, outputFiles);
          stream.println("End of files");
        }
      }.log();

      builder.clearChunk(chunk, outputFiles, ProjectWrapper.this);
    }

    final Mappings delta = dependencyMapping.createDelta();
    final Callbacks.Backend deltaBackend = delta.getCallback();

    new Logger(flags) {
      @Override
      public void log(PrintStream stream) {
        stream.println("Compiling files:");
        logFilePaths(stream, filesToCompile);
        stream.println("End of files");
      }
    }.log();

    boolean buildException = false;

    try {
      builder.buildChunk(chunk, flags.tests(), filesToCompile, deltaBackend, ProjectWrapper.this);
    } catch (Exception e) {
      e.printStackTrace();
      buildException = true;
    }

    if (!buildException) {
      compiledFiles.addAll(filesToCompile);
      affectedFiles.removeAll(filesToCompile);

      final Collection<File> files = new HashSet<File>();
      final Collection<File> compiled = new HashSet<File>();

      for (String f : filesToCompile) {
        files.add(new File(f));
      }

      for (String f : compiledFiles) {
        compiled.add(new File(f));
      }

      final Collection<File> affected = new HashSet<File>();

      final boolean incremental =
          dependencyMapping.differentiate(delta, removed, files, compiled, affected);

      for (File a : affected) {
        affectedFiles.add(FileUtil.toSystemIndependentName(a.getAbsolutePath()));
      }

      dependencyMapping.integrate(delta, files, removed);

      if (!incremental) {
        // fall back to a conservative rebuild of everything still affected
        affectedFiles.addAll(sources);
        affectedFiles.removeAll(compiledFiles);

        final BuildStatus result = iterativeCompile(chunk, sources, null, null, flags);
        if (result == BuildStatus.FAILURE) {
          return result;
        }
        return BuildStatus.CONSERVATIVE;
      }

      return iterativeCompile(chunk, sources, null, null, flags);
    } else {
      return BuildStatus.FAILURE;
    }
  } else {
    for (Module m : chunk.getElements()) {
      Reporter.reportBuildSuccess(m, flags.tests());
    }
  }

  return BuildStatus.INCREMENTAL;
}