/**
 * Builds HTML table rows ({@code <tr>}) for each interface matched by the given
 * filter rule. Each row lists the services on that interface that also appear in
 * {@code serviceList}; when {@code serviceList} is null or empty the row shows
 * "All services" instead.
 *
 * @param rule the filter rule passed to {@link Filter#getIPServiceMap}
 * @param serviceList services to display; null or empty means "all"
 * @return the concatenated HTML rows
 * @throws FilterParseException if the rule cannot be parsed
 */
public String buildInterfaceTable(String rule, String[] serviceList) throws FilterParseException {
  // StringBuilder: no cross-thread sharing here, so StringBuffer's locking is waste.
  StringBuilder buffer = new StringBuilder();
  Filter filter = new Filter();
  Map interfaces = filter.getIPServiceMap(rule);

  // Hoist the wanted services into a set once, so the per-service membership
  // check is O(1) instead of a linear scan of serviceList for every service.
  // (Also means a service duplicated in serviceList is emitted only once.)
  Set<String> wanted =
      (serviceList == null) ? null : new HashSet<String>(Arrays.asList(serviceList));

  Iterator i = interfaces.keySet().iterator();
  while (i.hasNext()) {
    String key = (String) i.next();
    buffer.append("<tr><td valign=\"top\">").append(key).append("</td>");
    buffer.append("<td>");
    if (wanted != null && !wanted.isEmpty()) {
      Map services = (Map) interfaces.get(key);
      Iterator j = services.keySet().iterator();
      while (j.hasNext()) {
        String svc = (String) j.next();
        if (wanted.contains(svc)) {
          buffer.append(svc).append("<br>");
        }
      }
    } else {
      buffer.append("All services");
    }
    buffer.append("</td>");
    buffer.append("</tr>");
  }
  return buffer.toString();
}
// --------------------------------------------------------------------------- private void printDependencies() throws TablesawException { m_printedDependencies = new HashSet<String>(); try { PrintWriter pw = new PrintWriter(new FileWriter("dependency.txt")); pw.println("Targets marked with a * have already been printed"); // Create a reduced set of stuff to print Set<String> ruleNames = new HashSet<String>(); for (String name : m_nameRuleMap.keySet()) ruleNames.add(name); for (String name : m_nameRuleMap.keySet()) { Rule rule = m_nameRuleMap.get(name); for (String dep : rule.getDependNames()) ruleNames.remove(dep); for (Rule dep : rule.getDependRules()) { if (dep.getName() != null) ruleNames.remove(dep.getName()); } } for (String name : ruleNames) { if (!name.startsWith(NAMED_RULE_PREFIX)) printDependencies(name, pw, 0); } pw.close(); } catch (IOException ioe) { throw new TablesawException("Cannot write to file dependency.txt", -1); } }
/**
 * Repair-candidate selection when the head (most recent) version is not shared
 * by every voter: candidates should drop out as the vote threshold rises.
 * Expected sets are preserved from the original test.
 */
public void testHeadNotPopular() throws Exception {
  VersionCounts versionCounts = VersionCounts.make();

  // Voters 1 and 2 have identical version histories.
  // (The hashes returned by addVersion were unused locals in the original.)
  VoteBlock vb1 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb1, "content 1 for foo1");
  addVersion(vb1, "content 2 for foo1");

  VoteBlock vb2 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb2, "content 1 for foo1");
  addVersion(vb2, "content 2 for foo1");

  // Voter 3 shares only "content 2" with the others.
  VoteBlock vb3 = makeVoteBlock("http://test.com/foo1");
  addVersion(vb3, "content 3 for foo1");
  addVersion(vb3, "content 2 for foo1");

  versionCounts.vote(vb1, participant1);
  versionCounts.vote(vb2, participant2);
  versionCounts.vote(vb3, participant3);

  Map<ParticipantUserData, HashResult> repairCandidates;

  // Low thresholds: all three voters qualify.
  repairCandidates = versionCounts.getRepairCandidates(0);
  assertSameElements(
      SetUtil.set(participant1, participant2, participant3), repairCandidates.keySet());

  repairCandidates = versionCounts.getRepairCandidates(1);
  assertSameElements(
      SetUtil.set(participant1, participant2, participant3), repairCandidates.keySet());

  // Threshold 2: only participants 1 and 2 remain.
  repairCandidates = versionCounts.getRepairCandidates(2);
  assertSameElements(SetUtil.set(participant1, participant2), repairCandidates.keySet());

  // Threshold 3: no candidates at all.
  repairCandidates = versionCounts.getRepairCandidates(3);
  assertEmpty(repairCandidates.keySet());
}
/**
 * Returns the characters that occur more than once in the input, each reported
 * once, in order of first repetition.
 *
 * @param MyString the string to scan; null is treated as empty
 * @return the duplicated characters concatenated into a string
 */
public String findDups(String MyString) {
  if (MyString == null) {
    return "";
  }
  // seen: every character encountered so far.
  // dups: characters seen at least twice; LinkedHashSet makes the output order
  // deterministic (first-repetition order), unlike the HashMap iteration the
  // original relied on.
  Set<Character> seen = new HashSet<Character>();
  Set<Character> dups = new LinkedHashSet<Character>();
  for (char c : MyString.toCharArray()) {
    // add() returns false when the character was already present.
    if (!seen.add(c)) {
      dups.add(c);
    }
  }
  // StringBuilder avoids the O(n^2) string concatenation of the original loop.
  StringBuilder result = new StringBuilder(dups.size());
  for (Character c : dups) {
    result.append(c);
  }
  return result.toString();
}
public void testMultipleIdenticalVersions() throws Exception { VersionCounts versionCounts = VersionCounts.make(); VoteBlock vb1 = makeVoteBlock("http://test.com/foo1"); byte[] hash1 = addVersion(vb1, "content 1 for foo1"); byte[] hash2 = addVersion(vb1, "content 2 for foo1"); VoteBlock vb2 = makeVoteBlock("http://test.com/foo1"); addVersion(vb2, "content 1 for foo1"); addVersion(vb2, "content 1 for foo1"); addVersion(vb2, "content 1 for foo1"); addVersion(vb2, "content 1 for foo1"); addVersion(vb2, "content 2 for foo1"); VoteBlock vb3 = makeVoteBlock("http://test.com/foo1"); addVersion(vb3, "content 1 for foo1"); addVersion(vb3, "content 2 for foo1"); addVersion(vb3, "content 2 for foo1"); addVersion(vb3, "content 2 for foo1"); addVersion(vb3, "content 2 for foo1"); versionCounts.vote(vb1, participant1); versionCounts.vote(vb2, participant2); versionCounts.vote(vb3, participant3); Map<ParticipantUserData, HashResult> repairCandidates; repairCandidates = versionCounts.getRepairCandidates(2); assertSameElements( SetUtil.set(participant1, participant2, participant3), repairCandidates.keySet()); // With only three candidates, no version should reach a threshold // of 4, unless counting multiples is wrong. repairCandidates = versionCounts.getRepairCandidates(4); assertEmpty(repairCandidates.keySet()); }
/**
 * Serves an HTTP request: optionally logs the method, URI, headers, and
 * parameters, verifies that the web root is a directory, and delegates to
 * respond() with an unmodifiable view of the headers.
 */
@Override
public Response serve(IHTTPSession session) {
  Map<String, String> header = session.getHeaders();
  Map<String, String> parms = session.getParms();
  String uri = session.getUri();

  if (logRequests) {
    Log.i(TAG, session.getMethod() + " '" + uri + "' ");

    // entrySet iteration avoids a second map lookup per logged key.
    for (Map.Entry<String, String> entry : header.entrySet()) {
      Log.i(TAG, " HDR: '" + entry.getKey() + "' = '" + entry.getValue() + "'");
    }
    for (Map.Entry<String, String> entry : parms.entrySet()) {
      Log.i(TAG, " PRM: '" + entry.getKey() + "' = '" + entry.getValue() + "'");
    }
  }

  if (!webRoot.isDirectory()) {
    return createResponse(
        Response.Status.INTERNAL_ERROR,
        NanoHTTPD.MIME_PLAINTEXT,
        "INTERNAL ERRROR: given path is not a directory (" + webRoot + ").");
  }
  return respond(Collections.unmodifiableMap(header), uri);
}
/**
 * Runs this builder over the chunk's dirty Java sources: collects the dirty
 * files accepted by JAVA_SOURCES_FILTER, optionally logs them, and hands them
 * to compile(). Returns NOTHING_DONE when the builder is disabled via the
 * IS_ENABLED context flag.
 *
 * @throws ProjectBuildException on compilation failure or any unexpected error
 */
public ExitCode build(
    final CompileContext context,
    final ModuleChunk chunk,
    DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder,
    OutputConsumer outputConsumer)
    throws ProjectBuildException {
  if (!IS_ENABLED.get(context, Boolean.TRUE)) {
    return ExitCode.NOTHING_DONE;
  }
  try {
    // Map each dirty source file to its build target; FILE_HASHING_STRATEGY
    // keys files by path semantics rather than object identity.
    final Map<File, ModuleBuildTarget> filesToCompile =
        new THashMap<File, ModuleBuildTarget>(FileUtil.FILE_HASHING_STRATEGY);
    dirtyFilesHolder.processDirtyFiles(
        new FileProcessor<JavaSourceRootDescriptor, ModuleBuildTarget>() {
          public boolean apply(
              ModuleBuildTarget target, File file, JavaSourceRootDescriptor descriptor)
              throws IOException {
            if (JAVA_SOURCES_FILTER.accept(file)) {
              filesToCompile.put(file, target);
            }
            return true; // keep processing the remaining dirty files
          }
        });
    if (context.isMake()) {
      // Incremental build: log the set of files about to be compiled when the
      // project builder logger is enabled.
      final ProjectBuilderLogger logger = context.getLoggingManager().getProjectBuilderLogger();
      if (logger.isEnabled()) {
        if (filesToCompile.size() > 0) {
          logger.logCompiledFiles(filesToCompile.keySet(), BUILDER_NAME, "Compiling files:");
        }
      }
    }
    return compile(context, chunk, dirtyFilesHolder, filesToCompile.keySet(), outputConsumer);
  } catch (ProjectBuildException e) {
    throw e; // already carries a user-readable message; do not re-wrap
  } catch (Exception e) {
    // Fall back to the full stack trace when the exception has no message, so
    // the build log always shows something actionable.
    String message = e.getMessage();
    if (message == null) {
      final ByteArrayOutputStream out = new ByteArrayOutputStream();
      final PrintStream stream = new PrintStream(out);
      try {
        e.printStackTrace(stream);
      } finally {
        stream.close();
      }
      message = "Internal error: \n" + out.toString();
    }
    context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, message));
    throw new ProjectBuildException(message, e);
  }
}
/**
 * Renders both lexicons for debugging: each caption with its raw query, then
 * each caption with its expanded query, separated by a divider line.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append("Raw lexicon:\n\n");
  // entrySet avoids a second lookup per caption; chained appends avoid
  // building a throwaway concatenated string per line.
  for (Map.Entry<String, String> e : captionToRawQuery.entrySet())
    sb.append(e.getKey()).append(":").append(e.getValue()).append("\n");
  sb.append(
      "\n-----------------------------------------------------------------------------\n");
  sb.append("Expanded lexicon:\n\n");
  for (Map.Entry<String, String> e : captionToExpandedQuery.entrySet())
    sb.append(e.getKey()).append(":").append(e.getValue()).append("\n");
  return sb.toString();
}
protected Properties convertProperties(Map<String, Object> m) { Properties res = null; { if (m != null) { res = new Properties(); Set<String> keys = m.keySet(); for (String key : keys) { String value = null; // Set 'value': { Object o = m.get(key); if (o != null) { value = o.toString(); } } res.setProperty(key, value); } } } return res; }
/**
 * Writes the given sequences to "reads.fa" in the temp directory, in FASTA
 * format, for use as a blat database.
 *
 * @param seqList map from sequence name to sequence text
 * @return the full path of the written file, or null when writing fails
 */
private String dumpToFile(Map<String, String> seqList) {
  File seqFile = new File(tmpDirFile, "reads.fa");
  BufferedWriter out = null;
  try {
    out = new BufferedWriter(new FileWriter(seqFile.getPath()));

    /* write out the sequences, one FASTA record per entry */
    for (Map.Entry<String, String> entry : seqList.entrySet()) {
      assert (entry.getValue() != null);
      out.write(">" + entry.getKey() + "\n");
      out.write(entry.getValue() + "\n");
    }

    // Close inside the try so flush failures are logged and reported as null.
    out.close();
    out = null;
  } catch (Exception e) {
    log.error(e);
    return null;
  } finally {
    // The original leaked the writer when a write threw before close().
    if (out != null) {
      try {
        out.close();
      } catch (IOException ignored) {
        // already returning null via the catch above
      }
    }
  }
  return seqFile.getPath();
}
// --------------------------------------------------------------------------- private void writeCache() { try { Debug.print("Writing cache"); FileOutputStream fos = new FileOutputStream(m_cacheFile); ObjectOutputStream oos = new ObjectOutputStream(fos); oos.writeObject(m_depCache); oos.writeInt(m_newModificationCache.size()); Iterator<String> it = m_newModificationCache.keySet().iterator(); while (it.hasNext()) { String key = it.next(); oos.writeUTF(key); oos.writeLong(m_newModificationCache.get(key)); } oos.close(); } catch (Exception e) { Debug.print(e.getMessage()); StringWriter sw = new StringWriter(); e.printStackTrace(new PrintWriter(sw)); Debug.print(sw.toString()); } }
/** @param aProperties the updated properties. */ @SuppressWarnings("rawtypes") final void setProperties(final Dictionary aProperties) { final Map<String, String> newProps = new HashMap<String, String>(); Enumeration keys = aProperties.keys(); while (keys.hasMoreElements()) { final String key = (String) keys.nextElement(); if (!KNOWN_KEYS.contains(key) && !IGNORED_KEYS.contains(key)) { LOG.log(Level.WARNING, "Unknown/unsupported profile key: " + key); continue; } final String value = aProperties.get(key).toString(); newProps.put(key, value.trim()); } // Verify whether all known keys are defined... final List<String> checkedKeys = new ArrayList<String>(KNOWN_KEYS); checkedKeys.removeAll(newProps.keySet()); if (!checkedKeys.isEmpty()) { throw new IllegalArgumentException( "Profile settings not complete! Missing keys are: " + checkedKeys.toString()); } this.properties.putAll(newProps); LOG.log( Level.INFO, "New device profile settings applied for {1} ({0}) ...", // new Object[] {getType(), getDescription()}); }
/**
 * Reports whether two maps are identical: same size and, for every key in the
 * first map, values that compare equal via objectsAreIdentical. Two null maps
 * count as identical; a null map never matches a non-null one.
 */
public static boolean mapsAreIdentical(Map map1, Map map2) {
  if (map1 == null || map2 == null) {
    // Identical only when both are null.
    return map1 == map2;
  }
  if (map1.size() != map2.size()) {
    return false;
  }
  for (Object o : map1.entrySet()) {
    Map.Entry entry = (Map.Entry) o;
    if (!objectsAreIdentical(entry.getValue(), map2.get(entry.getKey()))) {
      return false;
    }
  }
  return true;
}
/**
 * Downloads the response of an HTTP request into the given file, deleting any
 * existing copy first. Optional request data is posted as UTF-8; optional
 * headers are applied from a list of name-to-value maps.
 *
 * <p>NOTE(review): the {@code method} parameter is never used (the original
 * behaved the same way); the request method is whatever URLConnection picks.
 *
 * @return true when the file was written completely, false on any failure
 */
public static boolean HTTPRequestToFile(
    File file, String inUrl, String method, String data, List headers) {
  boolean success = false;
  // Best-effort delete of any stale copy of the target file.
  try {
    if (file.exists()) file.delete();
  } catch (Exception e) {
    Logger.getLogger(com.bombdiggity.util.HTTPUtils.class)
        .error(
            (new StringBuilder("HTTPUtils.HTTPRequestToFile delete ("))
                .append(file)
                .append("): ")
                .append(e.toString())
                .toString());
  }
  DataOutputStream printout = null;
  InputStream input = null;
  DataOutputStream dataOut = null;
  try {
    URL url = new URL(inUrl);
    URLConnection urlConn = url.openConnection();
    urlConn.setDoInput(true);
    urlConn.setDoOutput(true);
    urlConn.setUseCaches(false);

    // Apply request headers; each list element maps header names to values.
    if (headers != null) {
      for (Iterator iter = headers.iterator(); iter.hasNext(); ) {
        Map nameValuePair = (Map) iter.next();
        for (Iterator iter2 = nameValuePair.keySet().iterator(); iter2.hasNext(); ) {
          String key = (String) iter2.next();
          urlConn.setRequestProperty(key, (String) nameValuePair.get(key));
        }
      }
    }

    if (data != null) {
      byte[] inData = data.getBytes("UTF-8");
      printout = new DataOutputStream(urlConn.getOutputStream());
      printout.write(inData);
      printout.flush();
      printout.close();
      printout = null;
    }

    input = urlConn.getInputStream();
    dataOut = new DataOutputStream(new FileOutputStream(file, false));
    byte[] buf = new byte[0x10000];
    int bytesRead;
    while ((bytesRead = input.read(buf, 0, buf.length)) != -1) {
      dataOut.write(buf, 0, bytesRead);
      Thread.sleep(1L); // preserved from the original: crude per-chunk throttle
    }
    // Close the output before declaring success so buffered bytes are flushed.
    // The original never closed dataOut, risking a truncated file.
    dataOut.close();
    dataOut = null;
    input.close();
    input = null;
    success = true;
  } catch (Exception exception) {
    // Swallowed by design: the method reports failure via its return value.
  } finally {
    // The original leaked all three streams on any exception.
    if (printout != null)
      try {
        printout.close();
      } catch (Exception ignored) {
      }
    if (dataOut != null)
      try {
        dataOut.close();
      } catch (Exception ignored) {
      }
    if (input != null)
      try {
        input.close();
      } catch (Exception ignored) {
      }
  }
  return success;
}
/**
 * Returns a snapshot of the map's keys.
 *
 * <p>The original returned {@code map.keySet()} directly — a live view backed
 * by the map — so callers could observe or cause concurrent modification after
 * the lock was released, defeating the synchronized block. Copying while
 * holding the lock yields a stable, independent set.
 *
 * @return a new Set containing the keys at the time of the call
 */
public Set getKeySet() {
  synchronized (this) {
    return new HashSet(map.keySet());
  }
}
/**
 * Converts "/var/song.txt" (lines of "index<TAB>text", UTF-8) into the binary
 * file "/var/song.bin": an entry count followed by (int index, UTF string)
 * pairs, sorted by index via the TreeMap.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String[] args) throws Exception {
  Map<Integer, String> map = new TreeMap<Integer, String>();
  BufferedReader br =
      new BufferedReader(new InputStreamReader(new FileInputStream("/var/song.txt"), "UTF-8"));
  try {
    String line = null;
    while ((line = br.readLine()) != null) {
      System.out.println("line:" + line);
      try {
        String[] strs = line.split("\t");
        String str = " ";
        if (strs != null && strs.length >= 2) str = strs[1];
        map.put(Integer.valueOf(strs[0]), str);
      } catch (Exception e) {
        // Malformed line (non-numeric index): skip it, as before.
        System.out.println("Opuszczam linie " + e.toString());
      }
    }
  } finally {
    br.close(); // the original never closed the reader
  }

  DataOutputStream dos = new DataOutputStream(new FileOutputStream("/var/song.bin"));
  try {
    dos.writeInt(map.size());
    // entrySet avoids a second lookup per key.
    for (Map.Entry<Integer, String> entry : map.entrySet()) {
      dos.writeInt(entry.getKey().intValue());
      dos.writeUTF(entry.getValue());
    }
    dos.flush();
  } finally {
    dos.close(); // close even when a write throws
  }
}
public static void main(String[] args) throws IOException { if (args.length < 2) { System.out.println("Usage: WordSort inputfile outputfile"); return; } String inputfile = args[0]; String outputfile = args[1]; /* Create the word map. Each key is a word and each value is an * Integer that represents the number of times the word occurs * in the input file. */ Map<String, Integer> map = new TreeMap<String, Integer>(); // read every line of the input file Scanner scanner = new Scanner(new File(inputfile)); while (scanner.hasNext()) { String word = scanner.next(); Integer count = map.get(word); count = (count == null ? 1 : count + 1); map.put(word, count); } scanner.close(); // get the map's keys List<String> keys = new ArrayList<String>(map.keySet()); // write the results to the output file PrintWriter out = new PrintWriter(new FileWriter(outputfile)); for (String key : keys) { out.println(key + " : " + map.get(key)); } out.close(); }
/**
 * Generates Java source snippets and a tab-separated listing for every indexed
 * DMOZ URL whose category is in the specified-topics set. Paths are hard-coded
 * to the original author's environment.
 */
@Deprecated
public static void makeCode() {
  File infile = new File("/home/hoshun/dmoz/dmoz-indexed-url");
  File outfile = new File("/home/hoshun/dmoz/generateUrlToSpecifiedCategory");
  File outformatfile = new File("/home/hoshun/dmoz/dmoz-specified-url");
  CategorizedURLs urlToCategory = new CategorizedURLs(infile, true);
  SpecifiedTopics specifiedTopics = new SpecifiedTopics();
  PrintWriter out = null;
  PrintWriter out2 = null;
  try {
    out = new PrintWriter(outfile);
    out2 = new PrintWriter(outformatfile);
    Map<Integer, Category> idToCategory = urlToCategory.idToCategory;
    Map<Integer, String> idToUrl = urlToCategory.idToUrl;
    // entrySet avoids a second lookup per id.
    for (Map.Entry<Integer, Category> entry : idToCategory.entrySet()) {
      int id = entry.getKey();
      Category cat = entry.getValue();
      if (specifiedTopics.containsCategory(cat)) {
        String url = idToUrl.get(id);
        out.printf(
            "idToCategory.put(%s,new Category(\"%s\",\"%s\"));\n", id, cat.first, cat.second);
        out.printf("idToURL.put(%s,\"%s\");\n", id, url);
        out2.printf("%s\t%s\t%s\t%s\n", id, cat.first, cat.second, url);
      }
    }
  } catch (Exception e) {
    e.printStackTrace();
  } finally {
    // The original never closed out2 (buffered output could be lost) and
    // leaked out when an exception was thrown mid-loop.
    if (out != null) out.close();
    if (out2 != null) out2.close();
  }
}
/** * Inserts multiple mappings into the replica catalog. The input is a map indexed by the LFN. The * value for each LFN key is a collection of replica catalog entries. Note that this operation * will replace existing entries. * * @param x is a map from logical directory string to list of replica catalog entries. * @return the number of insertions. * @see edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry */ public int insert(Map x) { int result = 0; // shortcut sanity if (x == null || x.size() == 0) { return result; } for (Iterator i = x.keySet().iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Object val = x.get(lfn); if (val instanceof ReplicaCatalogEntry) { // permit misconfigured clients result += insert(lfn, (ReplicaCatalogEntry) val); } else { // this is how it should have been for (Iterator j = ((Collection) val).iterator(); j.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) j.next(); result += insert(lfn, rce); } } } return result; }
/**
 * {@inheritDoc}
 *
 * <p>Prepares this message for marshalling: publishes the owned-value keys and
 * values into the fields that get serialized, and pre-marshals every owned
 * key/value pair, the return value, and any filter-failed keys against their
 * respective cache contexts.
 *
 * @param ctx shared cache context used to resolve each entry's cache
 */
@Override
public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
  super.prepareMarshal(ctx);
  if (ownedVals != null) {
    // NOTE(review): keySet()/values() are live views over ownedVals, not
    // copies — assumes ownedVals is not mutated before serialization; confirm.
    ownedValKeys = ownedVals.keySet();
    ownedValVals = ownedVals.values();
    for (Map.Entry<IgniteTxKey, CacheVersionedValue> entry : ownedVals.entrySet()) {
      GridCacheContext cacheCtx = ctx.cacheContext(entry.getKey().cacheId());
      // Both the key and its versioned value must be made marshal-ready.
      entry.getKey().prepareMarshal(cacheCtx);
      entry.getValue().prepareMarshal(cacheCtx.cacheObjectContext());
    }
  }
  if (retVal != null && retVal.cacheId() != 0) {
    GridCacheContext cctx = ctx.cacheContext(retVal.cacheId());
    assert cctx != null : retVal.cacheId();
    retVal.prepareMarshal(cctx);
  }
  if (filterFailedKeys != null) {
    for (IgniteTxKey key : filterFailedKeys) {
      GridCacheContext cctx = ctx.cacheContext(key.cacheId());
      key.prepareMarshal(cctx);
    }
  }
}
/** Returns the length of the longest tag (map key), or 0 when there are none. */
private int maxTagLength() {
  int longest = 0;
  for (Iterator<String> it = gappedAlignments.keySet().iterator(); it.hasNext(); ) {
    int len = it.next().length();
    if (len > longest) {
      longest = len;
    }
  }
  return longest;
}
/** Encode a query string. The input Map contains names indexing Object[]. */ public static String encodeQueryString(Map parameters) { final StringBuilder sb = new StringBuilder(100); boolean first = true; try { for (Object o : parameters.keySet()) { final String name = (String) o; final Object[] values = (Object[]) parameters.get(name); for (final Object currentValue : values) { if (currentValue instanceof String) { if (!first) sb.append('&'); sb.append(URLEncoder.encode(name, NetUtils.STANDARD_PARAMETER_ENCODING)); sb.append('='); sb.append( URLEncoder.encode((String) currentValue, NetUtils.STANDARD_PARAMETER_ENCODING)); first = false; } } } } catch (UnsupportedEncodingException e) { // Should not happen as we are using a required encoding throw new OXFException(e); } return sb.toString(); }
/**
 * Prints the single extensions of the given SingleResponse and checks their
 * count against the expected value.
 *
 * @param sr the single response to inspect; null yields a warning and false
 * @param singleExtCount the expected number of single extensions
 * @return true when the count matches, false otherwise
 */
private static boolean checkSingleExts(OCSPResponse.SingleResponse sr, int singleExtCount) {
  // Explicit null check instead of catching NullPointerException as control
  // flow, which could also mask unrelated NPEs from getSingleExtensions().
  if (sr == null) {
    System.out.println("Warning: Sent null singleResponse into checkSingleExts");
    return false;
  }
  Map<String, Extension> singleExts = sr.getSingleExtensions();

  // values() iteration: the keys were only used to look the values back up.
  for (Extension ext : singleExts.values()) {
    System.out.println("singleExtension: " + ext);
  }

  if (singleExts.size() != singleExtCount) {
    System.out.println(
        "Single Extension count mismatch, "
            + "expected "
            + singleExtCount
            + ", got "
            + singleExts.size());
    return false;
  } else {
    return true;
  }
}
/** Returns a deep copy of this compound tag: each nested tag is copied recursively. */
public Tag copy() {
  CompoundTag tag = new CompoundTag(getName());
  // entrySet avoids a second map lookup per key.
  for (Map.Entry<String, Tag> entry : tags.entrySet()) {
    tag.put(entry.getKey(), entry.getValue().copy());
  }
  return tag;
}
/** gets map for all languages */ public Map<String, String> getCaptionToQueryMap(Collection<Document> docs) { // identify all the langs in the docs, and the corresponding lexicons Set<String> languages = IndexUtils.allLanguagesInDocs(docs); Set<Lexicon1Lang> lexicons = new LinkedHashSet<Lexicon1Lang>(); for (String lang : languages) { Lexicon1Lang lex = languageToLexicon.get(lang); if (lex != null) lexicons.add(lex); // this lexicon doesn't know about this language else log.warn("Warning: no support for " + lang + " in lexicon " + name); } Map<String, String> result = new LinkedHashMap<String, String>(); // aggregate results for each lang into result for (Lexicon1Lang lex : lexicons) { Map<String, String> resultsForThisLang = lex.captionToExpandedQuery; for (String caption : resultsForThisLang.keySet()) { String queryThisLang = resultsForThisLang.get(caption); String query = result.get(caption); // if caption doesn't exist already, create a new entry, or else add to the existing set of // docs that match this caption if (query == null) result.put(caption, queryThisLang); else result.put(caption, query + "|" + queryThisLang); } } return result; }
public List<String> find() throws IOException { // Set<String> g1 = new HashSet<String>(Arrays.asList("HRAS", "NRAS", "KRAS")); // Set<String> g2 = new HashSet<String>(Arrays.asList("BRAF")); Set<String> g1 = new HashSet<String>(Arrays.asList("HRAS")); Set<String> g2 = new HashSet<String>(Arrays.asList("NRAS", "KRAS")); Map<String, Double> pvals = calcDifferencePvals(g1, g2); System.out.println("pvals.size() = " + pvals.size()); List<String> list = FDR.select(pvals, null, fdrThr); System.out.println("result size = " + list.size()); Map<String, Boolean> dirs = getChangeDirections(list, g1, g2); int up = 0; int dw = 0; for (String gene : dirs.keySet()) { Boolean d = dirs.get(gene); if (d) up++; else dw++; } System.out.println("up = " + up); System.out.println("dw = " + dw); return list; }
/** @see Graph#edgeSet() */
public Set<E> edgeSet() {
  // Lazily create the unmodifiable view on first access, then reuse it.
  Set<E> view = unmodifiableEdgeSet;
  if (view == null) {
    view = Collections.unmodifiableSet(edgeMap.keySet());
    unmodifiableEdgeSet = view;
  }
  return view;
}
/**
 * Rebuilds the "old content" grouping from the previous inspection run: maps
 * each group name to the old problem elements that are no longer present in
 * the current contents.
 *
 * @return group name to old elements missing from current results, or null
 *     when no old problem elements were recorded
 */
@Override
public Map<String, Set<RefEntity>> getOldContent() {
  if (myOldProblemElements == null) return null;
  final Map<String, Set<RefEntity>> oldContents =
      new com.intellij.util.containers.HashMap<String, Set<RefEntity>>();
  final Set<RefEntity> elements = myOldProblemElements.keySet();
  for (RefEntity element : elements) {
    // Group by the ref-manager group name for RefElements, else by entity name.
    String groupName =
        element instanceof RefElement
            ? element.getRefManager().getGroupName((RefElement) element)
            : element.getName();
    final Set<RefEntity> collection = myContents.get(groupName);
    if (collection != null) {
      final Set<RefEntity> currentElements = new HashSet<RefEntity>(collection);
      // Still present in the current run: not "old", so skip it.
      if (RefUtil.contains(element, currentElements)) continue;
    }
    Set<RefEntity> oldContent = oldContents.get(groupName);
    if (oldContent == null) {
      oldContent = new HashSet<RefEntity>();
      oldContents.put(groupName, oldContent);
    }
    oldContent.add(element);
  }
  return oldContents;
}
private synchronized void init() throws SQLException { if (isClosed) return; // do tables exists? Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery(TABLE_NAMES_SELECT_STMT); ArrayList<String> missingTables = new ArrayList(TABLES.keySet()); while (rs.next()) { String tableName = rs.getString("name"); missingTables.remove(tableName); } for (String missingTable : missingTables) { try { Statement createStmt = conn.createStatement(); // System.out.println("Adding table "+ missingTable); createStmt.executeUpdate(TABLES.get(missingTable)); createStmt.close(); } catch (Exception e) { System.err.println(e.getClass().getName() + ": " + e.getMessage()); } } }
/**
 * Remove records telling what entity caps node a contact has.
 *
 * @param contact the contact
 */
public void removeContactCapsNode(Contact contact) {
  Caps caps = null;
  String lastRemovedJid = null;

  // Drop every cached caps entry whose full JID resolves to this contact's
  // bare address. Iterator.remove keeps the traversal safe while mutating.
  Iterator<String> iter = userCaps.keySet().iterator();
  while (iter.hasNext()) {
    String jid = iter.next();
    if (StringUtils.parseBareAddress(jid).equals(contact.getAddress())) {
      // Remember the last matching entry so one event covers the contact.
      caps = userCaps.get(jid);
      lastRemovedJid = jid;
      iter.remove();
    }
  }

  // fire only for the last one, at the end the event out
  // of the protocol will be one and for the contact
  if (caps != null) {
    UserCapsNodeListener[] listeners;
    // Snapshot the listener list under its lock, then notify outside it.
    synchronized (userCapsNodeListeners) {
      listeners = userCapsNodeListeners.toArray(NO_USER_CAPS_NODE_LISTENERS);
    }
    if (listeners.length != 0) {
      String nodeVer = caps.getNodeVer();
      for (UserCapsNodeListener listener : listeners)
        listener.userCapsNodeRemoved(lastRemovedJid, nodeVer, false);
    }
  }
}