// ---------------------------------------------------------------------------
private void printDependencies() throws TablesawException {
  m_printedDependencies = new HashSet<String>();

  try {
    PrintWriter pw = new PrintWriter(new FileWriter("dependency.txt"));
    pw.println("Targets marked with a * have already been printed");

    // Create a reduced set of rules to print: drop every rule that appears as a
    // dependency of another rule, leaving only the roots.
    Set<String> ruleNames = new HashSet<String>();
    for (String name : m_nameRuleMap.keySet())
      ruleNames.add(name);

    for (String name : m_nameRuleMap.keySet()) {
      Rule rule = m_nameRuleMap.get(name);

      for (String dep : rule.getDependNames())
        ruleNames.remove(dep);

      for (Rule dep : rule.getDependRules()) {
        if (dep.getName() != null)
          ruleNames.remove(dep.getName());
      }
    }

    for (String name : ruleNames) {
      if (!name.startsWith(NAMED_RULE_PREFIX))
        printDependencies(name, pw, 0);
    }

    pw.close();
  } catch (IOException ioe) {
    throw new TablesawException("Cannot write to file dependency.txt", -1);
  }
}
// Private methods

private static int uniqueTeams(List<Team> teams) {
  Set<Team> s = new HashSet<Team>();
  for (Team t : teams) {
    s.add(t);
  }
  return s.size();
}
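// Design note: the loop above is equivalent to the one-liner sketched below; both
// count distinct teams as defined by Team's equals()/hashCode(). With the default
// identity-based implementations, every instance would count as unique.
private static int uniqueTeamsCompact(List<Team> teams) {
  return new HashSet<Team>(teams).size();
}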
private static PencilPosition findShortestRoute(int[][] maze) {
  // all found solutions to the maze
  PriorityQueue<PencilPosition> solutions =
      new PriorityQueue<PencilPosition>(5, new PencilPositionComparator());
  // breadth-first search queue
  Queue<PencilPosition> routes = new LinkedList<PencilPosition>();
  // set of already visited positions
  Set<PencilPosition> visitedPositions = new HashSet<PencilPosition>();

  // add the starting position, which is always (0,0)
  routes.add(new PencilPosition(0, 0, false, null));

  while (!routes.isEmpty()) {
    PencilPosition position = routes.poll();

    // if this is the destination position then we've found a solution
    if (0 == maze[position.row][position.column]) {
      solutions.add(position);
      continue;
    }

    // if we haven't already visited this position
    if (!visitedPositions.contains(position)) {
      routes.addAll(findPossibleRoutes(position, maze));
      visitedPositions.add(position);
    }
  }

  return solutions.poll();
}
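// Illustrative call (hypothetical maze encoding, inferred from the check above: the
// search starts at (0,0) and a cell holding 0 marks the destination; how walls and
// pencil state are encoded lives in findPossibleRoutes, which is not shown here).
int[][] maze = {
  {1, 1, 1},
  {1, 1, 1},
  {1, 1, 0}, // 0 = destination
};
PencilPosition shortest = findShortestRoute(maze); // null if the maze is unsolvable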
private static boolean checkDependants(
    final IdeaPluginDescriptor pluginDescriptor,
    final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
    final Condition<PluginId> check,
    final Set<PluginId> processed) {
  processed.add(pluginDescriptor.getPluginId());
  final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
  final Set<PluginId> optionalDependencies =
      new HashSet<PluginId>(Arrays.asList(pluginDescriptor.getOptionalDependentPluginIds()));
  for (final PluginId dependentPluginId : dependentPluginIds) {
    if (processed.contains(dependentPluginId)) continue;

    // TODO[yole] should this condition be a parameter?
    if (isModuleDependency(dependentPluginId)
        && (ourAvailableModules.isEmpty()
            || ourAvailableModules.contains(dependentPluginId.getIdString()))) {
      continue;
    }

    if (!optionalDependencies.contains(dependentPluginId)) {
      if (!check.value(dependentPluginId)) {
        return false;
      }
      final IdeaPluginDescriptor dependantPluginDescriptor =
          pluginId2Descriptor.fun(dependentPluginId);
      if (dependantPluginDescriptor != null
          && !checkDependants(dependantPluginDescriptor, pluginId2Descriptor, check, processed)) {
        return false;
      }
    }
  }
  return true;
}
/** @see Graph#getAllEdges(Object, Object) */
public Set<E> getAllEdges(V sourceVertex, V targetVertex) {
  Set<E> edges = null;

  if (containsVertex(sourceVertex) && containsVertex(targetVertex)) {
    edges = new ArrayUnenforcedSet<E>();

    Iterator<E> iter = getEdgeContainer(sourceVertex).vertexEdges.iterator();

    while (iter.hasNext()) {
      E e = iter.next();

      boolean equalStraight =
          sourceVertex.equals(getEdgeSource(e)) && targetVertex.equals(getEdgeTarget(e));

      boolean equalInverted =
          sourceVertex.equals(getEdgeTarget(e)) && targetVertex.equals(getEdgeSource(e));

      if (equalStraight || equalInverted) {
        edges.add(e);
      }
    }
  }

  return edges;
}
public static void main(String[] args) {
  try {
    if (args.length == 1) {
      URL url = new URL(args[0]);
      System.out.println("Content-Type: " + url.openConnection().getContentType());
      // Vector links = extractLinks(url);
      // for (int n = 0; n < links.size(); n++) {
      //   System.out.println((String) links.elementAt(n));
      // }
      Set links = extractLinksWithText(url).entrySet();
      Iterator it = links.iterator();
      while (it.hasNext()) {
        Map.Entry en = (Map.Entry) it.next();
        String strLink = (String) en.getKey();
        String strText = (String) en.getValue();
        System.out.println(strLink + " \"" + strText + "\" ");
      }
      return;
    } else if (args.length == 2) {
      writeURLtoFile(new URL(args[0]), args[1]);
      return;
    }
  } catch (Exception e) {
    System.err.println("An error occurred: ");
    e.printStackTrace();
  }
  System.err.println("Usage: java SaveURL <url> [<file>]");
  System.err.println("Saves a URL to a file.");
  System.err.println("If no file is given, extracts hyperlinks on url to console.");
}
public static void main(final String[] args) throws FileNotFoundException {
  if (args.length == 0) {
    System.out.println(usage);
    return;
  }

  Scanner sc = new Scanner(new File(dictFile));
  ArrayList<String> dict = new ArrayList<String>();
  while (sc.hasNext()) {
    dict.add(sc.next());
  }

  if (args[0].equals("list") && args.length == 2) {
    String word = args[1];
    System.out.println(fmtList(wordLadder(word, dict)));
  } else if (args[0].equals("top") && args.length == 2) {
    int num = Integer.parseInt(args[1]);
    System.out.println(fmtPairs(mostLadderable(num, dict)));
  } else if (args[0].equals("chain") && args.length == 3) {
    int steps = Integer.parseInt(args[1]);
    List<String> start = new ArrayList<String>();
    start.add(args[2]);
    Set<String> chain = wordChain(steps, start, dict);
    System.out.println(chain.size());
  } else {
    System.out.println(usage);
    return;
  }
}
public void processMethodMapping(
    String className,
    int firstLineNumber,
    int lastLineNumber,
    String methodReturnType,
    String methodName,
    String methodArguments,
    String newMethodName) {
  // Original class name -> obfuscated method names.
  Map methodMap = (Map) classMethodMap.get(className);
  if (methodMap == null) {
    methodMap = new HashMap();
    classMethodMap.put(className, methodMap);
  }

  // Obfuscated method name -> methods.
  Set methodSet = (Set) methodMap.get(newMethodName);
  if (methodSet == null) {
    methodSet = new LinkedHashSet();
    methodMap.put(newMethodName, methodSet);
  }

  // Add the method information.
  methodSet.add(
      new MethodInfo(
          firstLineNumber, lastLineNumber, methodReturnType, methodArguments, methodName));
}
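// Usage sketch: one obfuscated name can cover several original methods, which is why
// the inner value is a set. After the two calls below ("mapper" stands for an
// instance of the enclosing class, which this excerpt does not name),
// classMethodMap.get("com.example.Foo") maps "a" to two MethodInfo entries, one per
// doWork overload.
mapper.processMethodMapping("com.example.Foo", 10, 20, "void", "doWork", "int", "a");
mapper.processMethodMapping("com.example.Foo", 30, 40, "void", "doWork", "long", "a");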
/**
 * Returns an array containing all records in the given section grouped into RRsets.
 *
 * @see RRset
 * @see Section
 */
public RRset[] getSectionRRsets(int section) {
  if (sections[section] == null) return emptyRRsetArray;

  List sets = new LinkedList();
  Record[] recs = getSectionArray(section);
  Set hash = new HashSet();
  for (int i = 0; i < recs.length; i++) {
    Name name = recs[i].getName();
    boolean newset = true;
    if (hash.contains(name)) {
      for (int j = sets.size() - 1; j >= 0; j--) {
        RRset set = (RRset) sets.get(j);
        if (set.getType() == recs[i].getRRsetType()
            && set.getDClass() == recs[i].getDClass()
            && set.getName().equals(name)) {
          set.addRR(recs[i]);
          newset = false;
          break;
        }
      }
    }
    if (newset) {
      RRset set = new RRset(recs[i]);
      sets.add(set);
      hash.add(name);
    }
  }
  return (RRset[]) sets.toArray(new RRset[sets.size()]);
}
/**
 * Compute the set of all IA32 opcodes that have emit methods in the Assembler. This method uses
 * the stylized form of all emit method names in the Assembler to extract the opcode of each one.
 * It returns a set of all such distinct names, as a set of Strings.
 *
 * @param emitters the set of all emit methods in the Assembler
 * @return the set of all opcodes handled by the Assembler
 */
private static Set<String> getOpcodes(Method[] emitters) {
  Set<String> s = new HashSet<String>();
  for (int i = 0; i < emitters.length; i++) {
    String name = emitters[i].getName();
    if (DEBUG) System.err.println(name);
    if (name.startsWith("emit")) {
      int posOf_ = name.indexOf('_');
      if (posOf_ != -1) {
        String opcode = name.substring(4, posOf_);
        if (!excludedOpcodes.contains(opcode)) {
          s.add(opcode);
        }
      } else {
        String opcode = name.substring(4);
        // make sure it is an opcode
        if (opcode.equals(opcode.toUpperCase(Locale.getDefault()))) {
          if (!excludedOpcodes.contains(opcode)) {
            s.add(opcode);
          }
        }
      }
    }
  }
  return s;
}
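// Naming-convention sketch (FakeAssembler is hypothetical; the real emitters come
// from the VM's Assembler class, and DEBUG/excludedOpcodes are fields of the
// enclosing class). "emitADD_Reg_Imm" yields "ADD" via the underscore branch, while
// "emitNOP" yields "NOP" via the all-uppercase branch.
class FakeAssembler {
  public void emitADD_Reg_Imm(int reg, int imm) {}
  public void emitNOP() {}
}

Set<String> opcodes = getOpcodes(FakeAssembler.class.getDeclaredMethods());
// opcodes now contains "ADD" and "NOP" (assuming neither is excluded)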
private static void addToCache(
    PersistentHashMap<String, Set<String>> cache, String key, String value) throws IOException {
  Set<String> values = cache.get(key);
  if (values == null) values = new THashSet<String>();
  values.add(value);
  cache.put(key, values);
}
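// Design note: the unconditional cache.put(key, values) matters here. If
// PersistentHashMap is the IntelliJ disk-backed map (as the THashSet usage
// suggests), get() returns a deserialized copy, so mutating the returned set
// without re-putting it would leave the stored value stale.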
/** gets map for all languages */
public Map<String, String> getCaptionToQueryMap(Collection<Document> docs) {
  // identify all the langs in the docs, and the corresponding lexicons
  Set<String> languages = IndexUtils.allLanguagesInDocs(docs);
  Set<Lexicon1Lang> lexicons = new LinkedHashSet<Lexicon1Lang>();
  for (String lang : languages) {
    Lexicon1Lang lex = languageToLexicon.get(lang);
    if (lex != null) lexicons.add(lex);
    // this lexicon doesn't know about this language
    else log.warn("Warning: no support for " + lang + " in lexicon " + name);
  }

  Map<String, String> result = new LinkedHashMap<String, String>();
  // aggregate results for each lang into result
  for (Lexicon1Lang lex : lexicons) {
    Map<String, String> resultsForThisLang = lex.captionToExpandedQuery;
    for (String caption : resultsForThisLang.keySet()) {
      String queryThisLang = resultsForThisLang.get(caption);
      String query = result.get(caption);
      // if the caption doesn't exist already, create a new entry; otherwise OR the
      // existing query together with this language's query using "|"
      if (query == null) result.put(caption, queryThisLang);
      else result.put(caption, query + "|" + queryThisLang);
    }
  }
  return result;
}
// returns a macro adder for the given morph item
private MacroAdder getMacAdder(MorphItem mi) {
  // check map
  MacroAdder retval = macAdderMap.get(mi);
  if (retval != null) return retval;

  // set up macro adder
  IntHashSetMap macrosFromLex = new IntHashSetMap();
  String[] newMacroNames = mi.getMacros();
  List<MacroItem> macroItems = new ArrayList<MacroItem>();
  for (int i = 0; i < newMacroNames.length; i++) {
    Set<FeatureStructure> featStrucs = (Set<FeatureStructure>) _macros.get(newMacroNames[i]);
    if (featStrucs != null) {
      for (Iterator<FeatureStructure> fsIt = featStrucs.iterator(); fsIt.hasNext(); ) {
        FeatureStructure fs = fsIt.next();
        macrosFromLex.put(fs.getIndex(), fs);
      }
    }
    MacroItem macroItem = _macroItems.get(newMacroNames[i]);
    if (macroItem != null) {
      macroItems.add(macroItem);
    } else {
      // should be checked earlier too
      System.err.println(
          "Warning: macro " + newMacroNames[i] + " not found for word '" + mi.getWord() + "'");
    }
  }
  retval = new MacroAdder(macrosFromLex, macroItems);

  // update map and return
  macAdderMap.put(mi, retval);
  return retval;
}
private void checkStartup(
    Map<String, ServiceData> map,
    List<ServiceData> start,
    ServiceData sd,
    Set<ServiceData> cyclic) {
  if (sd.after.isEmpty() || start.contains(sd)) return;

  if (cyclic.contains(sd)) {
    reporter.error("Cyclic dependency for " + sd.name);
    return;
  }

  cyclic.add(sd);

  for (String dependsOn : sd.after) {
    if (dependsOn.equals("boot")) continue;

    ServiceData deps = map.get(dependsOn);
    if (deps == null) {
      reporter.error("No such service " + dependsOn + " but " + sd.name + " depends on it");
    } else {
      checkStartup(map, start, deps, cyclic);
    }
  }
  start.add(sd);
}
/**
 * Read the topic-document relations.
 *
 * @param fp path to the relation file, one "docid classid" pair per line
 * @return an unmodifiable map from topic/class id to the set of document ids
 */
public static Map<Integer, Set<Integer>> readTopicDocuments(String fp) {
  Map<Integer, Set<Integer>> res = new HashMap<Integer, Set<Integer>>();
  if (fp == null || fp.length() == 0) return res;

  try {
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(new FileInputStream(new File(fp))));
    String line = null;
    while ((line = reader.readLine()) != null) {
      String[] fields = line.split(" ");
      int docid = Integer.parseInt(fields[0]);
      int classid = Integer.parseInt(fields[1]);
      if (res.containsKey(classid)) {
        res.get(classid).add(docid);
      } else {
        Set<Integer> cset = new HashSet<Integer>();
        cset.add(docid);
        res.put(classid, cset);
      }
    }
    reader.close();
  } catch (FileNotFoundException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }
  return Collections.unmodifiableMap(res);
}
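// Round-trip sketch (hypothetical file contents; the "docid classid" line format is
// inferred from the parser above; assumes an enclosing context that may throw
// IOException).
File tmp = File.createTempFile("topic-docs", ".txt");
PrintWriter out = new PrintWriter(new FileWriter(tmp));
out.println("12 3");
out.println("14 3");
out.println("7 1");
out.close();
Map<Integer, Set<Integer>> byTopic = readTopicDocuments(tmp.getPath());
// byTopic.get(3) -> {12, 14}, byTopic.get(1) -> {7}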
public void writeMap(Cluster cluster, float yBar) {
  Set<Submission> subSet = new HashSet<Submission>(cluster.size());
  String documents = "";
  for (int i = 0; i < cluster.size(); i++) {
    Submission sub = submissions.elementAt(cluster.getSubmissionAt(i));
    documents += sub.name + " ";
    subSet.add(sub);
  }
  documents = documents.trim();
  String theme =
      ThemeGenerator.generateThemes(subSet, this.program.get_themewords(), false, this.program);
  mapString +=
      "<area shape=\"rect\" coords=\""
          + (cluster.x - 2)
          + ","
          + (yBar)
          + ","
          + (cluster.x + 2)
          + ","
          + (cluster.y + 2)
          + "\" onMouseover=\"set('"
          + cluster.size()
          + "','"
          + trimStringToLength(String.valueOf(cluster.getSimilarity()), 6)
          + "','"
          + trimStringToLength(documents, 50)
          + "','"
          + theme
          + "')\" ";
  // if (cluster.size() == 1)
  //   mapString += "href=\"submission" + cluster.getSubmissionAt(0) + ".html\">\n";
  // else
  mapString += "nohref>\n";
}
/** Check if getSubBuilders properly predicts the output. */
public static void testSubBuilders() throws Exception {
  Workspace ws = Workspace.getWorkspace(new File("test/ws"));
  Project project = ws.getProject("p4-sub");

  Collection<? extends Builder> bs = project.getSubBuilders();
  assertNotNull(bs);
  assertEquals(3, bs.size());
  Set<String> names = new HashSet<String>();
  for (Builder b : bs) {
    names.add(b.getBsn());
  }
  assertTrue(names.contains("p4-sub.a"));
  assertTrue(names.contains("p4-sub.b"));
  assertTrue(names.contains("p4-sub.c"));

  File[] files = project.build();
  assertTrue(project.check());

  System.err.println(Processor.join(project.getErrors(), "\n"));
  System.err.println(Processor.join(project.getWarnings(), "\n"));
  assertEquals(0, project.getErrors().size());
  assertEquals(0, project.getWarnings().size());
  assertNotNull(files);
  assertEquals(3, files.length);

  for (File file : files) {
    Jar jar = new Jar(file);
    Manifest m = jar.getManifest();
    assertTrue(names.contains(m.getMainAttributes().getValue("Bundle-SymbolicName")));
  }
}
private void handleRefresh(Set<IFileData> oldfiles) {
  IvyXmlWriter xw = pybase_main.beginMessage("RESOURCE");
  int ctr = 0;
  for (IFileData fd : all_files) {
    IFileData old = null;
    for (IFileData ofd : oldfiles) {
      if (ofd.getFile().equals(fd.getFile())) {
        old = ofd;
        break;
      }
    }
    if (old == null) {
      outputDelta(xw, "ADDED", fd);
      ++ctr;
    } else if (old.getLastDateLastModified() != fd.getLastDateLastModified()) {
      oldfiles.remove(old);
      outputDelta(xw, "CHANGED", fd);
      ++ctr;
    } else {
      oldfiles.remove(old);
    }
  }
  for (IFileData fd : oldfiles) {
    outputDelta(xw, "REMOVED", fd);
    ++ctr;
  }
  if (ctr > 0) {
    pybase_main.finishMessage(xw);
  }
}
public void testFindCorrectEntries() throws Exception {
  Set expected = new HashSet();
  for (String link : links) {
    expected.add(srcUrl + link);
  }
  assertEquals(expected, extractUrls(withLinks));
}
private static void fetchMulitpleOutlinkMappings(
    HashMap<String, Set<String>> outlinkmapping,
    JSONArray arr,
    HashMap<String, JSONArray> inlinkmapping,
    Set<String> crawledDocuments)
    throws FileNotFoundException {
  for (int i = 0; i < arr.size(); i++) {
    Set<String> urlset = new HashSet<String>();
    try {
      JSONObject obj = (JSONObject) arr.get(i);
      String id = (String) obj.get("_id");
      JSONArray outlinks = new JSONArray();
      if (obj.containsKey("fields")
          && ((JSONObject) obj.get("fields")).containsKey("out_links"))
        outlinks = (JSONArray) ((JSONObject) obj.get("fields")).get("out_links");
      // keep only outlinks that point to documents we actually crawled
      for (int j = 0; j < outlinks.size(); j++) {
        try {
          if (crawledDocuments.contains((String) outlinks.get(j)))
            urlset.add((String) outlinks.get(j));
        } catch (Exception e) {
          System.out.println("Did not get the outlink " + e.toString());
        }
      }
      outlinkmapping.put(id, urlset);
    } catch (Exception e) {
      System.out.println("Did not get the outlink " + e.toString());
    }
  }
}
static {
  Set<EncodingRule> rules = new HashSet<EncodingRule>();
  rules.add(new EncodingRule("*", "%2A"));
  rules.add(new EncodingRule("+", "%20"));
  rules.add(new EncodingRule("%7E", "~"));
  ENCODING_RULES = Collections.unmodifiableSet(rules);
}
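// Application sketch: these three rules patch the differences between
// URLEncoder.encode() output and RFC 3986 percent-encoding (the form OAuth-style
// signing expects). EncodingRule's API is not shown in this excerpt, so apply() is a
// hypothetical replace-all method; the rules are independent, so iteration order
// over the HashSet does not matter.
String encoded = URLEncoder.encode("a b*c~", "UTF-8"); // "a+b*c%7E"
for (EncodingRule rule : ENCODING_RULES) {
  encoded = rule.apply(encoded); // hypothetical: replace every occurrence
}
// encoded is now "a%20b%2Ac~"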
/**
 * Parse a repository document.
 *
 * @param url
 * @throws IOException
 * @throws XmlPullParserException
 * @throws Exception
 */
void parseDocument(URL url) throws IOException, XmlPullParserException, Exception {
  if (!visited.contains(url)) {
    visited.add(url);
    try {
      System.out.println("Visiting: " + url);
      InputStream in = null;

      if (url.getPath().endsWith(".zip")) {
        ZipInputStream zin = new ZipInputStream(url.openStream());
        ZipEntry entry = zin.getNextEntry();
        while (entry != null) {
          if (entry.getName().equals("repository.xml")) {
            in = zin;
            break;
          }
          entry = zin.getNextEntry();
        }
      } else {
        in = url.openStream();
      }

      // A zip without a repository.xml entry leaves `in` null; fail clearly rather
      // than letting InputStreamReader throw a NullPointerException.
      if (in == null) throw new IOException("No repository.xml found in " + url);

      Reader reader = new InputStreamReader(in);
      XmlPullParser parser = new KXmlParser();
      parser.setInput(reader);
      parseRepository(parser);
    } catch (MalformedURLException e) {
      System.out.println("Cannot create connection to url");
    }
  }
}
/**
 * Creates an NL fragment project along with the locale-specific properties files.
 *
 * @throws CoreException
 * @throws IOException
 * @throws InvocationTargetException
 * @throws InterruptedException
 */
private void internationalizePlugins(List plugins, List locales, Map overwrites)
    throws CoreException, IOException, InvocationTargetException, InterruptedException {
  Set created = new HashSet();

  for (Iterator it = plugins.iterator(); it.hasNext(); ) {
    IPluginModelBase plugin = (IPluginModelBase) it.next();

    for (Iterator iter = locales.iterator(); iter.hasNext(); ) {
      Locale locale = (Locale) iter.next();

      IProject project = getNLProject(plugin, locale);

      if (created.contains(project)
          || overwriteWithoutAsking
          || !project.exists()
          || OVERWRITE == overwrites.get(project.getName())) {
        if (!created.contains(project) && project.exists()) {
          project.delete(true, getProgressMonitor());
        }

        if (!created.contains(project)) {
          createNLFragment(plugin, project, locale);
          created.add(project);
          project.getFolder(RESOURCE_FOLDER_PARENT).create(false, true, getProgressMonitor());
        }

        project
            .getFolder(RESOURCE_FOLDER_PARENT)
            .getFolder(locale.toString())
            .create(true, true, getProgressMonitor());
        createLocaleSpecificPropertiesFile(project, plugin, locale);
      }
    }
  }
}
/**
 * Constructs the hashtag co-occurrence graph used to calculate the degree.
 *
 * @param tweetList
 * @return the graph as a map from each hashtag to the set of hashtags it co-occurs with
 */
private HashMap<String, Set<String>> getHashTagGraph(List<Tweet> tweetList) {
  HashMap<String, Set<String>> hashTagGraph = new HashMap<String, Set<String>>(20);

  // Iterate through each tweet's hashtags to construct the graph
  for (Tweet tweet : tweetList) {
    Set<String> hashTags = tweet.getHashTags();
    if (hashTags.size() >= 2) {
      for (String hashTag : hashTags) {
        for (String innerHashTag : hashTags) {
          if (!hashTag.equalsIgnoreCase(innerHashTag)) {
            Set<String> entries = hashTagGraph.get(hashTag);
            if (entries == null) {
              entries = new HashSet<String>();
            }
            entries.add(innerHashTag);
            hashTagGraph.put(hashTag, entries);
          }
        }
      }
    }
  }
  return hashTagGraph;
}
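// Degree sketch: once the graph is built, a hashtag's degree is simply the size of
// its neighbor set. (tweetList stands for whatever tweets the caller supplies.)
HashMap<String, Set<String>> graph = getHashTagGraph(tweetList);
for (Map.Entry<String, Set<String>> entry : graph.entrySet()) {
  System.out.println(entry.getKey() + " has degree " + entry.getValue().size());
}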
// ---------------------------------------------------------------------------
public Rule findTargetRule(String target) throws TablesawException {
  Rule rule = null;

  if (m_resolved) {
    ArrayList<String> posTargets;
    String posTarget = target;

    if (m_noRulesList.contains(target)) return (null);

    if ((rule = m_locatedRules.get(target)) != null) return (rule);

    // First look in name map
    rule = m_nameRuleMap.get(target);

    if (rule == null) {
      // Now look for targets
      rule = m_targetRuleMap.get(posTarget);

      if (rule == null) {
        posTargets = m_fileManager.getPossibleFiles(posTarget);
        for (String t : posTargets) {
          rule = m_targetRuleMap.get(t);
          if (rule != null) break;
        }
      }
    }

    Debug.print("Rule for " + target + " is " + rule);

    if (rule != null) {
      m_locatedRules.put(target, rule);
    } else m_noRulesList.add(target);
  }

  return (rule);
}
/** TODO */
public AnnotatedLexicalEntry integrate(LexicalEntry e) {
  String gloss = e.getAnnotations().get(CrownAnnotations.Gloss.class);
  POS pos = e.getPos();
  List<Relation> relations = e.getAnnotations().get(CrownAnnotations.Relations.class);

  // Try getting a synonym operation first
  Set<String> synonyms = new HashSet<String>();
  for (Relation r : relations) {
    if (r.getType().equals(Relation.RelationType.SYNONYM)) synonyms.add(r.getTargetLemma());
  }

  if (synonyms.size() > 0) {
    Duple<CrownOperations.Reason, ISynset> synonymOp =
        getEstimatedSynonym(e.getLemma(), synonyms, pos, gloss);

    if (synonymOp != null && synonymOp.y != null) {
      AnnotatedLexicalEntry ale = new AnnotatedLexicalEntryImpl(e);
      ale.setOp(CrownOperations.Synonym.class, synonymOp.x, synonymOp.y);
      return ale;
    }
  }

  return null;
}
/**
 * Prints a parsed TCRL refinement law. For debug purposes only.
 *
 * @param r The AST object
 */
public static void printRefFunction(TCRL_AST r) {
  System.out.println();
  System.out.println("**************************************************");
  System.out.println("RefFunction");
  System.out.println("**************************************************");
  System.out.println("Name: " + r.getName());
  System.out.println("Preamble: " + r.getPreamble());
  System.out.println("Rules: ");

  NodeRules rules = r.getRules();
  NodeRule rule;
  Set<String> keys = rules.getKeys();
  Iterator<String> iter = keys.iterator();
  while (iter.hasNext()) {
    rule = rules.getRule(iter.next());
    if (rule instanceof RuleSynonym) {
      printRuleSynonym((RuleSynonym) rule);
    } else {
      printRuleRefinement((RuleRefinement) rule);
    }
  }
}
/**
 * Read the transition matrix from the file.
 *
 * @param fp path to the matrix file, one "from to" pair per line
 * @return an unmodifiable map from source state to the set of reachable states
 */
public static Map<Integer, Set<Integer>> readTransitionMatrix(String fp) {
  Map<Integer, Set<Integer>> res = new HashMap<Integer, Set<Integer>>();
  if (fp == null || fp.length() == 0) return res;

  try {
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(new FileInputStream(new File(fp))));
    String line = null;
    while ((line = reader.readLine()) != null) {
      String[] fields = line.split(" ");
      int from = Integer.parseInt(fields[0]);
      int to = Integer.parseInt(fields[1]);
      if (res.containsKey(from)) {
        res.get(from).add(to);
      } else {
        Set<Integer> cset = new HashSet<Integer>();
        cset.add(to);
        res.put(from, cset);
      }
    }
    reader.close();
  } catch (FileNotFoundException e) {
    e.printStackTrace();
  } catch (NumberFormatException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }
  return Collections.unmodifiableMap(res);
}
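// Design note: Collections.unmodifiableMap guards only the map structure; the
// Set<Integer> values are still the mutable HashSets built above, so
// readTransitionMatrix(fp).get(0).add(42) would succeed. Wrapping each value with
// Collections.unmodifiableSet would make the result deeply read-only.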
/**
 * Get the list of ids of sequences in this sequence database.
 *
 * @return the list of sequence ids.
 */
public Set<Integer> getSequenceIDs() {
  Set<Integer> set = new HashSet<Integer>();
  for (Sequence sequence : getSequences()) {
    set.add(sequence.getId());
  }
  return set;
}
/*
 * Drops terms that contain other terms (prefixes, suffixes, etc.) and trims the
 * question list to the target size.
 */
private ArrayList<MemoryQuestion> dropPrefixSuffixfromSortedList(
    List<MemoryQuestion> questionlist, int targetSize) {
  ArrayList<MemoryQuestion> resultList = new ArrayList<MemoryQuestion>();
  Set<Integer> badIndexSet = new LinkedHashSet<Integer>();

  int terms = questionlist.size();
  for (int first = 0; first < terms; first++) {
    MemoryQuestion mq1 = questionlist.get(first);
    String name1 = mq1.correctAnswer;
    if (badIndexSet.contains(first)) continue;

    resultList.add(mq1);
    if (resultList.size() >= targetSize) return resultList;

    for (int second = first + 1; second < terms; second++) {
      MemoryQuestion mq2 = questionlist.get(second);
      String name2 = mq2.correctAnswer;
      if (name1.contains(name2) || name2.contains(name1)) {
        badIndexSet.add(second);
      }
    }
  }
  return resultList;
}
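// Worked example: for a list sorted by score whose answers are
// ["cat", "catalog", "dog"] with targetSize 3, "catalog" is marked bad because it
// contains the earlier (higher-ranked) "cat", so the result keeps ["cat", "dog"];
// between two overlapping terms, the earlier one always survives.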