/**
 * Compute the set of all IA32 opcodes that have emit methods in the Assembler. This method uses
 * the stylized form of all emit method names in the Assembler to extract the opcode of each one.
 * It returns the set of all such distinct opcodes, as a set of Strings.
 *
 * @param emitters the set of all emit methods in the Assembler
 * @return the set of all opcodes handled by the Assembler
 */
private static Set<String> getOpcodes(Method[] emitters) {
  Set<String> s = new HashSet<String>();
  for (int i = 0; i < emitters.length; i++) {
    String name = emitters[i].getName();
    if (DEBUG) System.err.println(name);
    if (name.startsWith("emit")) {
      int posOf_ = name.indexOf('_');
      if (posOf_ != -1) {
        String opcode = name.substring(4, posOf_);
        if (!excludedOpcodes.contains(opcode)) {
          s.add(opcode);
        }
      } else {
        String opcode = name.substring(4);
        // make sure it is an opcode
        if (opcode.equals(opcode.toUpperCase(Locale.getDefault()))) {
          if (!excludedOpcodes.contains(opcode)) {
            s.add(opcode);
          }
        }
      }
    }
  }
  return s;
}
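// A minimal usage sketch (hedged: "Assembler" is the hypothetical class whose reflected emit
// methods would be passed in; it is not defined in this snippet):
//
//   Method[] emitters = Assembler.class.getDeclaredMethods();
//   Set<String> opcodes = getOpcodes(emitters);
//   for (String opcode : opcodes) System.out.println(opcode);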
/**
 * Creates an NL fragment project along with the locale specific properties files.
 *
 * @throws CoreException
 * @throws IOException
 * @throws InvocationTargetException
 * @throws InterruptedException
 */
private void internationalizePlugins(List plugins, List locales, Map overwrites)
    throws CoreException, IOException, InvocationTargetException, InterruptedException {

  Set created = new HashSet();

  for (Iterator it = plugins.iterator(); it.hasNext(); ) {
    IPluginModelBase plugin = (IPluginModelBase) it.next();

    for (Iterator iter = locales.iterator(); iter.hasNext(); ) {
      Locale locale = (Locale) iter.next();

      IProject project = getNLProject(plugin, locale);

      if (created.contains(project)
          || overwriteWithoutAsking
          || !project.exists()
          || OVERWRITE == overwrites.get(project.getName())) {
        if (!created.contains(project) && project.exists()) {
          project.delete(true, getProgressMonitor());
        }

        if (!created.contains(project)) {
          createNLFragment(plugin, project, locale);
          created.add(project);
          project.getFolder(RESOURCE_FOLDER_PARENT).create(false, true, getProgressMonitor());
        }

        project
            .getFolder(RESOURCE_FOLDER_PARENT)
            .getFolder(locale.toString())
            .create(true, true, getProgressMonitor());
        createLocaleSpecificPropertiesFile(project, plugin, locale);
      }
    }
  }
}
/** Check if the getSubBuilders properly predicts the output. */
public static void testSubBuilders() throws Exception {
  Workspace ws = Workspace.getWorkspace(new File("test/ws"));
  Project project = ws.getProject("p4-sub");

  Collection<? extends Builder> bs = project.getSubBuilders();
  assertNotNull(bs);
  assertEquals(3, bs.size());
  Set<String> names = new HashSet<String>();
  for (Builder b : bs) {
    names.add(b.getBsn());
  }
  assertTrue(names.contains("p4-sub.a"));
  assertTrue(names.contains("p4-sub.b"));
  assertTrue(names.contains("p4-sub.c"));

  File[] files = project.build();
  assertTrue(project.check());

  System.err.println(Processor.join(project.getErrors(), "\n"));
  System.err.println(Processor.join(project.getWarnings(), "\n"));
  assertEquals(0, project.getErrors().size());
  assertEquals(0, project.getWarnings().size());

  assertNotNull(files);
  assertEquals(3, files.length);

  for (File file : files) {
    Jar jar = new Jar(file);
    Manifest m = jar.getManifest();
    assertTrue(names.contains(m.getMainAttributes().getValue("Bundle-SymbolicName")));
  }
}
/**
 * Builds the argument invocation string for calling a constructor
 *
 * @param classVariables The collection of member variables as XMLElements
 * @param constructorArguments The set of names for constructor arguments
 * @param allocatedMemberVariables The set of member variables already allocated
 * @param element The specific 'new' xml element
 */
private static String buildArguments(
    Vector classVariables,
    Set constructorArguments,
    Set allocatedMemberVariables,
    IXMLElement element)
    throws IOException {
  XMLUtil.checkExpectedNode("new", element);

  boolean comma = false;
  String arguments = "";

  for (Enumeration e = element.enumerateChildren(); e.hasMoreElements(); ) {
    IXMLElement argument = (IXMLElement) e.nextElement();

    String argType = ModelAccessor.getValueType(argument);
    String value = ModelAccessor.getValue(argument);

    /* Exclude cases where we do nothing to the value */
    if (!primitives.contains(argType) && !value.equals("true") && !value.equals("false")) {
      // CASE 0: Translate 'this'
      if (value.equalsIgnoreCase("this")) {
        value = "m_id";
      }
      // CASE 1: It is a singleton member variable which must be mapped to the singleton
      else if (allocatedMemberVariables.contains(value)) {
        String singletonType = getVariableType(classVariables, value);
        if (primitives.contains(singletonType)) {
          value = "(" + singletonType + ") singleton(" + value + ")";
        } else value = "singleton(" + value + ")";
      }
      // CASE 2: It is a constructor argument
      else if (constructorArguments.contains(value)) {
        // Do nothing, just use the string as is.
      }
      // CASE 3: Test for error - using a member before initializing it
      else if (!allocatedMemberVariables.contains(value) && isMember(classVariables, value)) {
        value = "!ERROR: Cannot use " + value + " without prior initialization.";
      }
      // CASE 4: Otherwise it is a string, a symbol, or an enumeration of some kind
      else if (argType.equals("string")
          || ModelAccessor.isEnumeration(argType)
          || argType.equals("symbol")) {
        value = "LabelStr(\"" + XMLUtil.escapeQuotes(value) + "\")";
      }
      // If we fall through to here, there is an error.
      else value = "!ERROR:BAD ASSIGNMENT";
    }
    // Primitive, 'true', and 'false' values are excluded above and used as is.

    if (comma) arguments = arguments + ", ";

    arguments = arguments + value;
    comma = true;
  }

  return arguments;
}
/** generate mention annotations (with entity numbers) based on the ACE entities and mentions. */
static void addMentionTags(Document doc, AceDocument aceDoc) {
  ArrayList<AceEntity> entities = aceDoc.entities;
  for (int i = 0; i < entities.size(); i++) {
    AceEntity entity = entities.get(i);
    ArrayList<AceEntityMention> mentions = entity.mentions;
    for (int j = 0; j < mentions.size(); j++) {
      AceEntityMention mention = (AceEntityMention) mentions.get(j);
      // we compute a jetSpan not including trailing whitespace
      Span aceSpan = mention.head;
      // skip mentions in ChEnglish APF not aligned to any English text
      if (aceSpan.start() < 0) continue;
      Span jetSpan = new Span(aceSpan.start(), aceSpan.end() + 1);
      FeatureSet features = new FeatureSet("entity", new Integer(i));
      if (flags.contains("types")) {
        features.put("type", entity.type.substring(0, 3));
        if (entity.subtype != null) features.put("subtype", entity.subtype);
      }
      if (flags.contains("extents")) {
        String cleanExtent = mention.text.replaceAll("\n", " ");
        features.put("extent", AceEntityMention.addXmlEscapes(cleanExtent));
      }
      doc.annotate("mention", jetSpan, features);
    }
  }
}
private static boolean checkDependants(
    final IdeaPluginDescriptor pluginDescriptor,
    final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
    final Condition<PluginId> check,
    final Set<PluginId> processed) {
  processed.add(pluginDescriptor.getPluginId());
  final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
  final Set<PluginId> optionalDependencies =
      new HashSet<PluginId>(Arrays.asList(pluginDescriptor.getOptionalDependentPluginIds()));
  for (final PluginId dependentPluginId : dependentPluginIds) {
    if (processed.contains(dependentPluginId)) continue;

    // TODO[yole] should this condition be a parameter?
    if (isModuleDependency(dependentPluginId)
        && (ourAvailableModules.isEmpty()
            || ourAvailableModules.contains(dependentPluginId.getIdString()))) {
      continue;
    }
    if (!optionalDependencies.contains(dependentPluginId)) {
      if (!check.value(dependentPluginId)) {
        return false;
      }
      final IdeaPluginDescriptor dependantPluginDescriptor =
          pluginId2Descriptor.fun(dependentPluginId);
      if (dependantPluginDescriptor != null
          && !checkDependants(dependantPluginDescriptor, pluginId2Descriptor, check, processed)) {
        return false;
      }
    }
  }
  return true;
}
private void onFinishWebUITests(ITestContext testContext) {
  // List of test results which we will delete later because of duplication or because the test
  // eventually passed
  List<ITestResult> testsToBeRemoved = new ArrayList<ITestResult>();

  // collect all id's from passed test
  Set<Integer> passedTestIds = new HashSet<Integer>();
  for (ITestResult passedTest : testContext.getPassedTests().getAllResults()) {
    passedTestIds.add(getTestId(passedTest));
  }

  Set<Integer> failedTestIds = new HashSet<Integer>();
  for (ITestResult failedTest : testContext.getFailedTests().getAllResults()) {
    int failedTestId = getTestId(failedTest);

    // if this test failed before mark as to be deleted
    // or delete this failed test if there is at least one passed version
    if (failedTestIds.contains(failedTestId) || passedTestIds.contains(failedTestId)) {
      testsToBeRemoved.add(failedTest);
    } else {
      failedTestIds.add(failedTestId);
    }
  }

  // finally delete all tests that are marked
  for (Iterator<ITestResult> iterator = testContext.getFailedTests().getAllResults().iterator();
      iterator.hasNext(); ) {
    ITestResult testResult = iterator.next();
    if (testsToBeRemoved.contains(testResult)) {
      iterator.remove();
    }
  }
}
public static void addAnnotations(Document doc, AceDocument aceDoc) {
  boolean monocase = Ace.allLowerCase(doc);
  if (year.equals("2004")) gazetteer.setMonocase(monocase);
  if (flags.contains("sentences")) addSentences(doc);
  if (flags.contains("timex")) addTimexTags(doc, aceDoc);
  if (flags.contains("mentions")) addMentionTags(doc, aceDoc);
  if (flags.contains("names")) addENAMEXtags(doc, aceDoc);
}
private static void loadCommonJavacOptions(CompileContext context) {
  final List<String> options = new ArrayList<String>();
  final List<String> vmOptions = new ArrayList<String>();

  final JpsProject project = context.getProjectDescriptor().getProject();
  final JpsJavaCompilerConfiguration compilerConfig =
      JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(project);
  final JpsJavaCompilerOptions compilerOptions = compilerConfig.getCurrentCompilerOptions();
  if (compilerOptions.DEBUGGING_INFO) {
    options.add("-g");
  }
  if (compilerOptions.DEPRECATION) {
    options.add("-deprecation");
  }
  if (compilerOptions.GENERATE_NO_WARNINGS) {
    options.add("-nowarn");
  }
  if (compilerOptions instanceof EclipseCompilerOptions) {
    final EclipseCompilerOptions eclipseOptions = (EclipseCompilerOptions) compilerOptions;
    if (eclipseOptions.PROCEED_ON_ERROR) {
      options.add("-proceedOnError");
    }
  }
  final String customArgs = compilerOptions.ADDITIONAL_OPTIONS_STRING;
  if (customArgs != null) {
    final StringTokenizer customOptsTokenizer = new StringTokenizer(customArgs, " \t\r\n");
    boolean skip = false;
    while (customOptsTokenizer.hasMoreTokens()) {
      final String userOption = customOptsTokenizer.nextToken();
      if (FILTERED_OPTIONS.contains(userOption)) {
        skip = true;
        continue;
      }
      if (!skip) {
        if (!FILTERED_SINGLE_OPTIONS.contains(userOption)) {
          if (userOption.startsWith("-J-")) {
            vmOptions.add(userOption.substring("-J".length()));
          } else {
            options.add(userOption);
          }
        }
      }
    }
  }

  if (useEclipseCompiler(context)) {
    for (String option : options) {
      if (option.startsWith("-proceedOnError")) {
        Utils.PROCEED_ON_ERROR_KEY.set(context, Boolean.TRUE);
        break;
      }
    }
  }

  JAVAC_OPTIONS.set(context, options);
  JAVAC_VM_OPTIONS.set(context, vmOptions);
}
boolean isEnabled(long m) {
  if (enabledMechanisms != null) {
    return enabledMechanisms.contains(Long.valueOf(m));
  }
  if (disabledMechanisms != null) {
    return !disabledMechanisms.contains(Long.valueOf(m));
  }
  return true;
}
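// Precedence sketch (drawn only from the method above): an explicit allow-list of enabled
// mechanisms takes priority; otherwise the deny-list is consulted; with neither configured,
// every mechanism id is treated as enabled.
//
//   long mechanismId = 0x1L; // hypothetical mechanism id
//   boolean usable = isEnabled(mechanismId); // true by default when no lists are configured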
/** keySet returns a Set containing all the keys */
public void testDescendingKeySet() {
  ConcurrentNavigableMap map = dmap5();
  Set s = map.keySet();
  assertEquals(5, s.size());
  assertTrue(s.contains(m1));
  assertTrue(s.contains(m2));
  assertTrue(s.contains(m3));
  assertTrue(s.contains(m4));
  assertTrue(s.contains(m5));
}
public boolean isDirectTransferInProgress(final FrostUploadItem ulItem) {
  final String id = ulItem.getGqIdentifier();
  if (directPUTsInProgress.contains(id)) {
    return true;
  }
  if (directPUTsWithoutAnswer.contains(id)) {
    return true;
  }
  return false;
}
/** keySet returns a Set containing all the keys */
public void testKeySet() {
  ConcurrentNavigableMap map = map5();
  Set s = map.keySet();
  assertEquals(5, s.size());
  assertTrue(s.contains(one));
  assertTrue(s.contains(two));
  assertTrue(s.contains(three));
  assertTrue(s.contains(four));
  assertTrue(s.contains(five));
}
private void handleRtpPacket(RawPacket pkt) {
  if (pkt != null && pkt.getPayloadType() == vp8PayloadType) {
    int ssrc = pkt.getSSRC();
    // SSRCs are tracked as unsigned 32-bit values. The first time a VP8 SSRC is seen, it is
    // recorded and a Full Intra Request (FIR) is sent for it; the unsynchronized outer check
    // keeps the common path cheap, and the inner check ensures the add/send happens only once.
    if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) {
      synchronized (activeVideoSsrcs) {
        if (!activeVideoSsrcs.contains(ssrc & 0xffffffffL)) {
          activeVideoSsrcs.add(ssrc & 0xffffffffL);
          rtcpFeedbackSender.sendFIR(ssrc);
        }
      }
    }
  }
}
// ---------------------------------------------------------------------------
public Rule findTargetRule(String target) throws TablesawException {
  Rule rule = null;

  if (m_resolved) {
    ArrayList<String> posTargets;
    String posTarget = target;

    if (m_noRulesList.contains(target)) return (null);

    if ((rule = m_locatedRules.get(target)) != null) return (rule);

    // First look in name map
    rule = m_nameRuleMap.get(target);

    if (rule == null) {
      // Now look for targets
      rule = m_targetRuleMap.get(posTarget);

      if (rule == null) {
        posTargets = m_fileManager.getPossibleFiles(posTarget);
        for (String t : posTargets) {
          rule = m_targetRuleMap.get(t);
          if (rule != null) break;
        }
      }
    }

    Debug.print("Rule for " + target + " is " + rule);

    if (rule != null) {
      m_locatedRules.put(target, rule);
    } else m_noRulesList.add(target);
  }

  return (rule);
}
private LevelResults compareZones(BxPage expected, BxPage actual) {
  Map<BxChunk, BxZone> map = BxModelUtils.mapChunksToZones(actual);

  LevelResults results = new LevelResults();
  for (BxZone expectedZone : expected) {
    if (ignoredLabels.contains(expectedZone.getLabel())) {
      continue;
    }
    Set<BxZone> actualZones = new HashSet<BxZone>();
    for (BxLine line : expectedZone) {
      for (BxWord word : line) {
        for (BxChunk chunk : word) {
          actualZones.add(map.get(chunk));
        }
      }
    }
    if (actualZones.size() == 1) {
      for (BxZone actualZone : actualZones) {
        if (BxModelUtils.countChunks(actualZone) == BxModelUtils.countChunks(expectedZone)) {
          results.matched++;
        }
      }
    }
    results.all++;
  }

  return results;
}
/**
 * Parse a repository document.
 *
 * @param url
 * @throws IOException
 * @throws XmlPullParserException
 * @throws Exception
 */
void parseDocument(URL url) throws IOException, XmlPullParserException, Exception {
  if (!visited.contains(url)) {
    visited.add(url);
    try {
      System.out.println("Visiting: " + url);

      InputStream in = null;

      if (url.getPath().endsWith(".zip")) {
        ZipInputStream zin = new ZipInputStream(url.openStream());
        ZipEntry entry = zin.getNextEntry();
        while (entry != null) {
          if (entry.getName().equals("repository.xml")) {
            in = zin;
            break;
          }
          entry = zin.getNextEntry();
        }
      } else {
        in = url.openStream();
      }
      Reader reader = new InputStreamReader(in);
      XmlPullParser parser = new KXmlParser();
      parser.setInput(reader);
      parseRepository(parser);
    } catch (MalformedURLException e) {
      System.out.println("Cannot create connection to url");
    }
  }
}
private static void fetchMulitpleOutlinkMappings(
    HashMap<String, Set<String>> outlinkmapping,
    JSONArray arr,
    HashMap<String, JSONArray> inlinkmapping,
    Set<String> crawledDocuments)
    throws FileNotFoundException {
  // HashMap<String, Set<String>> outlinkmapping = new HashMap<String, Set<String>>();
  for (int i = 0; i < arr.size(); i++) {
    Set<String> urlset = new HashSet<String>();
    // System.out.println(arr.get(i));
    try {
      JSONObject obj = (JSONObject) arr.get(i);
      String id = (String) obj.get("_id");
      // System.out.println(id);
      JSONArray outlinks = new JSONArray();
      if (obj.containsKey("fields") && ((JSONObject) obj.get("fields")).containsKey("out_links"))
        outlinks = (JSONArray) ((JSONObject) obj.get("fields")).get("out_links");
      // System.out.println(outlinks);
      for (int j = 0; j < outlinks.size(); j++) {
        try {
          if (crawledDocuments.contains((String) outlinks.get(j)))
            urlset.add((String) outlinks.get(j));
          // System.out.println("done");
        } catch (Exception e) {
          System.out.println("Did not get the outlink " + e.toString());
        }
      }
      outlinkmapping.put(id, urlset);
      // System.out.println("done");
    } catch (Exception e) {
      System.out.println("Did not get the outlink " + e.toString());
    }
  }
}
/**
 * Returns an array containing all records in the given section grouped into RRsets.
 *
 * @see RRset
 * @see Section
 */
public RRset[] getSectionRRsets(int section) {
  if (sections[section] == null) return emptyRRsetArray;
  List sets = new LinkedList();
  Record[] recs = getSectionArray(section);
  Set hash = new HashSet();
  for (int i = 0; i < recs.length; i++) {
    Name name = recs[i].getName();
    boolean newset = true;
    if (hash.contains(name)) {
      for (int j = sets.size() - 1; j >= 0; j--) {
        RRset set = (RRset) sets.get(j);
        if (set.getType() == recs[i].getRRsetType()
            && set.getDClass() == recs[i].getDClass()
            && set.getName().equals(name)) {
          set.addRR(recs[i]);
          newset = false;
          break;
        }
      }
    }
    if (newset) {
      RRset set = new RRset(recs[i]);
      sets.add(set);
      hash.add(name);
    }
  }
  return (RRset[]) sets.toArray(new RRset[sets.size()]);
}
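// A hedged usage sketch with dnsjava's Message/Section types (assuming "response" is an
// already-parsed Message; Section.ANSWER and Type.string(...) are existing dnsjava APIs):
//
//   RRset[] answers = response.getSectionRRsets(Section.ANSWER);
//   for (RRset set : answers) {
//     System.out.println(set.getName() + " " + Type.string(set.getType()));
//   }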
/**
 * Tests reading a 3.0.2 install with a mix of classic and OSGi plug-ins.
 *
 * @throws Exception
 */
public void testClassicPlugins() throws Exception {
  // extract the 3.0.2 skeleton
  IPath location = extractClassicPlugins();

  // the new way
  ITargetDefinition definition = getNewTarget();
  ITargetLocation container = getTargetService().newDirectoryLocation(location.toOSString());
  definition.setTargetLocations(new ITargetLocation[] {container});
  Set urls = getAllBundleURLs(definition);
  assertTrue("Must be bundles", urls.size() > 0);

  Preferences store = PDECore.getDefault().getPluginPreferences();
  boolean restore = store.getBoolean(ICoreConstants.TARGET_PLATFORM_REALIZATION);
  try {
    store.setValue(ICoreConstants.TARGET_PLATFORM_REALIZATION, false);
    // the old way
    URL[] pluginPaths = PluginPathFinder.getPluginPaths(location.toOSString());
    for (int i = 0; i < pluginPaths.length; i++) {
      URL url = pluginPaths[i];
      if (!urls.contains(url)) {
        System.err.println(url.toString());
      }
    }
    assertEquals("Wrong number of bundles", pluginPaths.length, urls.size());
  } finally {
    store.setValue(ICoreConstants.TARGET_PLATFORM_REALIZATION, restore);
  }
}
@SuppressWarnings("static-method") public Map<String, Object> processOperations( CodegenConfig config, String tag, List<CodegenOperation> ops) { Map<String, Object> operations = new HashMap<String, Object>(); Map<String, Object> objs = new HashMap<String, Object>(); objs.put("classname", config.toApiName(tag)); objs.put("pathPrefix", config.toApiVarName(tag)); // check for operationId uniqueness Set<String> opIds = new HashSet<String>(); int counter = 0; for (CodegenOperation op : ops) { String opId = op.nickname; if (opIds.contains(opId)) { counter++; op.nickname += "_" + counter; } opIds.add(opId); } objs.put("operation", ops); operations.put("operations", objs); operations.put("package", config.apiPackage()); Set<String> allImports = new LinkedHashSet<String>(); for (CodegenOperation op : ops) { allImports.addAll(op.imports); } List<Map<String, String>> imports = new ArrayList<Map<String, String>>(); for (String nextImport : allImports) { Map<String, String> im = new LinkedHashMap<String, String>(); String mapping = config.importMapping().get(nextImport); if (mapping == null) { mapping = config.toModelImport(nextImport); } if (mapping != null) { im.put("import", mapping); imports.add(im); } } operations.put("imports", imports); // add a flag to indicate whether there's any {{import}} if (imports.size() > 0) { operations.put("hasImport", true); } config.postProcessOperations(operations); if (objs.size() > 0) { List<CodegenOperation> os = (List<CodegenOperation>) objs.get("operation"); if (os != null && os.size() > 0) { CodegenOperation op = os.get(os.size() - 1); op.hasMore = null; } } return operations; }
public LinkedHashSet<Path> scan(FileSystem fs, Path filePath, Set<String> consumedFiles) {
  LinkedHashSet<Path> pathSet = Sets.newLinkedHashSet();
  try {
    LOG.debug("Scanning {} with pattern {}", filePath, this.filePatternRegexp);

    FileStatus[] files = fs.listStatus(filePath);
    for (FileStatus status : files) {
      Path path = status.getPath();
      String filePathStr = path.toString();
      if (consumedFiles.contains(filePathStr)) {
        continue;
      }
      if (ignoredFiles.contains(filePathStr)) {
        continue;
      }
      if (acceptFile(filePathStr)) {
        LOG.debug("Found {}", filePathStr);
        pathSet.add(path);
      } else {
        // don't look at it again
        ignoredFiles.add(filePathStr);
      }
    }
  } catch (FileNotFoundException e) {
    LOG.warn("Failed to list directory {}", filePath, e);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return pathSet;
}
/**
 * Tests identification of source bundles in a 3.0.2 install.
 *
 * @throws Exception
 */
public void testClassicSourcePlugins() throws Exception {
  // extract the 3.0.2 skeleton
  IPath location = extractClassicPlugins();

  // the new way
  ITargetDefinition definition = getNewTarget();
  ITargetLocation container = getTargetService().newDirectoryLocation(location.toOSString());
  definition.setTargetLocations(new ITargetLocation[] {container});
  definition.resolve(null);
  TargetBundle[] bundles = definition.getBundles();
  List source = new ArrayList();
  for (int i = 0; i < bundles.length; i++) {
    TargetBundle sb = bundles[i];
    if (sb.isSourceBundle()) {
      source.add(sb);
    }
  }

  assertEquals("Wrong number of source bundles", 4, source.size());
  Set names = new HashSet();
  for (int i = 0; i < source.size(); i++) {
    names.add(((TargetBundle) source.get(i)).getBundleInfo().getSymbolicName());
  }
  String[] expected =
      new String[] {
        "org.eclipse.platform.source",
        "org.eclipse.jdt.source",
        "org.eclipse.pde.source",
        "org.eclipse.platform.source.win32.win32.x86"
      };
  for (int i = 0; i < expected.length; i++) {
    assertTrue("Missing source for " + expected[i], names.contains(expected[i]));
  }
}
/* Drops terms that contain (or are contained in) other terms (prefixes, suffixes, etc.) and
trims the question list to the target size. */
private ArrayList<MemoryQuestion> dropPrefixSuffixfromSortedList(
    List<MemoryQuestion> questionlist, int targetSize) {
  ArrayList<MemoryQuestion> resultList = new ArrayList<MemoryQuestion>();
  Set<Integer> badIndexSet = new LinkedHashSet<Integer>();

  int terms = questionlist.size();
  for (int first = 0; first < terms; first++) {
    MemoryQuestion mq1 = questionlist.get(first);
    String name1 = mq1.correctAnswer;
    if (badIndexSet.contains(first)) continue;

    resultList.add(mq1);
    if (resultList.size() >= targetSize) return resultList;

    for (int second = first + 1; second < terms; second++) {
      MemoryQuestion mq2 = questionlist.get(second);
      String name2 = mq2.correctAnswer;

      if (name1.contains(name2) || name2.contains(name1)) {
        badIndexSet.add(second);
      }
    }
  }

  return resultList;
}
public static void viewTagInfo(String inurl, String tag) throws Exception {
  // pr = new PrintStream(new FileOutputStream("/semplest/lluis/keywordExp/urldata.txt"));
  pr = System.out;
  long start = System.currentTimeMillis();
  pr.println(inurl + "****************************************************************");
  printList(cleanUrlText(TextUtils.HTMLText(inurl, tag)));
  String urls = TextUtils.HTMLLinkString(inurl, inurl);
  String[] url = urls.split("\\s+");
  Set<String> urlMap = new HashSet<String>(url.length);
  urlMap.add(inurl);
  for (String ur : url) {
    if (!urlMap.contains(ur)) {
      pr.println(ur + "***************************************************************");
      try {
        printList(cleanUrlText(TextUtils.HTMLText(ur, tag)));
      } catch (Exception e) {
        System.out.println("Error with url: " + ur);
        e.printStackTrace();
        logger.error("Problem", e);
      }
      urlMap.add(ur);
    }
  }
  pr.println("Time elapsed " + (System.currentTimeMillis() - start));
}
/**
 * Tests that resetting the target platform should work OK (i.e. is equivalent to the models in
 * the default target platform).
 *
 * @throws CoreException
 */
public void testResetTargetPlatform() throws Exception {
  ITargetDefinition definition = getDefaultTargetPlatorm();
  Set urls = getAllBundleURLs(definition);
  Set fragments = new HashSet();
  TargetBundle[] bundles = definition.getBundles();
  for (int i = 0; i < bundles.length; i++) {
    if (bundles[i].isFragment()) {
      fragments.add(new File(bundles[i].getBundleInfo().getLocation()).toURL());
    }
  }

  // current platform
  IPluginModelBase[] models = TargetPlatformHelper.getPDEState().getTargetModels();

  // should be equivalent
  assertEquals("Should have same number of bundles", urls.size(), models.length);
  for (int i = 0; i < models.length; i++) {
    String location = models[i].getInstallLocation();
    URL url = new File(location).toURL();
    assertTrue("Missing plug-in " + location, urls.contains(url));
    if (models[i].isFragmentModel()) {
      assertTrue("Missing fragment", fragments.remove(url));
    }
  }
  assertTrue("Different number of fragments", fragments.isEmpty());
}
static void writeNewData(String datafile) {
  try {
    BufferedReader bufr = new BufferedReader(new FileReader(datafile));
    BufferedWriter bufw = new BufferedWriter(new FileWriter(datafile + ".nzf"));
    String line;
    String[] tokens;
    while ((line = bufr.readLine()) != null) {
      tokens = line.split(" ");
      bufw.write(tokens[0]);
      for (int i = 1; i < tokens.length; i++) {
        Integer index = Integer.valueOf(tokens[i].split(":")[0]);
        if (nnzFeas.contains(index)) {
          bufw.write(" " + tokens[i]);
        }
      }
      bufw.newLine();
    }
    bufw.close();
    bufr.close();
  } catch (Exception e) {
    e.printStackTrace();
    System.exit(0);
  }
}
private void checkStartup(
    Map<String, ServiceData> map,
    List<ServiceData> start,
    ServiceData sd,
    Set<ServiceData> cyclic) {
  if (sd.after.isEmpty() || start.contains(sd)) return;

  if (cyclic.contains(sd)) {
    reporter.error("Cyclic dependency for " + sd.name);
    return;
  }

  cyclic.add(sd);

  for (String dependsOn : sd.after) {
    if (dependsOn.equals("boot")) continue;

    ServiceData deps = map.get(dependsOn);
    if (deps == null) {
      reporter.error("No such service " + dependsOn + " but " + sd.name + " depends on it");
    } else {
      checkStartup(map, start, deps, cyclic);
    }
  }
  start.add(sd);
}
private static PencilPosition findShortestRoute(int[][] maze) {
  // all found solutions to the maze
  PriorityQueue<PencilPosition> solutions =
      new PriorityQueue<PencilPosition>(5, new PencilPositionComparator());
  // breadth-first search queue
  Queue<PencilPosition> routes = new LinkedList<PencilPosition>();
  // set of already visited positions
  Set<PencilPosition> visitedPositions = new HashSet<PencilPosition>();

  // add the starting position, which is always (0,0)
  routes.add(new PencilPosition(0, 0, false, null));

  while (!routes.isEmpty()) {
    PencilPosition position = routes.poll();

    // if this is the destination position then we've found a solution
    if (0 == maze[position.row][position.column]) {
      solutions.add(position);
      continue;
    }

    // if we haven't already visited this position
    if (!visitedPositions.contains(position)) {
      routes.addAll(findPossibleRoutes(position, maze));
      visitedPositions.add(position);
    }
  }

  return solutions.poll();
}
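// Illustrative call only (hedged): the code above shows that a cell value of 0 marks the
// destination; how walls and open cells are encoded depends on findPossibleRoutes, which is
// not part of this snippet, so the maze below simply uses 1 for ordinary cells.
//
//   int[][] maze = {
//     {1, 1, 1},
//     {1, 1, 1},
//     {1, 1, 0},
//   };
//   PencilPosition goal = findShortestRoute(maze);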
// look up and apply coarts for w to each sign in result
@SuppressWarnings("unchecked")
private void applyCoarts(Word w, SignHash result) throws LexException {
  List<Sign> inputSigns = new ArrayList<Sign>(result.asSignSet());
  result.clear();
  List<Sign> outputSigns = new ArrayList<Sign>(inputSigns.size());

  // for each surface attr, lookup coarts and apply to input signs, storing results in output
  // signs
  for (Iterator<Pair<String, String>> it = w.getSurfaceAttrValPairs(); it.hasNext(); ) {
    Pair<String, String> p = it.next();
    String attr = (String) p.a;
    if (!_indexedCoartAttrs.contains(attr)) continue;
    String val = (String) p.b;

    Word coartWord = Word.createWord(attr, val);
    SignHash coartResult = getSignsFromWord(coartWord, null, null, null);

    for (Iterator<Sign> it2 = coartResult.iterator(); it2.hasNext(); ) {
      Sign coartSign = it2.next();
      // apply to each input
      for (int j = 0; j < inputSigns.size(); j++) {
        Sign sign = inputSigns.get(j);
        grammar.rules.applyCoart(sign, coartSign, outputSigns);
      }
    }
    // switch output to input for next iteration
    inputSigns.clear();
    inputSigns.addAll(outputSigns);
    outputSigns.clear();
  }

  // add results back
  result.addAll(inputSigns);
}