@Test
public void test_2GB_over() throws IOException {
  Assume.assumeTrue(CC.FULL_TEST);

  byte[] data = new byte[51111];
  int dataHash = Arrays.hashCode(data);

  Set<Long> recids = new TreeSet<Long>();
  for (int i = 0; i < 1e5; i++) {
    long recid = engine.recordPut(data, Serializer.BYTE_ARRAY_SERIALIZER);
    recids.add(recid);

    //        if(i%10000==0){
    //            System.out.println(recid);
    //            for(Long l:recids){
    //                byte[] b = engine.recordGet(l, Serializer.BYTE_ARRAY_SERIALIZER);
    //                int hash = Arrays.hashCode(b);
    //                assertEquals(l,dataHash, hash);
    //            }
    //        }
  }

  engine.commit();

  for (Long l : recids) {
    byte[] b = engine.recordGet(l, Serializer.BYTE_ARRAY_SERIALIZER);
    int hash = Arrays.hashCode(b);
    assertEquals(dataHash, hash);
  }
}
@Test
public void testGetExistingAccount() throws Exception {
  Set<Authority> authorities = new HashSet<>();
  Authority authority = new Authority();
  authority.setName(AuthoritiesConstants.ADMIN);
  authorities.add(authority);

  User user = new User();
  user.setLogin("test");
  user.setFirstName("john");
  user.setLastName("doe");
  user.setEmail("*****@*****.**");
  user.setAuthorities(authorities);
  when(mockUserService.getUserWithAuthorities()).thenReturn(user);

  restUserMockMvc
      .perform(get("/api/account").accept(MediaType.APPLICATION_JSON))
      .andExpect(status().isOk())
      .andExpect(content().contentType(MediaType.APPLICATION_JSON))
      .andExpect(jsonPath("$.login").value("test"))
      .andExpect(jsonPath("$.firstName").value("john"))
      .andExpect(jsonPath("$.lastName").value("doe"))
      .andExpect(jsonPath("$.email").value("*****@*****.**"))
      .andExpect(jsonPath("$.authorities").value(AuthoritiesConstants.ADMIN));
}
/**
 * Get the set of ids of sequences in this sequence database.
 *
 * @return the set of sequence ids.
 */
public Set<Integer> getSequenceIDs() {
  Set<Integer> set = new HashSet<Integer>();
  for (Sequence sequence : getSequences()) {
    set.add(sequence.getId());
  }
  return set;
}
private static boolean checkDependants(
    final IdeaPluginDescriptor pluginDescriptor,
    final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
    final Condition<PluginId> check,
    final Set<PluginId> processed) {
  processed.add(pluginDescriptor.getPluginId());
  final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
  final Set<PluginId> optionalDependencies =
      new HashSet<PluginId>(Arrays.asList(pluginDescriptor.getOptionalDependentPluginIds()));
  for (final PluginId dependentPluginId : dependentPluginIds) {
    if (processed.contains(dependentPluginId)) continue;

    // TODO[yole] should this condition be a parameter?
    if (isModuleDependency(dependentPluginId)
        && (ourAvailableModules.isEmpty()
            || ourAvailableModules.contains(dependentPluginId.getIdString()))) {
      continue;
    }
    if (!optionalDependencies.contains(dependentPluginId)) {
      if (!check.value(dependentPluginId)) {
        return false;
      }
      final IdeaPluginDescriptor dependantPluginDescriptor =
          pluginId2Descriptor.fun(dependentPluginId);
      if (dependantPluginDescriptor != null
          && !checkDependants(dependantPluginDescriptor, pluginId2Descriptor, check, processed)) {
        return false;
      }
    }
  }
  return true;
}
private static PencilPosition findShortestRoute(int[][] maze) {
  // all found solutions to the maze
  PriorityQueue<PencilPosition> solutions =
      new PriorityQueue<PencilPosition>(5, new PencilPositionComparator());
  // breadth-first search queue
  Queue<PencilPosition> routes = new LinkedList<PencilPosition>();
  // set of already visited positions
  Set<PencilPosition> visitedPositions = new HashSet<PencilPosition>();

  // add the starting position, which is always (0,0)
  routes.add(new PencilPosition(0, 0, false, null));

  while (!routes.isEmpty()) {
    PencilPosition position = routes.poll();

    // if this is the destination position then we've found a solution
    if (0 == maze[position.row][position.column]) {
      solutions.add(position);
      continue;
    }

    // if we haven't already visited this position
    if (!visitedPositions.contains(position)) {
      routes.addAll(findPossibleRoutes(position, maze));
      visitedPositions.add(position);
    }
  }

  return solutions.poll();
}
public List<String> getVariationList() throws IOException {
  Set<String> vars = new HashSet<String>();
  List<SizeResult> results = queryTable(new Column("bp", "max", 10));
  for (SizeResult sr : results) {
    vars.add(sr.getVariation());
  }
  return new ArrayList<String>(vars);
}
private Variance calculateArgumentProjectionKindFromSuper(
    @NotNull TypeProjection argument,
    @NotNull List<TypeProjectionAndVariance> projectionsFromSuper) {
  Set<Variance> projectionKindsInSuper = Sets.newLinkedHashSet();
  for (TypeProjectionAndVariance projectionAndVariance : projectionsFromSuper) {
    projectionKindsInSuper.add(projectionAndVariance.typeProjection.getProjectionKind());
  }

  Variance defaultProjectionKind = argument.getProjectionKind();
  if (projectionKindsInSuper.size() == 0) {
    return defaultProjectionKind;
  } else if (projectionKindsInSuper.size() == 1) {
    Variance projectionKindInSuper = projectionKindsInSuper.iterator().next();
    if (defaultProjectionKind == INVARIANT || defaultProjectionKind == projectionKindInSuper) {
      return projectionKindInSuper;
    } else {
      reportError(
          "Incompatible projection kinds in type arguments of super methods' return types: "
              + projectionsFromSuper
              + ", defined in current: "
              + argument);
      return defaultProjectionKind;
    }
  } else {
    reportError(
        "Incompatible projection kinds in type arguments of super methods' return types: "
            + projectionsFromSuper);
    return defaultProjectionKind;
  }
}
/**
 * Adds the given POP application satisfier set as a subset of this set, with the given
 * exceptions. The exceptions set must be a subset of the given satisfier set. If the given POP
 * application was already a subset of this set, then the new exceptions set is the intersection
 * of the given exceptions set with the old one. Otherwise, the exceptions set is the given one
 * minus any individual elements of this set that satisfy the given POP application.
 */
public void addSatisfiers(NumberVar popApp, ObjectSet satisfiers, Set newExceptions) {
  if (popAppSatisfiers.containsKey(popApp)) {
    // already in set; assume satisfiers the same
    Set curExceptions = (Set) popAppExceptions.get(popApp);
    int oldNumExceptions = curExceptions.size();
    curExceptions.retainAll(newExceptions);
    size += (oldNumExceptions - curExceptions.size());
  } else {
    popAppSatisfiers.put(popApp, satisfiers);

    Set oldIndivs = (Set) popAppIndivs.remove(popApp);
    for (Iterator iter = oldIndivs.iterator(); iter.hasNext(); ) {
      individuals.remove(iter.next());
    }

    Set curExceptions = new HashSet(newExceptions);
    curExceptions.removeAll(oldIndivs);
    popAppExceptions.put(popApp, curExceptions);

    size +=
        (satisfiers.size()
            - oldIndivs.size() // because they were already here
            - curExceptions.size()); // because they weren't added
  }
}
/**
 * Returns a <code>ThrowableSet</code> representing the set of exceptions included in
 * <code>include</code> minus the set of exceptions included in <code>exclude</code>. Creates a
 * new <code>ThrowableSet</code> only if there was not already one whose contents correspond to
 * <code>include</code> - <code>exclude</code>.
 *
 * @param include A set of {@link RefLikeType} objects representing exception types included in
 *     the result; may be <code>null</code> if there are no included types.
 * @param exclude A set of {@link AnySubType} objects representing exception types excluded from
 *     the result; may be <code>null</code> if there are no excluded types.
 * @return a <code>ThrowableSet</code> representing the set of exceptions corresponding to
 *     <code>include</code> - <code>exclude</code>.
 */
private ThrowableSet registerSetIfNew(Set include, Set exclude) {
  if (INSTRUMENTING) {
    registrationCalls++;
  }
  if (include == null) {
    include = Collections.EMPTY_SET;
  }
  if (exclude == null) {
    exclude = Collections.EMPTY_SET;
  }
  int size = include.size() + exclude.size();
  Integer sizeKey = new Integer(size);
  List sizeList = (List) sizeToSets.get(sizeKey);
  if (sizeList == null) {
    sizeList = new LinkedList();
    sizeToSets.put(sizeKey, sizeList);
  }
  for (Iterator i = sizeList.iterator(); i.hasNext(); ) {
    ThrowableSet set = (ThrowableSet) i.next();
    if (set.exceptionsIncluded.equals(include) && set.exceptionsExcluded.equals(exclude)) {
      return set;
    }
  }
  if (INSTRUMENTING) {
    registeredSets++;
  }
  ThrowableSet result = new ThrowableSet(include, exclude);
  sizeList.add(result);
  return result;
}
@NotNull
private Set<VcsRef> readBranches(@NotNull GitRepository repository) {
  StopWatch sw = StopWatch.start("readBranches in " + repository.getRoot().getName());
  VirtualFile root = repository.getRoot();
  repository.update();
  Collection<GitLocalBranch> localBranches = repository.getBranches().getLocalBranches();
  Collection<GitRemoteBranch> remoteBranches = repository.getBranches().getRemoteBranches();
  Set<VcsRef> refs = new THashSet<VcsRef>(localBranches.size() + remoteBranches.size());
  for (GitLocalBranch localBranch : localBranches) {
    refs.add(
        myVcsObjectsFactory.createRef(
            localBranch.getHash(), localBranch.getName(), GitRefManager.LOCAL_BRANCH, root));
  }
  for (GitRemoteBranch remoteBranch : remoteBranches) {
    refs.add(
        myVcsObjectsFactory.createRef(
            remoteBranch.getHash(),
            remoteBranch.getNameForLocalOperations(),
            GitRefManager.REMOTE_BRANCH,
            root));
  }
  String currentRevision = repository.getCurrentRevision();
  if (currentRevision != null) { // null => fresh repository
    refs.add(
        myVcsObjectsFactory.createRef(
            HashImpl.build(currentRevision), "HEAD", GitRefManager.HEAD, root));
  }
  sw.report();
  return refs;
}
/**
 * Gets the status of all servers for the previous minute.
 *
 * @return
 * @throws IOException
 * @throws ClassNotFoundException
 */
public String getAllServer() throws Exception {
  // server list
  HashMap<String, String> serverList = createServerList();
  Set<String> aSet = serverList.keySet();
  Iterator<String> aaSet = aSet.iterator();
  int i = 0;
  String dataString = "";
  while (aaSet.hasNext()) {
    String ip = aaSet.next();
    // String des = serverList.get(ip);
    String data = getOneServer(ip, "");
    String lastNum = "0";
    String lastTime = "0:0";
    if (!data.isEmpty() && !data.equals("0=0")) {
      String[] dataList = data.split(",");
      if (dataList.length > 2) {
        String[] numList = dataList[dataList.length - 2].split("=");
        lastNum = numList[0];
        lastTime = numList[1];
      } else {
        String[] numList = dataList[dataList.length - 1].split("=");
        lastNum = numList[0];
        lastTime = numList[1];
      }
    }
    if (i != 0) {
      dataString += ",";
    }
    dataString += ip.replace("192.168.0.", "") + "-" + lastNum + "-" + lastTime;
    i++;
  }
  return dataString;
}
public static final void evaluate(final PlayerContext pc) {
  if (pc == null) {
    return;
  }
  final Profile profile = pc.profile;
  final Set<Integer> achieved = profile.achievements;
  final int size = ALL.length;
  boolean any = false;
  for (int i = 0; i < size; i++) {
    final Integer key = Integer.valueOf(i);
    if (achieved.contains(key)) {
      continue;
    }
    final Achievement ach = ALL[i];
    if (ach.isMet(pc)) {
      achieved.add(key);
      final int award = ach.award;
      FurGuardiansGame.notify(
          pc,
          ach.getName() + ", " + award + " Gem bonus",
          new Gem(FurGuardiansGame.gemAchieve));
      pc.addGems(award);
      any = true;
    }
  }
  if (any) {
    profile.save();
  }
}
private int getHttpsPort() {
  try {
    MBeanServer mBeanServer = MBeanServerFactory.findMBeanServer(null).get(0);
    QueryExp query = Query.eq(Query.attr("Scheme"), Query.value("https"));
    Set<ObjectName> objectNames = mBeanServer.queryNames(null, query);
    if (objectNames != null && objectNames.size() > 0) {
      for (ObjectName objectName : objectNames) {
        String name = objectName.toString();
        if (name.indexOf("port=") > -1) {
          String[] parts = name.split("port=");
          String port = parts[1];
          try {
            int portNum = Integer.parseInt(port);
            return portNum;
          } catch (NumberFormatException e) {
            logger.error("Error parsing https port:" + port);
            return -1;
          }
        }
      }
    }
  } catch (Throwable t) {
    logger.error("Error getting https port:", t);
  }
  return -1;
}
public Set<K2> secondKeySet() {
  Set<K2> keys = Generics.newHashSet();
  for (K1 k1 : map.keySet()) {
    keys.addAll(get(k1).firstKeySet());
  }
  return keys;
}
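/*
 * In secondKeySet() above, get(k1).firstKeySet() returns the key set of the structure nested
 * under k1, so the union over all k1 is exactly the set of second-level keys. A quick
 * self-contained illustration of the same idea with plain JDK maps (hypothetical demo class,
 * not part of the original code):
 */
class SecondKeySetDemo {
  public static void main(String[] args) {
    java.util.Map<String, java.util.Map<Integer, String>> map =
        new java.util.HashMap<String, java.util.Map<Integer, String>>();
    map.put("a", new java.util.HashMap<Integer, String>());
    map.get("a").put(1, "x");
    map.get("a").put(2, "y");
    map.put("b", new java.util.HashMap<Integer, String>());
    map.get("b").put(2, "z");
    map.get("b").put(3, "w");

    // union of the inner maps' key sets = the "second key set"
    java.util.Set<Integer> secondKeys = new java.util.HashSet<Integer>();
    for (String k1 : map.keySet()) {
      secondKeys.addAll(map.get(k1).keySet());
    }
    System.out.println(secondKeys); // contains 1, 2 and 3
  }
}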
/** {@inheritDoc} */
public void applyFederationFilter(final Collection list, final Class objectType) {
  final Set<Long> manageableShopIds =
      shopFederationStrategy.getAccessibleShopIdsByCurrentManager();
  final Iterator<CarrierDTO> carriersIt = list.iterator();
  while (carriersIt.hasNext()) {
    final CarrierDTO carrier = carriersIt.next();
    try {
      final Map<ShopDTO, Boolean> shops =
          carrierService.getAssignedCarrierShops(carrier.getCarrierId());
      boolean manageable = false;
      for (final ShopDTO shop : shops.keySet()) {
        if (manageableShopIds.contains(shop.getShopId())) {
          manageable = true;
          break;
        }
      }
      if (!manageable) {
        carriersIt.remove();
      }
    } catch (Exception exp) {
      carriersIt.remove();
    }
  }
}
/**
 * This function prints a parsed TCRL refinement law. Only for debug purposes.
 *
 * @param r The AST object
 */
public static void printRefFunction(TCRL_AST r) {
  System.out.println();
  System.out.println("**************************************************");
  System.out.println("RefFunction");
  System.out.println("**************************************************");
  System.out.println("Name: " + r.getName());
  System.out.println("Preamble: " + r.getPreamble());
  System.out.println("Rules: ");

  NodeRules rules = r.getRules();
  NodeRule rule;
  Set<String> keys = rules.getKeys();
  Iterator<String> iter = keys.iterator();
  while (iter.hasNext()) {
    rule = rules.getRule(iter.next());
    if (rule instanceof RuleSynonym) {
      printRuleSynonym((RuleSynonym) rule);
    } else {
      printRuleRefinement((RuleRefinement) rule);
    }
  }
}
// relativePaths are guaranteed to fit into command line length limitations.
@Override
@NotNull
public Collection<VirtualFile> untrackedFilesNoChunk(
    @NotNull Project project,
    @NotNull VirtualFile root,
    @Nullable List<String> relativePaths)
    throws VcsException {
  final Set<VirtualFile> untrackedFiles = new HashSet<VirtualFile>();
  GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.LS_FILES);
  h.setNoSSH(true);
  h.setSilent(true);
  h.addParameters("--exclude-standard", "--others", "-z");
  h.endOptions();
  if (relativePaths != null) {
    h.addParameters(relativePaths);
  }

  final String output = h.run();
  if (StringUtil.isEmptyOrSpaces(output)) {
    return untrackedFiles;
  }

  for (String relPath : output.split("\u0000")) {
    VirtualFile f = root.findFileByRelativePath(relPath);
    if (f == null) {
      // the file was created on disk, but the VirtualFile hasn't yet been created,
      // when the GitChangeProvider has already been requested about changes.
      LOG.info(String.format("VirtualFile for path [%s] is null", relPath));
    } else {
      untrackedFiles.add(f);
    }
  }
  return untrackedFiles;
}
public GraphIndex(Graph graph) {
  LOG.info("Indexing graph...");

  for (String feedId : graph.getFeedIds()) {
    for (Agency agency : graph.getAgencies(feedId)) {
      Map<String, Agency> agencyForId = agenciesForFeedId.getOrDefault(feedId, new HashMap<>());
      agencyForId.put(agency.getId(), agency);
      this.agenciesForFeedId.put(feedId, agencyForId);
    }
  }

  Collection<Edge> edges = graph.getEdges();
  /* We will keep a separate set of all vertices in case some have the same label.
   * Maybe we should just guarantee unique labels. */
  Set<Vertex> vertices = Sets.newHashSet();
  for (Edge edge : edges) {
    vertices.add(edge.getFromVertex());
    vertices.add(edge.getToVertex());
    if (edge instanceof TablePatternEdge) {
      TablePatternEdge patternEdge = (TablePatternEdge) edge;
      TripPattern pattern = patternEdge.getPattern();
      patternForId.put(pattern.code, pattern);
    }
  }
  for (Vertex vertex : vertices) {
    vertexForId.put(vertex.getLabel(), vertex);
    if (vertex instanceof TransitStop) {
      TransitStop transitStop = (TransitStop) vertex;
      Stop stop = transitStop.getStop();
      stopForId.put(stop.getId(), stop);
      stopVertexForStop.put(stop, transitStop);
      stopsForParentStation.put(stop.getParentStation(), stop);
    }
  }
  for (TransitStop stopVertex : stopVertexForStop.values()) {
    Envelope envelope = new Envelope(stopVertex.getCoordinate());
    stopSpatialIndex.insert(envelope, stopVertex);
  }
  for (TripPattern pattern : patternForId.values()) {
    patternsForFeedId.put(pattern.getFeedId(), pattern);
    patternsForRoute.put(pattern.route, pattern);
    for (Trip trip : pattern.getTrips()) {
      patternForTrip.put(trip, pattern);
      tripForId.put(trip.getId(), trip);
    }
    for (Stop stop : pattern.getStops()) {
      patternsForStop.put(stop, pattern);
    }
  }
  for (Route route : patternsForRoute.asMap().keySet()) {
    routeForId.put(route.getId(), route);
  }

  // Copy these two service indexes from the graph until we have better ones.
  calendarService = graph.getCalendarService();
  serviceCodes = graph.serviceCodes;
  this.graph = graph;
  LOG.info("Done indexing graph.");
}
public Object sample(int n) {
  if ((n < 0) || (n >= size)) {
    throw new IllegalArgumentException(
        "Can't get element " + n + " from set " + this + " of size " + size);
  }

  if (n < individuals.size()) {
    return individuals.get(n);
  }

  int indexSoFar = individuals.size();
  for (Iterator iter = popAppSatisfiers.entrySet().iterator(); iter.hasNext(); ) {
    Map.Entry entry = (Map.Entry) iter.next();
    NumberVar popApp = (NumberVar) entry.getKey();
    ObjectSet satisfiers = (ObjectSet) entry.getValue();
    Set exceptions = (Set) popAppExceptions.get(popApp);

    int numInSet = satisfiers.size() - exceptions.size();
    if (n < indexSoFar + numInSet) {
      return sampleFromPOPApp(satisfiers, exceptions, n - indexSoFar);
    }
    indexSoFar += numInSet;
  }

  return null; // shouldn't get here
}
@Nullable
public FilteringTreeStructure.FilteringNode selectPsiElement(PsiElement element) {
  Set<PsiElement> parents = getAllParents(element);
  FilteringTreeStructure.FilteringNode node =
      (FilteringTreeStructure.FilteringNode) myAbstractTreeBuilder.getRootElement();
  while (node != null) {
    boolean changed = false;
    for (FilteringTreeStructure.FilteringNode n : node.children()) {
      final PsiElement psiElement = getPsi(n);
      if (psiElement != null && parents.contains(psiElement)) {
        node = n;
        changed = true;
        break;
      }
    }
    if (!changed) {
      myAbstractTreeBuilder.select(node);
      if (myAbstractTreeBuilder.getSelectedElements().isEmpty()) {
        TreeUtil.selectFirstNode(myTree);
      }
      myInitialNodeIsLeaf = node.getChildren().length == 0;
      return node;
    }
  }
  TreeUtil.selectFirstNode(myTree);
  return null;
}
public void testMethodKey() throws Exception {
  MethodKey factory = (MethodKey) KeyFactory.create(MethodKey.class);
  Set methodSet = new HashSet();
  methodSet.add(factory.newInstance(Number.class, new Class[] {int.class}));
  assertTrue(methodSet.contains(factory.newInstance(Number.class, new Class[] {int.class})));
  assertTrue(!methodSet.contains(factory.newInstance(Number.class, new Class[] {Integer.class})));
}
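/*
 * The MethodKey interface used by testMethodKey() is not shown above. For cglib's KeyFactory to
 * generate it, it is presumably a key-factory interface along the following lines (hypothetical
 * reconstruction): KeyFactory.create() returns an implementation whose newInstance() produces
 * key objects with value-based equals()/hashCode() over the arguments, which is exactly what the
 * Set.contains() assertions in the test rely on.
 */
public interface MethodKey {
  Object newInstance(Class returnType, Class[] parameterTypes);
}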
private Set<String> getListenerClassNames(final String xml, final String path)
    throws IOException, SAXException {
  final Digester digester = getDigester();
  digester.addObjectCreate(XmlConfiguration.listeners, ArrayList.class);
  digester.addObjectCreate(path, StringBuffer.class); // TODO rather than StringBuffer can
  digester.addCallMethod(path, "append", 0);          // TODO this be a String?
  digester.addSetRoot(path, "add");

  final Set<String> classNames = new HashSet<String>();
  final StringReader includeReader = new StringReader(xml);
  Object o = digester.parse(includeReader);
  if (o == null) {
    // return empty Set
    return classNames;
  }
  Collection<StringBuffer> classNamesAsStringBuffers = (Collection<StringBuffer>) o;
  /** When the configuration contains no listener settings, return the empty Set */
  if (classNamesAsStringBuffers == null) {
    return classNames;
  }
  for (StringBuffer classNamesAsStringBuffer : classNamesAsStringBuffers) {
    classNames.add(classNamesAsStringBuffer.toString());
  }
  return classNames;
}
/** Dynamically generate the set of Routes passing through a Stop on demand. */
public Set<Route> routesForStop(Stop stop) {
  Set<Route> routes = Sets.newHashSet();
  for (TripPattern p : patternsForStop.get(stop)) {
    routes.add(p.route);
  }
  return routes;
}
protected Set<Data> keySetInternal() {
  final NodeEngine nodeEngine = getNodeEngine();
  try {
    Map<Integer, Object> results =
        nodeEngine
            .getOperationService()
            .invokeOnAllPartitions(
                CollectionService.SERVICE_NAME,
                new MultiMapOperationFactory(proxyId, OperationFactoryType.KEY_SET));
    Set<Data> keySet = new HashSet<Data>();
    for (Object result : results.values()) {
      if (result == null) {
        continue;
      }
      CollectionResponse response = nodeEngine.toObject(result);
      if (response.getCollection() != null) {
        keySet.addAll(response.getCollection());
      }
    }
    return keySet;
  } catch (Throwable throwable) {
    throw ExceptionUtil.rethrow(throwable);
  }
}
/**
 * Returns actions that may be taken when {@link ServiceErrorID#TRANSPORT_ERROR} occurs.
 *
 * @return <code>ErrorHandlingAction</code>s for errors caused by the transport (JMS)
 */
protected Set<ErrorHandlingAction> getActionsForTransportError() {
  Set<ErrorHandlingAction> actions = new LinkedHashSet<>();
  actions.add(ErrorHandlingActionFactory.createErrorHandlingAction(ErrorHandlingAction.LOG));
  actions.add(
      ErrorHandlingActionFactory.createErrorHandlingAction(ErrorHandlingAction.STOP_SERVICE));
  return actions;
}
private Request createRequest(
    Location location,
    int radiusInMeters,
    int resultsLimit,
    String searchText,
    Set<String> extraFields,
    Session session) {
  Request request =
      Request.newPlacesSearchRequest(
          session, location, radiusInMeters, resultsLimit, searchText, null);

  Set<String> fields = new HashSet<String>(extraFields);
  String[] requiredFields = new String[] {ID, NAME, LOCATION, CATEGORY, WERE_HERE_COUNT};
  fields.addAll(Arrays.asList(requiredFields));

  String pictureField = adapter.getPictureFieldSpecifier();
  if (pictureField != null) {
    fields.add(pictureField);
  }

  Bundle parameters = request.getParameters();
  parameters.putString("fields", TextUtils.join(",", fields));
  request.setParameters(parameters);

  return request;
}
@Test
public void testCloseable() throws IOException {
  Set<HostAndPort> jedisClusterNode = new HashSet<HostAndPort>();
  jedisClusterNode.add(new HostAndPort(nodeInfo1.getHost(), nodeInfo1.getPort()));

  JedisCluster jc = null;
  try {
    jc = new JedisCluster(jedisClusterNode);
    jc.set("51", "foo");
  } finally {
    if (jc != null) {
      jc.close();
    }
  }

  Iterator<JedisPool> poolIterator = jc.getClusterNodes().values().iterator();
  while (poolIterator.hasNext()) {
    JedisPool pool = poolIterator.next();
    try {
      pool.getResource();
      fail("JedisCluster's internal pools should be already destroyed");
    } catch (JedisConnectionException e) {
      // ok to go...
    }
  }
}
@Test
public void testJedisClusterRunsWithMultithreaded()
    throws InterruptedException, ExecutionException, IOException {
  Set<HostAndPort> jedisClusterNode = new HashSet<HostAndPort>();
  jedisClusterNode.add(new HostAndPort("127.0.0.1", 7379));
  final JedisCluster jc = new JedisCluster(jedisClusterNode);
  jc.set("foo", "bar");

  ThreadPoolExecutor executor =
      new ThreadPoolExecutor(10, 100, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(10));
  List<Future<String>> futures = new ArrayList<Future<String>>();
  for (int i = 0; i < 50; i++) {
    // collect the futures so that the results are actually verified below
    futures.add(
        executor.submit(
            new Callable<String>() {
              @Override
              public String call() throws Exception {
                // FIXME : invalidate slot cache from JedisCluster to test
                // random connection also does work
                return jc.get("foo");
              }
            }));
  }

  for (Future<String> future : futures) {
    String value = future.get();
    assertEquals("bar", value);
  }

  jc.close();
}
public Action choose(LightsOut.Player actor, Set<Action> options) {
  if (presses == null) {
    presses = new HashSet<Location>();

    // linear algebra solution
    boolean[][] state = actor.state();
    boolean[] s = stateVector(state);
    boolean[][] M = stateMatrix(state[0].length, state.length);
    gauss(M, s);

    // translate to locations
    Location[][] locations = actor.locations();
    for (int x = 0; x < s.length; x++) {
      if (s[x]) {
        presses.add(locations[x / state.length][x % state[0].length]);
      }
    }
  }

  Location next = Groups.first(presses);
  presses.remove(next);
  for (Action action : Groups.ofType(LightsOut.Move.class, options)) {
    if (((LightsOut.Move) action).target.location().equals(next)) {
      return action;
    }
  }
  return null;
}
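/*
 * The gauss(M, s) helper called in choose() is not shown above. The sketch below is a
 * hypothetical stand-in, not the original implementation: Gauss-Jordan elimination over GF(2)
 * (XOR arithmetic) that reduces the augmented system M * x = s in place and, assuming M is
 * invertible over GF(2), leaves the solution vector in s, which is how choose() then reads it.
 */
static void gauss(boolean[][] M, boolean[] s) {
  int n = s.length;
  int row = 0;
  for (int col = 0; col < n && row < n; col++) {
    // find a row at or below 'row' with a true entry in this column to use as the pivot
    int pivot = -1;
    for (int r = row; r < n; r++) {
      if (M[r][col]) {
        pivot = r;
        break;
      }
    }
    if (pivot < 0) {
      continue; // free variable; does not occur when M is invertible
    }

    // swap the pivot row into position
    boolean[] tmpRow = M[pivot];
    M[pivot] = M[row];
    M[row] = tmpRow;
    boolean tmp = s[pivot];
    s[pivot] = s[row];
    s[row] = tmp;

    // eliminate this column from every other row (addition over GF(2) is XOR)
    for (int r = 0; r < n; r++) {
      if (r != row && M[r][col]) {
        for (int c = 0; c < n; c++) {
          M[r][c] ^= M[row][c];
        }
        s[r] ^= s[row];
      }
    }
    row++;
  }
}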
// ---------------------------------------------------------------------------
public Rule findTargetRule(String target) throws TablesawException {
  Rule rule = null;

  if (m_resolved) {
    ArrayList<String> posTargets;
    String posTarget = target;

    if (m_noRulesList.contains(target)) {
      return (null);
    }

    if ((rule = m_locatedRules.get(target)) != null) {
      return (rule);
    }

    // First look in name map
    rule = m_nameRuleMap.get(target);

    if (rule == null) {
      // Now look for targets
      rule = m_targetRuleMap.get(posTarget);

      if (rule == null) {
        posTargets = m_fileManager.getPossibleFiles(posTarget);
        for (String t : posTargets) {
          rule = m_targetRuleMap.get(t);
          if (rule != null) {
            break;
          }
        }
      }
    }

    Debug.print("Rule for " + target + " is " + rule);

    if (rule != null) {
      m_locatedRules.put(target, rule);
    } else {
      m_noRulesList.add(target);
    }
  }

  return (rule);
}