// --------------------------------------------------------------------------- private void printDependencies() throws TablesawException { m_printedDependencies = new HashSet<String>(); try { PrintWriter pw = new PrintWriter(new FileWriter("dependency.txt")); pw.println("Targets marked with a * have already been printed"); // Create a reduced set of stuff to print Set<String> ruleNames = new HashSet<String>(); for (String name : m_nameRuleMap.keySet()) ruleNames.add(name); for (String name : m_nameRuleMap.keySet()) { Rule rule = m_nameRuleMap.get(name); for (String dep : rule.getDependNames()) ruleNames.remove(dep); for (Rule dep : rule.getDependRules()) { if (dep.getName() != null) ruleNames.remove(dep.getName()); } } for (String name : ruleNames) { if (!name.startsWith(NAMED_RULE_PREFIX)) printDependencies(name, pw, 0); } pw.close(); } catch (IOException ioe) { throw new TablesawException("Cannot write to file dependency.txt", -1); } }
@Override public Response serve(IHTTPSession session) { Map<String, String> header = session.getHeaders(); Map<String, String> parms = session.getParms(); String uri = session.getUri(); if (!this.quiet) { System.out.println(session.getMethod() + " '" + uri + "' "); Iterator<String> e = header.keySet().iterator(); while (e.hasNext()) { String value = e.next(); System.out.println(" HDR: '" + value + "' = '" + header.get(value) + "'"); } e = parms.keySet().iterator(); while (e.hasNext()) { String value = e.next(); System.out.println(" PRM: '" + value + "' = '" + parms.get(value) + "'"); } } for (File homeDir : this.rootDirs) { // Make sure we won't die of an exception later if (!homeDir.isDirectory()) { return getInternalErrorResponse("given path is not a directory (" + homeDir + ")."); } } return respond(Collections.unmodifiableMap(header), session, uri); }
/**
 * Compares the key sets of the current and legacy maps, removing entries
 * that are missing on one side, by delegating to compareSetsAndRemoveMissing().
 *
 * @param context the comparison context
 * @param type a description of the kind of entries being compared
 * @param currentMap entries of the current model, keyed by name
 * @param legacyMap entries of the legacy model, keyed by name
 */
private void compareKeySetsAndRemoveMissing(
    CompareContext context, String type, Map<String, ModelNode> currentMap, Map<String, ModelNode> legacyMap) {
  final Set<String> currentKeys = currentMap.keySet();
  final Set<String> legacyKeys = legacyMap.keySet();
  compareSetsAndRemoveMissing(context, type, currentKeys, legacyKeys);
}
/**
 * Fetches data for the given primary keys, looping until every key has been
 * processed or no further progress is possible.
 *
 * <p>Each pass over {@code pksToDo} attempts every remaining key; a key is
 * only processed once {@code canDoPkNow} reports its prerequisites (tracked
 * in {@code pksNotDone}) are satisfied. If an entire pass removes nothing,
 * the remaining keys can never be fetched and a RuntimeException is thrown
 * rather than spinning forever.
 *
 * @param pksToDo a Map of the primary keys to fetch; emptied as keys complete
 * @param results a Map to hold results that are to be added to the cache
 * @param cldToObjectsForCld a Map of Lists of objects relevant to PrimaryKeys
 * @param time1 the time that processing started
 * @throws ObjectStoreException if something goes wrong
 */
protected void doPks(
    Map<PrimaryKey, ClassDescriptor> pksToDo,
    Map<InterMineObject, Set<InterMineObject>> results,
    Map<ClassDescriptor, List<InterMineObject>> cldToObjectsForCld,
    long time1)
    throws ObjectStoreException {
  Set<Integer> fetchedObjectIds = Collections.synchronizedSet(new HashSet<Integer>());
  // Identity-keyed copy tracking which keys still have work pending; used by
  // canDoPkNow() to decide whether a key's prerequisites are done.
  Map<PrimaryKey, ClassDescriptor> pksNotDone =
      new IdentityHashMap<PrimaryKey, ClassDescriptor>(pksToDo);
  while (!pksToDo.isEmpty()) {
    int startPksToDoSize = pksToDo.size();
    Iterator<PrimaryKey> pkIter = pksToDo.keySet().iterator();
    while (pkIter.hasNext()) {
      PrimaryKey pk = pkIter.next();
      ClassDescriptor cld = pksToDo.get(pk);
      if (canDoPkNow(pk, cld, pksNotDone)) {
        // LOG.error("Running pk " + cld.getName() + "." + pk.getName());
        doPk(pk, cld, results, cldToObjectsForCld.get(cld), fetchedObjectIds);
        // Iterator.remove is the only safe removal while iterating keySet.
        pkIter.remove();
        pksNotDone.remove(pk);
      } else {
        // LOG.error("Cannot do pk " + cld.getName() + "." + pk.getName() + " yet");
      }
    }
    // No key was processed in a full pass: the remainder is unfetchable.
    if (pksToDo.size() == startPksToDoSize) {
      throw new RuntimeException("Error - cannot fetch any pks: " + pksToDo.keySet());
    }
  }
  // Accounting: split the elapsed time between the equivalence-fetch phase
  // and the data-tracker prefetch phase.
  long time2 = System.currentTimeMillis();
  timeSpentPrefetchEquiv += time2 - time1;
  dataTracker.prefetchIds(fetchedObjectIds);
  time1 = System.currentTimeMillis();
  timeSpentPrefetchTracker += time1 - time2;
}
private void writeHeaders(Map headers) { if (this.callback != null) { Map<String, XmlHttpProxy.Cookie> cookies = callback.getCookies(); Iterator it = cookies.keySet().iterator(); while (it.hasNext()) { XmlHttpProxy.Cookie c = cookies.get(it.next()); if (headers == null) headers = new HashMap(); headers.put( "Cookie", c.name + "=" + c.value // + "; Path=" + c.path ); } } // set headers if (headers != null) { Iterator it = headers.keySet().iterator(); if (it != null) { while (it.hasNext()) { String key = (String) it.next(); String value = (String) headers.get(key); // System.out.println("Set Request Header: "+key + "->"+value); this.urlConnection.setRequestProperty(key, value); } } } }
/**
 * Called when computing the cohesion between two functions.
 *
 * <p>Computes the percentage of types used in common by the two functions
 * over the total number of types used across both functions. The void type
 * and the anonymous type are ignored on both sides.
 *
 * @param f1 a function from a source-code model
 * @param f2 another function from a source-code model
 * @return a double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionTypes(Function f1, Function f2) {
  // Collect the relevant (non-void, non-anonymous) types of each function.
  Set<ComplexType> typesF1 = new HashSet<ComplexType>();
  for (ComplexType t : f1.getTotalComplexTypes().keySet()) {
    if (!t.equals(PrimitiveType.voidType) && !t.equals(ComplexType.anonymousType)) {
      typesF1.add(t);
    }
  }
  Set<ComplexType> typesF2 = new HashSet<ComplexType>();
  for (ComplexType t : f2.getTotalComplexTypes().keySet()) {
    if (!t.equals(PrimitiveType.voidType) && !t.equals(ComplexType.anonymousType)) {
      typesF2.add(t);
    }
  }
  // Intersection = shared types; union = all types across both functions.
  Set<ComplexType> common = new HashSet<ComplexType>(typesF1);
  common.retainAll(typesF2);
  Set<ComplexType> total = new HashSet<ComplexType>(typesF1);
  total.addAll(typesF2);
  if (total.isEmpty()) {
    return 0.0;
  }
  return (100.0 * common.size()) / total.size();
}
/**
 * Called when computing the cohesion between two functions.
 *
 * <p>Computes the percentage of program-global variables accessed in common
 * over the total number of global variables accessed by the two functions.
 *
 * @param f1 a function from a source-code model
 * @param f2 another function from a source-code model
 * @return a double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionGlobalVars(Function f1, Function f2) {
  Set<GlobalVariable> varsF1 = new HashSet<GlobalVariable>(f1.getCoreGlobalVariables().keySet());
  Set<GlobalVariable> varsF2 = new HashSet<GlobalVariable>(f2.getCoreGlobalVariables().keySet());
  // Intersection = globals both functions touch; union = all touched globals.
  Set<GlobalVariable> common = new HashSet<GlobalVariable>(varsF1);
  common.retainAll(varsF2);
  Set<GlobalVariable> total = new HashSet<GlobalVariable>(varsF1);
  total.addAll(varsF2);
  if (total.isEmpty()) {
    return 0.0;
  }
  return (100.0 * common.size()) / total.size();
}
/**
 * Includes the incoming and outgoing ID sets in the string representation,
 * tolerating null maps (null is appended rather than dereferenced).
 */
@Override
public String toString() {
  ToStringGenerator aGen = ToStringGenerator.getDerived(super.toString());
  aGen.append("incomingIDs", m_aIncoming == null ? null : m_aIncoming.keySet());
  aGen.append("outgoingIDs", m_aOutgoing == null ? null : m_aOutgoing.keySet());
  return aGen.toString();
}
/**
 * Determines the winning mark(s): the marks holding the maximum number of
 * points once the board is full.
 *
 * @return the cached winner if already decided; {@code EMPTY} while the
 *     board is not yet full; otherwise the set of top-scoring marks
 */
public Set<ScribeMark> winner() {
  if (winner != null) {
    return winner;
  }
  if (isFull() == false) {
    // Game still in progress: report EMPTY as the (non-)winner.
    Set<ScribeMark> result = new TreeSet<ScribeMark>();
    result.add(ScribeMark.EMPTY);
    return result;
  }
  Map<ScribeMark, Integer> points = points();
  /*
   * Find maximum points.
   */
  int max = 0;
  for (int value : points.values()) {
    if (value > max) {
      max = value;
    }
  }
  /*
   * Remove all elements with points less than maximum. Collect the losers
   * first, then remove them in bulk: the original called points.remove()
   * inside an enhanced-for over points.keySet(), which risks a
   * ConcurrentModificationException from the fail-fast iterator.
   */
  Set<ScribeMark> losers = new TreeSet<ScribeMark>();
  for (Map.Entry<ScribeMark, Integer> entry : points.entrySet()) {
    if (entry.getValue() < max) {
      losers.add(entry.getKey());
    }
  }
  points.keySet().removeAll(losers);
  return points.keySet();
}
/** Test that the iterator order matches the keySet order. */
public void testMapIteratorOrder() {
  if (!supportsFullIterator()) {
    return;
  }
  final OrderedMapIterator<K, V> it = makeObject();
  final Map<K, V> map = getMap();
  // NOTE(review): this assertion compares two fresh copies of the same
  // keySet and is therefore always true — presumably it was meant to compare
  // the map-iterator order against keySet order; confirm the intent.
  assertEquals(
      "keySet() not consistent", new ArrayList<K>(map.keySet()), new ArrayList<K>(map.keySet()));
  final Iterator<K> it2 = map.keySet().iterator();
  assertEquals(true, it.hasNext());
  assertEquals(true, it2.hasNext());
  final List<K> list = new ArrayList<K>();
  // Forward pass: every key from the map iterator must match the keySet
  // iterator, in order; record the keys for the backward pass.
  while (it.hasNext()) {
    final K key = it.next();
    assertEquals(it2.next(), key);
    list.add(key);
  }
  assertEquals(map.size(), list.size());
  // Backward pass: walking previous() must replay the keys in reverse.
  while (it.hasPrevious()) {
    final K key = it.previous();
    assertEquals(list.get(list.size() - 1), key);
    list.remove(list.size() - 1);
  }
  assertEquals(0, list.size());
}
public void deserialize(String value) { if (allKeyValueMap == null) { saveEntity = value; return; } // always reset the list of items when the item changes keyValueMap_used = new HashMap<>(); if (value != null && !value.isEmpty()) { String[] lines = value.split(PROPERTIES_DELIMETER); String[] splitLine; for (String line : lines) { if (line.isEmpty()) { continue; } splitLine = line.split(KEY_VALUE_DELIMETER, 2); String key = splitLine[0]; if (allKeyValueMap.containsKey(key)) { keyValueMap_used.put(key, splitLine[1]); } } } init(allKeyValueMap.keySet(), keyValueMap_used.keySet()); }
/** * Tests a classifier on a data set * * @param cls the classifier to test * @param data the data set to test on * @return the performance for each class */ public static Map<Object, PerformanceMeasure> testDataset(Classifier cls, Dataset data) { Map<Object, PerformanceMeasure> out = new HashMap<Object, PerformanceMeasure>(); for (Object o : data.classes()) { out.put(o, new PerformanceMeasure()); } for (Instance instance : data) { Object prediction = cls.classify(instance); if (instance.classValue().equals(prediction)) { // prediction // ==class for (Object o : out.keySet()) { if (o.equals(instance.classValue())) { out.get(o).tp++; } else { out.get(o).tn++; } } } else { // prediction != class for (Object o : out.keySet()) { /* prediction is positive class */ if (prediction.equals(o)) { out.get(o).fp++; } /* instance is positive class */ else if (o.equals(instance.classValue())) { out.get(o).fn++; } /* none is positive class */ else { out.get(o).tn++; } } } } return out; }
public static void main(String[] args) { AstroDate astroi = new AstroDate(2009, 4, 21, 6, 41, 43); TimeElement time = new TimeElement(astroi, TimeElement.Scale.UNIVERSAL_TIME_UTC); ObserverElement obs = ObserverElement.MADRID; // The ephemeris object defines the target body and how to calculate ephemeris. The algorithm // is set to Moshier, which is the best way for general calculations EphemerisElement eph = new EphemerisElement( Target.Jupiter, EphemerisElement.Ephem.APPARENT, EphemerisElement.EQUINOX_OF_DATE, EphemerisElement.TOPOCENTRIC, Precession.Method.IAU2000, EphemerisElement.Frame.J2000); Map<Target, MoonEphemElement> e = MoonEphem.galileanSatellitesEphemerides_L1(time, obs, eph); System.out.println(e.keySet()); for (Target t : e.keySet()) { System.out.println(t + " + " + e.get(t).rightAscension); } for (Target t : e.keySet()) { System.out.println(t + " + " + e.get(t).mutualPhenomena); } }
/**
 * Applies XML-configured metadata overrides to every AdornedTargetCollection
 * entry of the given metadata map whose property name has an override
 * registered for the requested config key / ceiling entity.
 *
 * @param overrideViaXmlRequest carries the config key, ceiling entity,
 *     parent-exclusion flag and the dynamic entity DAO
 * @param metadata mutable map of field metadata, updated in place
 * @return always {@code FieldProviderResponse.HANDLED}
 */
@Override
public FieldProviderResponse overrideViaXml(
    OverrideViaXmlRequest overrideViaXmlRequest, Map<String, FieldMetadata> metadata) {
  Map<String, FieldMetadataOverride> overrides =
      getTargetedOverride(
          overrideViaXmlRequest.getRequestedConfigKey(),
          overrideViaXmlRequest.getRequestedCeilingEntity());
  if (overrides != null) {
    for (String propertyName : overrides.keySet()) {
      final FieldMetadataOverride localMetadata = overrides.get(propertyName);
      // Only metadata entries whose key matches the overridden property name
      // are candidates.
      for (String key : metadata.keySet()) {
        if (key.equals(propertyName)) {
          try {
            // Only adorned-target collections are handled by this provider.
            if (metadata.get(key) instanceof AdornedTargetCollectionMetadata) {
              AdornedTargetCollectionMetadata serverMetadata =
                  (AdornedTargetCollectionMetadata) metadata.get(key);
              if (serverMetadata.getTargetClass() != null) {
                // Resolve the classes and the reflective Field the metadata
                // describes, rebuild the metadata with the override applied,
                // then put the rebuilt entry back into the map.
                Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
                Class<?> parentClass = null;
                if (serverMetadata.getOwningClass() != null) {
                  parentClass = Class.forName(serverMetadata.getOwningClass());
                }
                String fieldName = serverMetadata.getFieldName();
                Field field =
                    overrideViaXmlRequest
                        .getDynamicEntityDao()
                        .getFieldManager()
                        .getField(targetClass, fieldName);
                // Single-entry scratch map: buildAdornedTargetCollectionMetadata
                // writes its result into it keyed by field name.
                Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
                temp.put(field.getName(), serverMetadata);
                FieldInfo info = buildFieldInfo(field);
                buildAdornedTargetCollectionMetadata(
                    parentClass,
                    targetClass,
                    temp,
                    info,
                    localMetadata,
                    overrideViaXmlRequest.getDynamicEntityDao());
                serverMetadata = (AdornedTargetCollectionMetadata) temp.get(field.getName());
                metadata.put(key, serverMetadata);
                // Propagate the parent's exclusion to this field.
                if (overrideViaXmlRequest.getParentExcluded()) {
                  if (LOG.isDebugEnabled()) {
                    LOG.debug(
                        "applyAdornedTargetCollectionMetadataOverrides:Excluding "
                            + key
                            + "because parent is marked as excluded.");
                  }
                  serverMetadata.setExcluded(true);
                }
              }
            }
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }
    }
  }
  return FieldProviderResponse.HANDLED;
}
/**
 * Renders the instructions of every basic block, followed by the rescue and
 * ensure block maps and any closures encountered in this scope.
 *
 * @return a multi-line textual dump of this scope's CFG
 */
public String toStringInstrs() {
  StringBuilder buf = new StringBuilder();
  for (BasicBlock b : getSortedBasicBlocks()) {
    buf.append(b.toStringInstrs());
  }

  buf.append("\n\n------ Rescue block map ------\n");
  for (Map.Entry<BasicBlock, BasicBlock> entry : rescuerMap.entrySet()) {
    buf.append("BB ")
        .append(entry.getKey().getID())
        .append(" --> BB ")
        .append(entry.getValue().getID())
        .append("\n");
  }

  buf.append("\n\n------ Ensure block map ------\n");
  for (Map.Entry<BasicBlock, BasicBlock> entry : ensurerMap.entrySet()) {
    buf.append("BB ")
        .append(entry.getKey().getID())
        .append(" --> BB ")
        .append(entry.getValue().getID())
        .append("\n");
  }

  List<IRClosure> closures = scope.getClosures();
  if (!closures.isEmpty()) {
    buf.append("\n\n------ Closures encountered in this scope ------\n");
    for (IRClosure c : closures) {
      buf.append(c.toStringBody());
    }
    buf.append("------------------------------------------------\n");
  }
  return buf.toString();
}
/** * Gives back the leaf nodes related to a given node * * @param position this node is the starting point to find leaves * @param leaves collection of all leaves */ public Map<INode, IAttribute> collectLeaves(INode position, Map<INode, IAttribute> leaves) { // Create leaves set if not exists if (leaves == null) { leaves = new HashMap<INode, IAttribute>(); } // If position is leaf if (position.isLeaf()) { System.out.println(position.getAttributesString()); leaves.put(position, position.getAttributeValue(position)); return leaves; } // If position is no leaf Iterator<INode> children = position.getChildren(); while (children.hasNext()) { INode child = children.next(); Map<INode, IAttribute> tempLeaves = (collectLeaves(child, leaves)); for (INode tempLeaf : tempLeaves.keySet()) { if (!leaves.keySet().contains(tempLeaf)) { leaves.put(tempLeaf, tempLeaf.getAttributeValue(tempLeaf)); } } } return leaves; }
/** * This method retrieves all the values accessible through a getter ( <code>getX()</code> method) * in order to build the corresponding set of {@link Objective}s. At the opposite of {@link * #createFromGetters(Class)}, an additional filter is used: we build an {@link Objective} for * each getter which does not correspond to a setter (<code>setX()</code> method with the same * <code>X</code> than the getter). This method is adapted for {@link Solution} implementations * which provide setters only for their fundamental values (e.g. the path of a TSP {@link * Solution}) and use getters only for the computed values (e.g. the length of such a path).<br> * <br> * Notice that, if all the relevant getters are not present, the corresponding {@link Objective}s * will not be retrieved. On the opposite, any additional getter which does not correspond to a * relevant {@link Objective} will be mistakenly retrieved. So be sure that the relevant elements * (and only these ones) have their getter (and no setter). Otherwise, you should use a different * method or generate the {@link Objective}s manually. 
* * @param solutionClass the {@link Solution} class to analyze * @return the set of {@link Objective}s retrieved from this class */ public <Solution> Collection<Objective<Solution, ?>> createFromGettersWithoutSetters( Class<Solution> solutionClass) { Map<String, Method> getters = new HashMap<>(); Map<String, Method> setters = new HashMap<>(); for (Method method : solutionClass.getMethods()) { if (isGetter(method)) { String name = method.getName().substring(3); getters.put(name, method); } else if (isSetter(method)) { String name = method.getName().substring(3); setters.put(name, method); } else { // not a getter/setter, ignore it } } getters.keySet().removeAll(setters.keySet()); Collection<Objective<Solution, ?>> objectives = new LinkedList<>(); for (Entry<String, Method> entry : getters.entrySet()) { String name = entry.getKey(); Method getter = entry.getValue(); objectives.add(createObjectiveOn(solutionClass, getter, name, getter.getReturnType())); } return objectives; }
/**
 * Calculates the spawn timer (default 5 seconds in between spawns) <br>
 * Updates every ghost, releases one pending ghost per release interval, and
 * resolves pacman/ghost collisions.
 *
 * @param delta seconds elapsed since the previous frame
 */
@Override
public void update(float delta) {
  releaseTimer += delta;
  // Once all ghosts are dead, every ghost becomes releasable again.
  if (checkIfAllGhostsAreDead()) {
    for (int key : ghosts.keySet()) {
      ghosts.get(key).setReleaseable(true);
    }
  }
  for (int key : ghosts.keySet()) { // updating the ghost AI
    // Release at most one pending ghost per interval, then restart the timer
    // (skipping this ghost's update for the current frame).
    if (!ghosts.get(key).isReleaseable() && releaseTimer >= releaseInterval && ghostReleasable) {
      releaseTimer = 0.0f;
      ghosts.get(key).setReleaseable(true);
      continue;
    }
    ghosts.get(key).update(delta);
    if (pacman.collision(ghosts.get(key))) {
      // Invincible ghosts kill pacman; otherwise pacman eats the ghost.
      if (ghosts.get(key).isInvincible()) {
        killPacman();
      } else {
        killGhost(key);
      }
    }
  }
}
/**
 * Traces index lookup symbols through a projection: only symbols the
 * projection forwards as plain symbol references can pass through, and those
 * are translated to the underlying index source's symbols.
 *
 * @param node the projection being traversed
 * @param lookupSymbols the lookup symbols requested above this node
 * @return map from each surviving lookup symbol to its index source symbol
 */
@Override
public Map<Symbol, Symbol> visitProject(ProjectNode node, Set<Symbol> lookupSymbols) {
  // Map from output Symbols to source Symbols: keep only assignments that
  // are direct symbol references (anything computed cannot be traced).
  Map<Symbol, Symbol> directSymbolTranslationOutputMap =
      Maps.transformValues(
          Maps.filterValues(node.getAssignments(), SymbolReference.class::isInstance),
          Symbol::from);
  Map<Symbol, Symbol> outputToSourceMap =
      lookupSymbols
          .stream()
          .filter(directSymbolTranslationOutputMap.keySet()::contains)
          .collect(toImmutableMap(identity(), directSymbolTranslationOutputMap::get));

  checkState(
      !outputToSourceMap.isEmpty(), "No lookup symbols were able to pass through the projection");

  // Map from source Symbols to underlying index source Symbols, resolved by
  // recursing into the projection's input.
  Map<Symbol, Symbol> sourceToIndexMap =
      node.getSource().accept(this, ImmutableSet.copyOf(outputToSourceMap.values()));

  // Generate the Map the connects lookup symbols to underlying index source
  // symbols by composing the two translations (dropping source symbols the
  // child could not resolve).
  Map<Symbol, Symbol> outputToIndexMap =
      Maps.transformValues(
          Maps.filterValues(outputToSourceMap, in(sourceToIndexMap.keySet())),
          Functions.forMap(sourceToIndexMap));
  return ImmutableMap.copyOf(outputToIndexMap);
}
/**
 * Verifies that getAllProxyConfigurationSettings returns every platform key
 * plus the three explicitly created settings, with encrypted settings exposed
 * under their suffixed names.
 */
@Test
public void testGetAllConfigurationSettings_TwoHits() throws Exception {
  createConfigSetting("setting1", "testValue");
  createConfigSetting(
      "setting2" + APPConfigurationServiceBean.CRYPT_KEY_SUFFIX,
      APPConfigurationServiceBean.CRYPT_PREFIX + "testValue");
  createConfigSetting(
      "setting3" + APPConfigurationServiceBean.CRYPT_KEY_SUFFIX_PASS,
      APPConfigurationServiceBean.CRYPT_PREFIX + "testValue");

  // One setting per platform configuration key as well.
  PlatformConfigurationKey[] keys = PlatformConfigurationKey.values();
  for (PlatformConfigurationKey key : keys) {
    createConfigSetting(key.name(), "testValue");
  }

  Map<String, String> result =
      runTX(
          new Callable<Map<String, String>>() {
            @Override
            public Map<String, String> call() throws Exception {
              return cs.getAllProxyConfigurationSettings();
            }
          });

  assertNotNull(result);
  assertEquals(keys.length + 3, result.size());
  assertTrue(result.containsKey("setting1"));
  assertTrue(result.containsKey("setting2_PWD"));
  assertTrue(result.containsKey("setting3_PASS"));
  assertEquals("testValue", result.get("setting1"));
  assertEquals("testValue", result.get("setting2_PWD"));
  assertEquals("testValue", result.get("setting3_PASS"));
}
/**
 * Called when computing the cohesion between two functions.
 *
 * <p>Computes the percentage of similar local-variable pairs between the two
 * functions over the total number of local-variable pairs. Two variables are
 * similar when their names are long enough, their types match and the name
 * similarity check passes.
 *
 * @param f1 a function from a source-code model
 * @param f2 another function from a source-code model
 * @return a double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionLocalVars(Function f1, Function f2) {
  Map<LocalVariable, Integer> localVars1 = f1.getLocalVariables();
  Map<LocalVariable, Integer> localVars2 = f2.getLocalVariables();

  double nbCommon = 0;
  for (LocalVariable var1 : localVars1.keySet()) {
    // Hoisted out of the inner loop: this check depends only on var1, but the
    // original re-evaluated it for every var2.
    if (var1.getName().length() < VARNAME_MIN_LEN) {
      continue;
    }
    for (LocalVariable var2 : localVars2.keySet()) {
      if (var2.getName().length() >= VARNAME_MIN_LEN
          && var1.getType().equals(var2.getType())
          && similarity.similar(var1.getName(), var2.getName())) {
        ++nbCommon;
      }
    }
  }

  // Cast before multiplying so huge functions cannot overflow the int product.
  double nbPairs = ((double) localVars1.size()) * localVars2.size();
  if (nbPairs > 0) {
    return 100.0 * nbCommon / nbPairs;
  }
  return 0.0;
}
/** * Updates a set of possibly existing properties. Returns changes between new and existing * property sets. * * @param key The Key * @param newProperties New object properties * @param forceReplace Skip smart cache check for changes and replace all property values * @return Changed properties. */ public Set<String> update(Key key, Map<String, Object> newProperties, boolean forceReplace) { Map<String, String> existing = cache.get().get(key); if (existing != null) { if (forceReplace) { return newProperties.keySet(); } Set<String> changed = new HashSet<>(newProperties.size()); for (Map.Entry<String, Object> newEntry : newProperties.entrySet()) { Object existingProp = existing.get(newEntry.getKey()); // property does not exist yet or has different value then new property Object newEntryValue = newEntry.getValue(); if (newEntryValue != null) { String newEntryHash = getPropertyHash(newEntryValue); if (existingProp == null || !existingProp.equals(newEntryHash)) { changed.add(newEntry.getKey()); } } else { // set to null, write it if (existingProp != null) { changed.add(newEntry.getKey()); } } } insert(key, newProperties); return changed; } insert(key, newProperties); return newProperties.keySet(); }
/**
 * Verifies that keySet iterators fail fast: removing through the map or
 * clearing it while iterating must raise ConcurrentModificationException.
 */
public void testFailFastKeySet() {
  if (!isAddRemoveModifiable() || !isFailFastExpected()) {
    return;
  }

  // Removing an entry through the map mid-iteration must fail fast.
  resetFull();
  Iterator iterator = map.keySet().iterator();
  final Object first = iterator.next();
  map.remove(first);
  try {
    iterator.next();
    fail();
  } catch (ConcurrentModificationException expected) {
    // expected: the iterator detected the structural modification
  }

  // Clearing the map mid-iteration must fail fast as well.
  resetFull();
  iterator = map.keySet().iterator();
  iterator.next();
  map.clear();
  try {
    iterator.next();
    fail();
  } catch (ConcurrentModificationException expected) {
    // expected
  }
}
// Computes the saturation of all current aspects and calculates the overall happiness as well as // if the // Node will stop growing. private void computeOverallSaturation() { if (isSaturated) return; double completeSaturation = 0D; int aspects = aspectSaturation.keySet().size(); for (Aspect a : aspectSaturation.keySet()) { completeSaturation += aspectSaturation.get(a); } // Not just 0.0-1.0! // Values vary from 0.0 to SATURATION_CAP (or a bit higher) double percentSaturation = completeSaturation / ((double) aspects); double satCmp = SATURATION_CAP; satCmp *= 8.5; satCmp /= 10; if (overallHappiness > HAPPINESS_CAP) { overallHappiness /= 10; owningNode.triggerVortexExplosion(); } // If the saturation is in the upper 85% if (percentSaturation > satCmp) { this.isSaturated = true; } }
/**
 * Builds a VIEW intent for the given URI, applying the component and package
 * of every registered predicate that matches (a later match overwrites an
 * earlier one).
 *
 * @param uri the URI to view; resolved before matching
 * @return the configured intent
 */
public Intent fromUri(URI uri) {
  uri = resolveUri(uri);
  Intent intent = new Intent("android.intent.action.VIEW", uri);

  for (Iterator it = conditionalComponentNameMap.keySet().iterator(); it.hasNext(); ) {
    Predicate predicate = (Predicate) it.next();
    if (predicate.apply(uri)) {
      intent.setComponent((ComponentName) conditionalComponentNameMap.get(predicate));
    }
  }

  for (Iterator it = conditionalPackageNameMap.keySet().iterator(); it.hasNext(); ) {
    Predicate predicate = (Predicate) it.next();
    if (predicate.apply(uri)) {
      intent.setPackage((String) conditionalPackageNameMap.get(predicate));
    }
  }
  return intent;
}
/**
 * Serializes both password maps (regular and PPK) into the given element,
 * one scrambled password entry per CVS root.
 *
 * @param element the parent element to append the password containers to
 * @throws WriteExternalException declared for the externalization contract
 */
public void writeExternal(Element element) throws WriteExternalException {
  // The two maps were serialized by duplicated copies of the same loop;
  // factored into a single helper.
  element.addContent(writeScrambledPasswords(PASSWORDS, myCvsRootToStoringPasswordMap));
  element.addContent(writeScrambledPasswords(PPKPASSWORDS, myCvsRootToStoringPPKPasswordMap));
}

/**
 * Builds a container element of the given name holding one scrambled
 * password child element per CVS root in the map.
 */
private static Element writeScrambledPasswords(
    String containerName, Map<String, String> passwordMap) {
  Element passwords = new Element(containerName);
  for (final String cvsRoot : passwordMap.keySet()) {
    Element password = new Element(PASSWORD);
    password.setAttribute(CVSROOT_ATTR, cvsRoot);
    password.setAttribute(
        PASSWORD_ATTR,
        PServerPasswordScrambler.getInstance().scramble(passwordMap.get(cvsRoot)));
    passwords.addContent(password);
  }
  return passwords;
}
public static void countWords() { try { WordGrabber wordgrabber = new WordGrabber("H:/k/klostermann_aiko/P1/aufgabenblatt_6/pg62.txt"); // Pfad angeben Map<Word, Counter> map = new HashMap<Word, Counter>(); Word word = null; while (wordgrabber.hasNext()) { word = new Word(wordgrabber.next()); if (map.containsKey(word)) { map.get(word).inc(); } else { map.put(word, new Counter()); } // else } // while Map<Word, Counter> tree = new TreeMap<Word, Counter>(); for (Word outputWord : map.keySet()) { tree.put(outputWord, map.get(outputWord)); } // for System.out.println("contained words:"); for (Word outputWord : tree.keySet()) { System.out.println(outputWord.getWord() + " (" + tree.get(outputWord).getCounter() + ")"); } // for } catch (IOException e) { e.printStackTrace(); } // try catch } // countWords
/**
 * Counts the records matching the given SearchCriterion.
 *
 * <p>(Translated from the original Chinese javadoc: "counts related
 * information entries according to sc"; created 2013-5-11, author liyang.)
 *
 * @param lockMode optional JPA lock mode to apply to the count query
 * @param cacheName optional second-level cache region to use
 * @param sc search criterion; a default one is used when null
 * @return the single count result of the query
 */
public Object statBySearchCriterion(
    final LockModeType lockMode, final ConstantsEnum.EHCACHE cacheName, SearchCriterion sc) {
  if (sc == null) {
    sc = new SearchCriterion();
  }
  Query query = entityManager.createQuery("select count(*) " + this.getJPQL(sc));
  // The filter map and the filter-value map are bound identically; the two
  // duplicated loops of the original are factored into one helper.
  applyParameters(query, sc.getFilterMap());
  applyParameters(query, sc.getFilterValue());
  if (lockMode != null) {
    query.setLockMode(lockMode);
  }
  if (cacheName != null) {
    query.setHint("org.hibernate.cacheable", true);
    query.setHint("org.hibernate.cacheRegion", cacheName.getStr());
  }
  return query.getSingleResult();
}

/**
 * Binds every entry of the given map as a named parameter on the query. For
 * keys of the form "alias.name" only the part after the first dot is used as
 * the parameter name.
 */
private static void applyParameters(Query query, Map<String, Object> params) {
  if (params == null || params.isEmpty()) {
    return;
  }
  for (Map.Entry<String, Object> entry : params.entrySet()) {
    String[] parts = entry.getKey().split("\\.");
    String name = parts.length > 1 ? parts[1] : entry.getKey();
    query.setParameter(name, entry.getValue());
  }
}
/** @should return person data for each obs in the passed context */ @Override public EvaluatedObsData evaluate(ObsDataDefinition definition, EvaluationContext context) throws EvaluationException { EvaluatedObsData c = new EvaluatedObsData(definition, context); // create a map of obs ids -> patient ids HqlQueryBuilder q = new HqlQueryBuilder(); q.select("o.obsId", "o.personId"); q.from(Obs.class, "o"); q.whereObsIn("o.obsId", context); Map<Integer, Integer> convertedIds = evaluationService.evaluateToMap(q, Integer.class, Integer.class, context); if (!convertedIds.keySet().isEmpty()) { // create a new (person) evaluation context using the retrieved ids PersonEvaluationContext personEvaluationContext = new PersonEvaluationContext(); personEvaluationContext.setBasePersons( new PersonIdSet(new HashSet<Integer>(convertedIds.values()))); // evaluate the joined definition via this person context PersonToObsDataDefinition def = (PersonToObsDataDefinition) definition; EvaluatedPersonData pd = Context.getService(PersonDataService.class) .evaluate(def.getJoinedDefinition(), personEvaluationContext); // now create the result set by mapping the results in the person data set to obs ids for (Integer obsId : convertedIds.keySet()) { c.addData(obsId, pd.getData().get(convertedIds.get(obsId))); } } return c; }
/**
 * Recomputes which switch ports this slice may access, diffing the new set
 * against the currently allowed ports and notifying via updatePortStatus()
 * of any ports added or removed.
 */
private void updatePortList() {
  ArrayList<Short> addedPorts = new ArrayList<Short>();
  ArrayList<Short> removedPorts = new ArrayList<Short>();
  synchronized (FVConfig.class) {
    // update our local copy
    this.localFlowSpace = getLocalFlowSpace();
  }
  Set<Short> ports =
      FlowSpaceUtil.getPortsBySlice(
          this.fvClassifier.getSwitchInfo().getDatapathId(), this.sliceName, this.localFlowSpace);
  if (ports.contains(OFPort.OFPP_ALL.getValue())) {
    // this switch has access to ALL PORTS; feed them in from the
    // features request
    ports.clear(); // remove the OFPP_ALL virtual port
    this.allowAllPorts = true;
    for (OFPhysicalPort phyPort : this.fvClassifier.getSwitchInfo().getPorts())
      ports.add(phyPort.getPortNumber());
  }
  // Grant access to ports that are newly allowed.
  for (Short port : ports) {
    if (!allowedPorts.keySet().contains(port)) {
      FVLog.log(LogLevel.DEBUG, this, "adding access to port ", port);
      allowedPorts.put(port, Boolean.TRUE);
      addedPorts.add(port);
    }
  }
  // Revoke access to ports no longer in the slice; Iterator.remove is used
  // because entries are removed while iterating the keySet.
  for (Iterator<Short> it = allowedPorts.keySet().iterator(); it.hasNext(); ) {
    Short port = it.next();
    if (!ports.contains(port)) {
      FVLog.log(LogLevel.DEBUG, this, "removing access to port ", port);
      it.remove();
      removedPorts.add(port);
    }
  }
  updatePortStatus(addedPorts, removedPorts);
}