private static void findIndividualPerSequenceCoverage(
    Calibrator calibrator,
    Map<String, Integer> sequenceLengths,
    Map<String, String> readGroupToSampleId,
    final Map<String, HashMap<String, CalibrationStats>> local,
    RegionRestriction restriction) {
  final Covariate rgCovariate =
      calibrator.getCovariate(calibrator.getCovariateIndex(CovariateEnum.READGROUP));
  final Covariate seqCovariate =
      calibrator.getCovariate(calibrator.getCovariateIndex(CovariateEnum.SEQUENCE));
  for (final Map.Entry<String, Integer> entry : sequenceLengths.entrySet()) {
    final String sequenceName = entry.getKey();
    if (restriction != null && !sequenceName.equals(restriction.getSequenceName())) {
      continue;
    }
    for (final Map.Entry<String, String> e2 : readGroupToSampleId.entrySet()) {
      final String readGroup = e2.getKey();
      final String sampleName = e2.getValue();
      final int rgValue = rgCovariate.valueOf(readGroup);
      final int seqValue = seqCovariate.valueOf(sequenceName);
      if (rgValue == -1 || seqValue == -1) {
        add(local, sampleName, sequenceName, new CalibrationStats(null));
      } else {
        final Calibrator.QuerySpec spec = calibrator.initQuery();
        spec.setValue(CovariateEnum.READGROUP, rgValue);
        spec.setValue(CovariateEnum.SEQUENCE, seqValue);
        calibrator.processStats(new LocalStatsProcessor(local, sampleName, sequenceName), spec);
      }
    }
  }
}
/**
 * The assumption is that this method will be invoked only by cache.putAll and cache.removeAll
 * methods.
 */
@Override
public Map<K, ValueHolder<V>> bulkCompute(
    final Set<? extends K> keys,
    final Function<
            Iterable<? extends Map.Entry<? extends K, ? extends V>>,
            Iterable<? extends Map.Entry<? extends K, ? extends V>>>
        remappingFunction)
    throws StoreAccessException {
  Map<K, ValueHolder<V>> valueHolderMap = new HashMap<K, ValueHolder<V>>();
  if (remappingFunction instanceof Ehcache.PutAllFunction) {
    Ehcache.PutAllFunction<K, V> putAllFunction = (Ehcache.PutAllFunction) remappingFunction;
    Map<K, V> entriesToRemap = putAllFunction.getEntriesToRemap();
    for (Map.Entry<K, V> entry : entriesToRemap.entrySet()) {
      PutStatus putStatus = silentPut(entry.getKey(), entry.getValue());
      if (putStatus == PutStatus.PUT || putStatus == PutStatus.UPDATE) {
        putAllFunction.getActualPutCount().incrementAndGet();
        valueHolderMap.put(entry.getKey(), new ClusteredValueHolder<V>(entry.getValue()));
      }
    }
  } else if (remappingFunction instanceof Ehcache.RemoveAllFunction) {
    Ehcache.RemoveAllFunction<K, V> removeAllFunction = (Ehcache.RemoveAllFunction) remappingFunction;
    for (K key : keys) {
      boolean removed = silentRemove(key);
      if (removed) {
        removeAllFunction.getActualRemoveCount().incrementAndGet();
      }
    }
  } else {
    throw new UnsupportedOperationException(
        "This compute method is not yet capable of handling generic computation functions");
  }
  return valueHolderMap;
}
public void do_alarms() {
  // every entry may be re-added immediately after its method execution, so it's safe
  // to iterate over a copy of the hashmap
  HashMap<String, Integer> local_alarm = new HashMap<>(alarm);
  // iterate through the hashmap
  for (Map.Entry a : local_alarm.entrySet()) {
    if ((int) a.getValue() <= 0) {
      // remove the executed alarm
      alarm.remove(a.getKey().toString());
      // execute alarm method
      Method method;
      //noinspection TryWithIdenticalCatches
      try {
        method = this.getClass().getMethod("alarm_" + a.getKey());
        method.invoke(this);
      } catch (NoSuchMethodException e) {
        e.printStackTrace();
      } catch (IllegalAccessException e) {
        e.printStackTrace();
      } catch (InvocationTargetException e) {
        e.printStackTrace();
      }
    } else {
      // decrease the alarm timer
      alarm.put(a.getKey().toString(), (int) a.getValue() - 1);
    }
  }
}
private void compareValueType(
    CompareContext context, String id, ModelNode current, ModelNode legacy) {
  ModelNode currentValueType = current.get(VALUE_TYPE);
  ModelNode legacyValueType = legacy.get(VALUE_TYPE);
  if (!currentValueType.isDefined() && !legacyValueType.isDefined()) {
    return;
  }
  if (isType(legacyValueType) || isType(currentValueType)) {
    if (!currentValueType.equals(legacyValueType)) {
      context.println(
          "Different 'value-type' for " + id
              + ". Current: " + current.get(VALUE_TYPE)
              + "; legacy: " + legacy.get(VALUE_TYPE));
    }
  } else {
    Map<String, ModelNode> legacyValueTypes = createMapIndexedByKey(legacyValueType);
    Map<String, ModelNode> currentValueTypes = createMapIndexedByKey(currentValueType);
    compareKeySetsAndRemoveMissing(
        context, "value-type for " + id, currentValueTypes, legacyValueTypes);
    for (Map.Entry<String, ModelNode> entry : currentValueTypes.entrySet()) {
      ModelNode currentEntry = entry.getValue();
      ModelNode legacyEntry = legacyValueTypes.get(entry.getKey());
      compareAttributeOrOperationParameter(
          context,
          "value-type key '" + entry.getKey() + "' for " + id,
          currentEntry,
          legacyEntry);
    }
  }
}
public void laplace() {
  double alpha = 0.5;
  HashMap<Vertex, Vector3> centerList = new HashMap<>();
  for (Vertex v : vertexList) {
    ArrayList<Vertex> neighboursVertexes;
    Vector3 sum = new Vector3(0, 0, 0);
    neighboursVertexes = GetNeighboursVertexes(v);
    for (Vertex neighbour : neighboursVertexes) {
      sum = sum.add(neighbour.getPosition());
    }
    sum = sum.devideValueByVector(1);
    centerList.put(v, sum);
  }
  for (Map.Entry<Vertex, Vector3> entry : centerList.entrySet()) {
    Vector3 ap = entry.getKey().getPosition().multiply(alpha);
    Vector3 ac = entry.getValue().multiply(1 - alpha);
    Vector3 newPosition = new Vector3(ap).add(ac);
    entry.setValue(newPosition);
  }
  vertexList = new ArrayList<>();
  for (Map.Entry<Vertex, Vector3> entry : centerList.entrySet()) {
    Vertex vertex = new Vertex(entry.getValue());
    vertex.setHalfEgde(entry.getKey().getHalfEdge());
    vertex.setColor(entry.getKey().getColor());
    vertexList.add(vertex);
  }
  triangulatedMesh.computeTriangleNormals();
  triangulatedMesh.computeVertexNormals();
}
@SuppressWarnings("unchecked")
protected Task<Object> performInvocation(
    HandlerContext ctx,
    final Invocation invocation,
    final LocalObjects.LocalObjectEntry entry,
    final LocalObjects.LocalObjectEntry target) {
  if (logger.isDebugEnabled()) {
    logger.debug("Invoking {} ", invocation);
  }
  try {
    if (target == null || target.isDeactivated()) {
      // if the entry is deactivated, forward the message back to the net.
      ctx.write(invocation);
      return Task.fromValue(null);
    }
    if (target.getObject() == null) {
      if (target instanceof ObserverEntry) {
        return Task.fromException(new ObserverNotFound());
      }
      ctx.write(invocation);
      return Task.fromValue(null);
    }
    final ObjectInvoker invoker =
        DefaultDescriptorFactory.get().getInvoker(target.getObject().getClass());
    final ActorTaskContext context = ActorTaskContext.current();
    if (invocation.getHeaders() != null
        && invocation.getHeaders().size() > 0
        && runtime.getStickyHeaders() != null) {
      for (Map.Entry e : invocation.getHeaders().entrySet()) {
        if (runtime.getStickyHeaders().contains(e.getKey())) {
          context.setProperty(String.valueOf(e.getKey()), e.getValue());
        }
      }
    }
    // todo: it would be nice to separate this last part into another handler (InvocationHandler)
    // to be able to intercept the invocation right before it actually happens, good for logging
    // and metrics
    if (context != null) {
      context.setRuntime(runtime);
    } else {
      runtime.bind();
    }
    final Task result =
        invoker.safeInvoke(target.getObject(), invocation.getMethodId(), invocation.getParams());
    if (invocation.getCompletion() != null) {
      InternalUtils.linkFutures(result, invocation.getCompletion());
    }
    return result;
  } catch (Throwable exception) {
    if (logger.isDebugEnabled()) {
      logger.debug("Unknown application error. ", exception);
    }
    if (invocation.getCompletion() != null) {
      invocation.getCompletion().completeExceptionally(exception);
    }
    return Task.fromException(exception);
  }
}
/**
 * Finds a suitable analyzer class for a magic signature
 *
 * @param signature the magic signature to look up
 * @return the analyzer factory to use
 */
private static FileAnalyzerFactory find(byte[] signature) throws IOException {
  // XXX this assumes ISO-8859-1 encoding (and should work in most cases
  // for US-ASCII, UTF-8 and other ISO-8859-* encodings, but not always),
  // we should try to be smarter than this...
  char[] chars = new char[signature.length > 8 ? 8 : signature.length];
  for (int i = 0; i < chars.length; i++) {
    chars[i] = (char) (0xFF & signature[i]);
  }
  String sig = new String(chars);
  FileAnalyzerFactory a = magics.get(sig);
  if (a == null) {
    String sigWithoutBOM = stripBOM(signature);
    for (Map.Entry<String, FileAnalyzerFactory> entry : magics.entrySet()) {
      if (sig.startsWith(entry.getKey())) {
        return entry.getValue();
      }
      // See if text files have the magic sequence if we remove the
      // byte-order marker
      if (sigWithoutBOM != null
          && entry.getValue().getGenre() == Genre.PLAIN
          && sigWithoutBOM.startsWith(entry.getKey())) {
        return entry.getValue();
      }
    }
  }
  return a;
}
Entry encode(final T o, final String parentDN) throws LDAPPersistException {
  // Get the attributes that should be included in the entry.
  final LinkedHashMap<String, Attribute> attrMap = new LinkedHashMap<String, Attribute>();
  attrMap.put("objectClass", objectClassAttribute);
  for (final Map.Entry<String, FieldInfo> e : fieldMap.entrySet()) {
    final FieldInfo i = e.getValue();
    if (!i.includeInAdd()) {
      continue;
    }
    final Attribute a = i.encode(o, false);
    if (a != null) {
      attrMap.put(e.getKey(), a);
    }
  }
  for (final Map.Entry<String, GetterInfo> e : getterMap.entrySet()) {
    final GetterInfo i = e.getValue();
    if (!i.includeInAdd()) {
      continue;
    }
    final Attribute a = i.encode(o);
    if (a != null) {
      attrMap.put(e.getKey(), a);
    }
  }
  final String dn = constructDN(o, parentDN, attrMap);
  final Entry entry = new Entry(dn, attrMap.values());
  if (postEncodeMethod != null) {
    try {
      postEncodeMethod.invoke(o, entry);
    } catch (Throwable t) {
      debugException(t);
      if (t instanceof InvocationTargetException) {
        t = ((InvocationTargetException) t).getTargetException();
      }
      throw new LDAPPersistException(
          ERR_OBJECT_HANDLER_ERROR_INVOKING_POST_ENCODE_METHOD.get(
              postEncodeMethod.getName(), type.getName(), getExceptionMessage(t)),
          t);
    }
  }
  setDNAndEntryFields(o, entry);
  if (superclassHandler != null) {
    final Entry e = superclassHandler.encode(o, parentDN);
    for (final Attribute a : e.getAttributes()) {
      entry.addAttribute(a);
    }
  }
  return entry;
}
public static Map<String, UiConfigImpl> filterPropertyNamesInCriteria(
    Map<String, UiConfigImpl> uiConfigs) {
  Map<String, UiConfigImpl> propertyNamesInCriterion = new LinkedHashMap<String, UiConfigImpl>();
  for (Map.Entry<String, UiConfigImpl> entry : uiConfigs.entrySet()) {
    if (!entry.getValue().isExcludedFromCriteria()
        && !entry.getKey().endsWith("AsString")
        && !CriterionOperator.getSupportedOperators(entry.getValue().getPropertyType())
            .isEmpty()) {
      UiConfigImpl config = entry.getValue();
      Set<String> cssClasses = config.getCssClasses();
      if (cssClasses.contains("date")) {
        config.getCssClasses().clear();
        config.getCssClasses().add("date");
      } else if (cssClasses.contains("datetime")) {
        config.getCssClasses().clear();
        config.getCssClasses().add("datetime");
      } else if (cssClasses.contains("time")) {
        config.getCssClasses().clear();
        config.getCssClasses().add("time");
      } else {
        config.getCssClasses().clear();
      }
      propertyNamesInCriterion.put(entry.getKey(), config);
    }
  }
  return propertyNamesInCriterion;
}
@Override
public void handleRequest(HttpServerExchange exchange) throws Exception {
  final String incomingSessionId = servletContext.getSessionConfig().findSessionId(exchange);
  if (incomingSessionId == null || !data.containsKey(incomingSessionId)) {
    next.handleRequest(exchange);
    return;
  }
  // we have some old data
  PersistentSession result = data.remove(incomingSessionId);
  if (result != null) {
    long time = System.currentTimeMillis();
    if (time < result.getExpiration().getTime()) {
      final HttpSessionImpl session = servletContext.getSession(exchange, true);
      final HttpSessionEvent event = new HttpSessionEvent(session);
      for (Map.Entry<String, Object> entry : result.getSessionData().entrySet()) {
        if (entry.getValue() instanceof HttpSessionActivationListener) {
          ((HttpSessionActivationListener) entry.getValue()).sessionDidActivate(event);
        }
        if (entry.getKey().startsWith(HttpSessionImpl.IO_UNDERTOW)) {
          session.getSession().setAttribute(entry.getKey(), entry.getValue());
        } else {
          session.setAttribute(entry.getKey(), entry.getValue());
        }
      }
    }
  }
  next.handleRequest(exchange);
}
private static void dumpConfig(Configuration conf, StringBuilder sb) {
  Iterator<Map.Entry<String, String>> configIter = conf.iterator();
  List<Map.Entry<String, String>> configVals = new ArrayList<>();
  while (configIter.hasNext()) {
    configVals.add(configIter.next());
  }
  Collections.sort(
      configVals,
      new Comparator<Map.Entry<String, String>>() {
        @Override
        public int compare(Map.Entry<String, String> ent, Map.Entry<String, String> ent2) {
          return ent.getKey().compareTo(ent2.getKey());
        }
      });
  for (Map.Entry<String, String> entry : configVals) {
    // use get() to make sure variable substitution works
    if (entry.getKey().toLowerCase().contains("path")) {
      StringTokenizer st = new StringTokenizer(conf.get(entry.getKey()), File.pathSeparator);
      sb.append(entry.getKey()).append("=\n");
      while (st.hasMoreTokens()) {
        sb.append("    ").append(st.nextToken()).append(File.pathSeparator).append('\n');
      }
    } else {
      sb.append(entry.getKey()).append('=').append(conf.get(entry.getKey())).append('\n');
    }
  }
}
@Test
public void testIteratorWithDeprecatedKeys() {
  Configuration conf = new Configuration();
  Configuration.addDeprecation("dK", new String[] {"nK"});
  conf.set("k", "v");
  conf.set("dK", "V");
  assertEquals("V", conf.get("dK"));
  assertEquals("V", conf.get("nK"));
  conf.set("nK", "VV");
  assertEquals("VV", conf.get("dK"));
  assertEquals("VV", conf.get("nK"));
  boolean kFound = false;
  boolean dKFound = false;
  boolean nKFound = false;
  for (Map.Entry<String, String> entry : conf) {
    if (entry.getKey().equals("k")) {
      assertEquals("v", entry.getValue());
      kFound = true;
    }
    if (entry.getKey().equals("dK")) {
      assertEquals("VV", entry.getValue());
      dKFound = true;
    }
    if (entry.getKey().equals("nK")) {
      assertEquals("VV", entry.getValue());
      nKFound = true;
    }
  }
  assertTrue("regular Key not found", kFound);
  assertTrue("deprecated Key not found", dKFound);
  assertTrue("new Key not found", nKFound);
}
@Override
public final void runBare() throws Throwable {
  // Patch a bug with maven that does not properly pass the system property
  // when it has an empty value
  if ("org.hsqldb.jdbcDriver".equals(System.getProperty("gatein.test.datasource.driver"))) {
    System.setProperty("gatein.test.datasource.password", "");
  }

  //
  log.info("Running unit test:" + getName());
  for (Map.Entry<?, ?> entry : System.getProperties().entrySet()) {
    if (entry.getKey() instanceof String) {
      String key = (String) entry.getKey();
      log.debug(key + "=" + entry.getValue());
    }
  }

  //
  beforeRunBare();

  //
  try {
    super.runBare();
    log.info("Unit test " + getName() + " completed");
  } catch (Throwable throwable) {
    log.error("Unit test " + getName() + " did not complete", throwable);

    //
    throw throwable;
  } finally {
    afterRunBare();
  }
}
@Override
public synchronized List<T> getTopK() {
  Comparator<T> comparator =
      new Comparator<T>() {
        public int compare(T key1, T key2) {
          return Longs.compare(counts.get(key1), counts.get(key2));
        }
      };
  PriorityQueue<T> topK = new PriorityQueue<T>(k, comparator);
  for (Map.Entry<T, Long> entry : counts.entrySet()) {
    if (topK.size() < k) {
      topK.offer(entry.getKey());
    } else if (entry.getValue() > counts.get(topK.peek())) {
      topK.offer(entry.getKey());
      topK.poll();
    }
  }
  LinkedList<T> sortedTopK = new LinkedList<T>();
  while (!topK.isEmpty()) {
    sortedTopK.addFirst(topK.poll());
  }
  return sortedTopK;
}
@Override
protected boolean runTest(DatabaseRegistryEntry dbre) {
  boolean result = true;
  for (Map.Entry<String, String[]> method_tags : getMandatoryTags().entrySet()) {
    Vector<String> quoted_tags = new Vector<String>();
    for (String t : method_tags.getValue()) {
      quoted_tags.add(String.format("'%s'", t));
    }
    List<String> mlsss =
        getTemplate(dbre)
            .queryForDefaultObjectList(
                String.format(
                    QUERY,
                    StringUtils.join(quoted_tags, ","),
                    method_tags.getKey(),
                    method_tags.getValue().length),
                String.class);
    if (mlsss.size() > 0) {
      ReportManager.problem(
          this,
          dbre.getConnection(),
          "MLSSs for " + method_tags.getKey()
              + " found with no statistics: " + StringUtils.join(mlsss, ","));
      result = false;
    } else {
      ReportManager.correct(this, dbre.getConnection(), "PASSED ");
    }
  }
  return result;
}
/**
 * Moves the operators from another subprocess into this one, keeping all connections intact. TODO:
 * Test more rigorously. Do we register/unregister everything correctly?
 *
 * @return the number of ports the connections of which could not be restored
 */
public int stealOperatorsFrom(ExecutionUnit otherUnit) {
  int failedReconnects = 0;
  // remember source and sink connections so we can reconnect them later.
  Map<String, InputPort> sourceMap = new HashMap<String, InputPort>();
  Map<String, OutputPort> sinkMap = new HashMap<String, OutputPort>();
  for (OutputPort source : otherUnit.getInnerSources().getAllPorts()) {
    if (source.isConnected()) {
      sourceMap.put(source.getName(), source.getDestination());
    }
  }
  otherUnit.getInnerSources().disconnectAll();
  for (InputPort sink : otherUnit.getInnerSinks().getAllPorts()) {
    if (sink.isConnected()) {
      sinkMap.put(sink.getName(), sink.getSource());
    }
  }
  otherUnit.getInnerSinks().disconnectAll();
  // Move operators
  Iterator<Operator> i = otherUnit.operators.iterator();
  while (i.hasNext()) {
    Operator operator = i.next();
    i.remove();
    otherUnit.unregister(operator);
    Process otherProcess = operator.getProcess();
    if (otherProcess != null) {
      operator.unregisterOperator(otherProcess);
    }
    this.operators.add(operator);
    operator.setEnclosingProcess(null);
    // operator.unregisterOperator(operator.getProcess());
    registerOperator(operator, true);
    // operator.registerOperator(this.getEnclosingOperator().getProcess());
  }
  // Rewire sources and sinks
  for (Map.Entry<String, InputPort> entry : sourceMap.entrySet()) {
    OutputPort mySource = getInnerSources().getPortByName(entry.getKey());
    if (mySource != null) {
      mySource.connectTo(entry.getValue());
    } else {
      failedReconnects++;
    }
  }
  getInnerSources().unlockPortExtenders();
  for (Map.Entry<String, OutputPort> entry : sinkMap.entrySet()) {
    InputPort mySink = getInnerSinks().getPortByName(entry.getKey());
    if (mySink != null) {
      entry.getValue().connectTo(mySink);
    } else {
      failedReconnects++;
    }
  }
  getInnerSinks().unlockPortExtenders();
  fireUpdate(this);
  return failedReconnects;
}
public String[] getAvailableLanguageIds() {
  Set<String> availableLanguageIds = new TreeSet<String>();
  Map<Locale, String> nameMap = getNameMap();
  for (Map.Entry<Locale, String> entry : nameMap.entrySet()) {
    Locale locale = entry.getKey();
    String value = entry.getValue();
    if (Validator.isNotNull(value)) {
      availableLanguageIds.add(LocaleUtil.toLanguageId(locale));
    }
  }
  Map<Locale, String> descriptionMap = getDescriptionMap();
  for (Map.Entry<Locale, String> entry : descriptionMap.entrySet()) {
    Locale locale = entry.getKey();
    String value = entry.getValue();
    if (Validator.isNotNull(value)) {
      availableLanguageIds.add(LocaleUtil.toLanguageId(locale));
    }
  }
  return availableLanguageIds.toArray(new String[availableLanguageIds.size()]);
}
public void processExpressions(Map<String, String> exps, CmsRfcRelation rfc) {
  for (Map.Entry<String, String> attr : exps.entrySet()) {
    if (attr.getKey().startsWith("value")) {
      processValue(attr.getKey(), attr.getValue(), rfc.getAttributes());
    }
  }
}
public static void printMap(Map mp) {
  Iterator it = mp.entrySet().iterator();
  String symbol = "";
  double highest = 0;
  while (it.hasNext()) {
    Map.Entry pair = (Map.Entry) it.next();
    System.out.println(pair.getKey() + " = " + pair.getValue());
    double std = std((ArrayList<String>) pair.getValue());
    if (highest < std) {
      highest = std;
      symbol = (String) pair.getKey();
    }
    /*
    ArrayList arList = (ArrayList) pair.getValue();
    for (Object object : arList) {
      System.out.println(object.toString());
    }*/
    it.remove(); // avoids a ConcurrentModificationException
  }
  System.out.println("Symbol:" + symbol + ": High:" + highest);
}
private void updateVmapAvg(
    Map<String, Double> vMap,
    Map<String, AvgValue> vMapAvg,
    Map<String, Double> fxiyi,
    Map<String, Double> fxizi) {
  Set<Map.Entry<String, Double>> entrySet = fxiyi.entrySet();
  Map<String, Object> changed = new HashMap<String, Object>();
  for (Map.Entry<String, Double> entry : entrySet) {
    String key = entry.getKey();
    changed.put(key, new Object());
  }
  entrySet = fxizi.entrySet();
  for (Map.Entry<String, Double> entry : entrySet) {
    String key = entry.getKey();
    changed.put(key, new Object());
  }
  Set<Map.Entry<String, Object>> changedSet = changed.entrySet();
  for (Map.Entry<String, Object> changedSetEntry : changedSet) {
    String key = changedSetEntry.getKey();
    if (vMapAvg.containsKey(key)) {
      AvgValue avg = vMapAvg.get(key);
      Double sum = avg.getD() + vMap.get(key);
      int count = avg.getCount() + 1;
      vMapAvg.put(key, new AvgValue(sum, count));
    } else {
      vMapAvg.put(key, new AvgValue(vMap.get(key), 1));
    }
  }
}
@Override
public List<Category> getCategoryByParameters(Map<String, Object> parameters) {
  CriteriaBuilder cb = em.getCriteriaBuilder();
  CriteriaQuery<Category> cq = cb.createQuery(Category.class);
  Root<Category> r = cq.from(Category.class);
  Join<RssUser, Category> ru = r.join("rssUserList", JoinType.LEFT);
  Join<RssUser, Rss> rss = ru.join("rss", JoinType.LEFT);
  Predicate p = cb.conjunction();
  for (Map.Entry<String, Object> param : parameters.entrySet()) {
    if (param.getKey().equals("rssId")) {
      p = cb.and(p, cb.equal(rss.get("rssId"), param.getValue()));
    } else if (param.getKey().equals("guid")) {
      p = cb.and(p, cb.equal(r.get(param.getKey()), param.getValue()));
    }
  }
  p = cb.or(p, cb.equal(r.get("categoryId"), DEFAULT_CATEGORY_ID));
  cq.distinct(true);
  cq.multiselect().where(p);
  TypedQuery typedQuery = em.createQuery(cq);
  List<Category> resultList = typedQuery.getResultList();
  return resultList;
}
private Map<String, Double> updateV(
    Map<String, Double> vMap, Map<String, Double> fxiyi, Map<String, Double> fxizi) {
  Set<Map.Entry<String, Double>> entrySet = fxiyi.entrySet();
  for (Map.Entry<String, Double> entry : entrySet) {
    String key = entry.getKey();
    Double value = entry.getValue();
    if (vMap.containsKey(key)) {
      vMap.put(key, vMap.get(key) + value);
    } else {
      vMap.put(key, value);
    }
  }
  entrySet = fxizi.entrySet();
  for (Map.Entry<String, Double> entry : entrySet) {
    String key = entry.getKey();
    Double value = entry.getValue();
    if (vMap.containsKey(key)) {
      vMap.put(key, vMap.get(key) - value);
    } else {
      vMap.put(key, -1 * value);
    }
  }
  return vMap;
}
protected Collection<KnowledgePackage> compileResources(
    Map<Resource, ResourceType> resources, boolean enablePropertySpecificFacts) {
  KnowledgeBuilderConfiguration conf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
  if (enablePropertySpecificFacts) {
    conf.setOption(PropertySpecificOption.ALLOWED);
  } else {
    conf.setOption(PropertySpecificOption.DISABLED);
  }
  KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(conf);
  for (Map.Entry<Resource, ResourceType> entry : resources.entrySet()) {
    kbuilder.add(entry.getKey(), entry.getValue());
    if (kbuilder.hasErrors()) {
      Logger.getLogger(Case1.class.getName())
          .log(Level.SEVERE, "Compilation Errors in {0}", entry.getKey());
      Iterator<KnowledgeBuilderError> iterator = kbuilder.getErrors().iterator();
      while (iterator.hasNext()) {
        KnowledgeBuilderError knowledgeBuilderError = iterator.next();
        Logger.getLogger(Case1.class.getName())
            .log(Level.SEVERE, knowledgeBuilderError.getMessage());
        System.out.println(knowledgeBuilderError.getMessage());
      }
      throw new IllegalStateException("Compilation Errors");
    }
  }
  return kbuilder.getKnowledgePackages();
}
private void updateProjectCache() {
  // Ensure we have a cache at all:
  initProjectCache();
  // Loop through project path preference keys:
  for (Map.Entry<String, ?> entry : preferences.getAll().entrySet()) {
    if (entry.getKey().startsWith(PREF_PROJECT_PATH_PREFIX)
        && entry.getKey().endsWith(PREF_PROJECT_PATH_POSTFIX)) {
      int projectID = getProjectID(entry.getKey());
      int projectFingerPrint = getProjectFingerPrint(entry.getKey());
      if (getCachedProject(projectID, projectFingerPrint) == null) {
        // Parse the project if it is not already in the cache:
        Project p = ProjectLoader.ParseProject(entry.getValue().toString());
        if (p != null) {
          if (p.getFingerPrint() != projectFingerPrint) {
            Log.w(
                TAG,
                "XML finger print of project " + p.toString()
                    + " has changed, possibly the " + ProjectLoader.PROJECT_FILE
                    + " file (located in " + entry.getValue().toString()
                    + ") was manually edited!");
            // Remove old pref key:
            removeProjectPathPrefKey(projectID, projectFingerPrint);
            // Add new pref key:
            storeProjectPathPrefKey(p);
          }
          // Cache the project object:
          cacheProject(p);
        }
      }
    }
  }
}
private void compareSubsystemModels() {
  System.out.println("====== Comparing subsystem models ======");
  ResourceDefinition rootCurrentDefinition =
      new ResourceDefinition(trimNonSubsystem(currentResourceDefinitions), currentModelVersions);
  ResourceDefinition rootLegacyDefinition =
      new ResourceDefinition(trimNonSubsystem(legacyResourceDefinitions), legacyModelVersions);
  Map<String, ModelNode> currentSubsystems = rootCurrentDefinition.getChildren(SUBSYSTEM);
  Map<String, ModelNode> legacySubsystems = rootLegacyDefinition.getChildren(SUBSYSTEM);
  CompareContext context =
      new CompareContext(
          PathAddress.EMPTY_ADDRESS,
          PathAddress.EMPTY_ADDRESS,
          true,
          rootCurrentDefinition,
          rootLegacyDefinition);
  compareKeySetsAndRemoveMissing(context, "subsystems", currentSubsystems, legacySubsystems);
  for (Map.Entry<String, ModelNode> legacyEntry : legacySubsystems.entrySet()) {
    PathAddress subsystemAddress =
        PathAddress.pathAddress(PathElement.pathElement(SUBSYSTEM, legacyEntry.getKey()));
    ResourceDefinition currentDefinition =
        new ResourceDefinition(currentSubsystems.get(legacyEntry.getKey()), currentModelVersions);
    ResourceDefinition legacyDefinition =
        new ResourceDefinition(legacyEntry.getValue(), legacyModelVersions);
    context =
        new CompareContext(
            subsystemAddress, subsystemAddress, false, currentDefinition, legacyDefinition);
    if (!context.continuteWithCheck()) {
      continue;
    }
    compareModel(context);
  }
}
private static void handleMapField(
    Record record,
    Field field,
    String fieldPath,
    Map<String, Set<Descriptors.FieldDescriptor>> messageTypeToExtensionMap,
    Map<String, Object> defaultValueMap,
    Descriptors.FieldDescriptor fieldDescriptor,
    DynamicMessage.Builder builder)
    throws DataGeneratorException {
  Descriptors.Descriptor mapEntryDescriptor = fieldDescriptor.getMessageType();
  // MapEntry contains key and value fields
  Map<String, Field> sdcMapField = field.getValueAsMap();
  for (Map.Entry<String, Field> entry : sdcMapField.entrySet()) {
    builder.addRepeatedField(
        fieldDescriptor,
        DynamicMessage.newBuilder(mapEntryDescriptor)
            .setField(mapEntryDescriptor.findFieldByName(KEY), entry.getKey())
            .setField(
                mapEntryDescriptor.findFieldByName(VALUE),
                getValue(
                    mapEntryDescriptor.findFieldByName(VALUE),
                    entry.getValue(),
                    record,
                    fieldPath + FORWARD_SLASH + entry.getKey(),
                    messageTypeToExtensionMap,
                    defaultValueMap))
            .build());
  }
}
static void itTest4(Map s, int size, int pos) {
  IdentityHashMap seen = new IdentityHashMap(size);
  reallyAssert(s.size() == size);
  int sum = 0;
  timer.start("Iter XEntry ", size);
  Iterator it = s.entrySet().iterator();
  Object k = null;
  Object v = null;
  for (int i = 0; i < size - pos; ++i) {
    Map.Entry x = (Map.Entry) (it.next());
    k = x.getKey();
    v = x.getValue();
    seen.put(k, k);
    if (x != MISSING) ++sum;
  }
  reallyAssert(s.containsKey(k));
  it.remove();
  reallyAssert(!s.containsKey(k));
  while (it.hasNext()) {
    Map.Entry x = (Map.Entry) (it.next());
    Object k2 = x.getKey();
    seen.put(k2, k2);
    if (x != MISSING) ++sum;
  }
  reallyAssert(s.size() == size - 1);
  s.put(k, v);
  reallyAssert(seen.size() == size);
  timer.finish();
  reallyAssert(sum == size);
  reallyAssert(s.size() == size);
}
protected Map handleAttributes(Map attributes) {
  Map undo = super.handleAttributes(attributes);
  if (attributes != null) {
    if (undo == null) undo = new Hashtable();
    Iterator it = attributes.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry entry = (Map.Entry) it.next();
      Object cell = entry.getKey();
      Map properties = (Map) entry.getValue();
      if (cell instanceof JGraphBusinessObject) {
        JGraphBusinessObject bo = (JGraphBusinessObject) cell;
        Map deltaOld = new Hashtable();
        Iterator it2 = properties.entrySet().iterator();
        while (it2.hasNext()) {
          Map.Entry property = (Map.Entry) it2.next();
          Object key = property.getKey();
          Object oldValue = bo.putProperty(key, property.getValue());
          if (oldValue != null) deltaOld.put(key, oldValue);
        }
        undo.put(cell, deltaOld);
      }
    }
  }
  return undo;
}
/**
 * Get the batch containing the given row. NOTE: the returned batch may be empty or may begin with
 * a row other than the one specified.
 *
 * @param row
 * @return
 * @throws TeiidComponentException
 *     <p>TODO: a method to get the raw batch
 */
public TupleBatch getBatch(int row) throws TeiidComponentException {
  TupleBatch result = null;
  if (row > rowCount) {
    result = new TupleBatch(rowCount + 1, new List[] {});
  } else if (this.batchBuffer != null && row > rowCount - this.batchBuffer.size()) {
    result = new TupleBatch(rowCount - this.batchBuffer.size() + 1, batchBuffer);
    if (forwardOnly) {
      this.batchBuffer = null;
    }
  } else {
    if (this.batchBuffer != null && !this.batchBuffer.isEmpty()) {
      // this is just a sanity check to ensure we're not holding too many
      // hard references to batches.
      saveBatch(false);
    }
    Map.Entry<Integer, Long> entry = batches.floorEntry(row);
    Assertion.isNotNull(entry);
    Long batch = entry.getValue();
    List<List<?>> rows = manager.getBatch(batch, !forwardOnly);
    result = new TupleBatch(entry.getKey(), rows);
    if (isFinal && result.getEndRow() == rowCount) {
      result.setTerminationFlag(true);
    }
    if (forwardOnly) {
      batches.remove(entry.getKey());
    }
  }
  if (isFinal && result.getEndRow() == rowCount) {
    result.setTerminationFlag(true);
  }
  return result;
}
/**
 * Return JPQL string statement referencing the SampleParameter
 *
 * @return JPQL string statement
 * @throws NoParametersException
 */
private String getSampleParameterJPQL() throws NoParametersException {
  String ret = "";
  for (Map.Entry<String, Parameter> e : datafileParameter.entrySet())
    ret += ", IN(" + Queries.DATAFILE_NAME + ".datafileParameterCollection) " + e.getKey();
  for (Map.Entry<String, Parameter> e : datasetParameter.entrySet())
    ret += ", IN(" + Queries.DATASET_NAME + ".datasetParameterCollection) " + e.getKey();
  for (Map.Entry<String, Parameter> e : sampleParameter.entrySet())
    ret += ", IN(" + Queries.PARAM_NAME_JPQL + ".sampleParameterCollection) " + e.getKey();
  if (ret.isEmpty()) throw new NoParametersException();
  String parameter = "";
  if (!datafileParameter.isEmpty())
    parameter +=
        ", IN(" + Queries.PARAM_NAME_JPQL + ".investigationId.datasetCollection) "
            + Queries.DATASET_NAME
            + ", IN(" + Queries.DATASET_NAME + ".datafileCollection) "
            + Queries.DATAFILE_NAME;
  if (datafileParameter.isEmpty() && !datasetParameter.isEmpty())
    parameter +=
        ", IN(" + Queries.PARAM_NAME_JPQL + ".investigationId.datasetCollection) "
            + Queries.DATASET_NAME;
  if (parameter.isEmpty()) return ret.substring(2);
  return parameter.substring(2) + ret;
}