@Test
public void testSearch() throws LdapDAOException {
  Set<LdapUser> users = this.ldapManager.searchUsers("cstamas");
  assertEquals(1, users.size());
  LdapUser user = users.iterator().next();
  assertEquals("cstamas", user.getUsername());
  assertTrue(this.isPasswordsEncrypted() || "cstamas123".equals(user.getPassword()));

  users = this.ldapManager.searchUsers("br");
  assertEquals(1, users.size());
  user = users.iterator().next();
  assertEquals("brianf", user.getUsername());
  // assertEquals( "Brian Fox", user.getRealName() );
  assertTrue(this.isPasswordsEncrypted() || "brianf123".equals(user.getPassword()));

  users = this.ldapManager.searchUsers("j");
  assertEquals(1, users.size());
  user = users.iterator().next();
  assertEquals("jvanzyl", user.getUsername());
  // assertEquals( "Jason Van Zyl", user.getRealName() );
  assertTrue(this.isPasswordsEncrypted() || "jvanzyl123".equals(user.getPassword()));

  users = this.ldapManager.searchUsers("INVALID");
  assertEquals(0, users.size());
}
private void read(final KafkaStream<String, String> stream) {
  // Use a single ConsumerIterator so hasNext() and next() operate on the same instance.
  final ConsumerIterator<String, String> it = stream.iterator();
  while (it.hasNext()) {
    final int phase = phaser.register();
    final MessageAndMetadata<String, String> msg = it.next();
    final long offset = msg.offset();
    final long partition = msg.partition();
    unacknowledgedOffsets.add(offset);
    lastCommittedOffset.compareAndSet(0, offset);
    currentPartition.compareAndSet(-1, partition);
    final String jsonString = msg.message();
    handler.handle(
        configuration.getVertxAddress(),
        jsonString,
        () -> {
          unacknowledgedOffsets.remove(offset);
          phaser.arriveAndDeregister();
        });
    if (unacknowledgedOffsets.size() >= configuration.getMaxUnacknowledged()
        || partititionChanged(partition)
        || tooManyUncommittedOffsets(offset)) {
      LOG.info(
          "Got {} unacknowledged messages, waiting for ACKs in order to commit",
          unacknowledgedOffsets.size());
      if (!waitForAcks(phase)) {
        return;
      }
      LOG.info("Continuing message processing");
      commitOffsetsIfAllAcknowledged(offset);
    }
  }
}
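// A minimal sketch of the waitForAcks(phase) helper referenced above, under the assumption
// that the class relies on the Phaser flow control visible in read(): each in-flight message
// registers a party, each ACK callback calls arriveAndDeregister(), and the reader blocks
// until the given phase completes. The timeout accessor getAckTimeoutSeconds() is a
// hypothetical name, not a confirmed part of the original configuration class.
private boolean waitForAcks(final int phase) {
  try {
    phaser.awaitAdvanceInterruptibly(
        phase, configuration.getAckTimeoutSeconds(), TimeUnit.SECONDS);
    return true;
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    return false;
  } catch (TimeoutException e) {
    LOG.error("Timed out waiting for {} unacknowledged messages", unacknowledgedOffsets.size());
    return false;
  }
}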
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
  if (!partitionKeysSet) {
    Set<String> keys = getPartitionColumns(location, job);
    if (keys != null && !keys.isEmpty()) {
      // re-edit the pigSchema to contain the new partition keys.
      ResourceFieldSchema[] fields = pigSchema.getFields();
      LOG.debug("Schema: " + Arrays.toString(fields));
      ResourceFieldSchema[] newFields = Arrays.copyOf(fields, fields.length + keys.size());
      int index = fields.length;
      for (String key : keys) {
        newFields[index++] = new ResourceFieldSchema(new FieldSchema(key, DataType.CHARARRAY));
      }
      pigSchema.setFields(newFields);
      LOG.debug("Added partition fields: " + keys + " to loader schema");
      LOG.debug("Schema is: " + Arrays.toString(newFields));
    }
    partitionKeysSet = true;
  }
  return pigSchema;
}
/** Called each time a new audio device has been added or removed. */
private void onAudioManagerChangedState() {
  Log.d(
      TAG,
      "onAudioManagerChangedState: devices=" + audioDevices
          + ", selected=" + selectedAudioDevice);

  // Enable the proximity sensor if there are two available audio devices
  // in the list. Given the current implementation, we know that the choice
  // will then be between EARPIECE and SPEAKER_PHONE.
  if (audioDevices.size() == 2) {
    AppRTCUtils.assertIsTrue(
        audioDevices.contains(AudioDevice.EARPIECE)
            && audioDevices.contains(AudioDevice.SPEAKER_PHONE));
    // Start the proximity sensor.
    proximitySensor.start();
  } else if (audioDevices.size() == 1) {
    // Stop the proximity sensor since it is no longer needed.
    proximitySensor.stop();
  } else {
    Log.e(TAG, "Invalid device list");
  }

  if (onStateChangeListener != null) {
    // Run callback to notify a listening client. The client can then
    // use public getters to query the new state.
    onStateChangeListener.run();
  }
}
/**
 * Returns a <code>ThrowableSet</code> representing the set of exceptions included in
 * <code>include</code> minus the set of exceptions included in <code>exclude</code>. Creates a
 * new <code>ThrowableSet</code> only if there was not already one whose contents correspond to
 * <code>include</code> - <code>exclude</code>.
 *
 * @param include A set of {@link RefLikeType} objects representing exception types included in
 *     the result; may be <code>null</code> if there are no included types.
 * @param exclude A set of {@link AnySubType} objects representing exception types excluded from
 *     the result; may be <code>null</code> if there are no excluded types.
 * @return a <code>ThrowableSet</code> representing the set of exceptions corresponding to
 *     <code>include</code> - <code>exclude</code>.
 */
private ThrowableSet registerSetIfNew(Set include, Set exclude) {
  if (INSTRUMENTING) {
    registrationCalls++;
  }
  if (include == null) {
    include = Collections.EMPTY_SET;
  }
  if (exclude == null) {
    exclude = Collections.EMPTY_SET;
  }
  int size = include.size() + exclude.size();
  Integer sizeKey = Integer.valueOf(size); // avoid the deprecated Integer(int) constructor
  List sizeList = (List) sizeToSets.get(sizeKey);
  if (sizeList == null) {
    sizeList = new LinkedList();
    sizeToSets.put(sizeKey, sizeList);
  }
  for (Iterator i = sizeList.iterator(); i.hasNext(); ) {
    ThrowableSet set = (ThrowableSet) i.next();
    if (set.exceptionsIncluded.equals(include) && set.exceptionsExcluded.equals(exclude)) {
      return set;
    }
  }
  if (INSTRUMENTING) {
    registeredSets++;
  }
  ThrowableSet result = new ThrowableSet(include, exclude);
  sizeList.add(result);
  return result;
}
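// Usage sketch of the canonicalization guarantee above (the call site is hypothetical):
// equal (include, exclude) pairs always resolve to the identical ThrowableSet instance,
// which lets clients compare ThrowableSets with == instead of equals().
private void illustrateInterning(Set include, Set exclude) {
  ThrowableSet a = registerSetIfNew(include, exclude);
  ThrowableSet b = registerSetIfNew(new HashSet(include), new HashSet(exclude));
  assert a == b; // the second call finds the first instance in its size bucket
}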
private Map<Triple<DendrogramNode, RowKey, RowKey>, Number> visit(
    final DendrogramNode root,
    final Map<Triple<DendrogramNode, RowKey, RowKey>, Number> m,
    final Map<RowKey, DistanceVectorDataValue> d,
    final int allLeaves,
    final ExecutionContext exec)
    throws CanceledExecutionException {
  if (root.isLeaf()) {
    final RowKey key = RowKeyHelper.getKey(root);
    return Collections.singletonMap(Triple.apply(root, key, key), (Number) Double.valueOf(0));
  }
  final DendrogramNode w = root.getFirstSubnode();
  final Map<Triple<DendrogramNode, RowKey, RowKey>, Number> leftM =
      visit(w, m, d, allLeaves, exec);
  final DendrogramNode x = root.getSecondSubnode();
  final Map<Triple<DendrogramNode, RowKey, RowKey>, Number> rightM =
      visit(x, m, d, allLeaves, exec);
  final Map<Triple<DendrogramNode, RowKey, RowKey>, Number> ret =
      new HashMap<Triple<DendrogramNode, RowKey, RowKey>, Number>(leftM);
  ret.putAll(rightM);
  final Set<RowKey> leftKeys = computeLeaves(w);
  final Set<RowKey> rightKeys = computeLeaves(x);
  computeM(root, d, w, x, rightM, ret, leftKeys, rightKeys);
  exec.checkCanceled();
  computeM(root, d, x, w, leftM, ret, rightKeys, leftKeys);
  exec.setProgress(((double) leftKeys.size() + rightKeys.size()) / allLeaves);
  return ret;
}
private void validateAllocatedContainers(boolean isReplicated, String containerTemplateLink)
    throws Throwable {
  QueryTask.Query kindClause =
      new QueryTask.Query()
          .setTermPropertyName(ServiceDocument.FIELD_NAME_KIND)
          .setTermMatchValue(Utils.buildKind(ContainerService.State.class));

  QueryTask.Query containerTemplateClause =
      new QueryTask.Query()
          .setTermPropertyName(ContainerService.State.FIELD_NAME_CONTAINER_TEMPLATE_SERVICE_LINK)
          .setTermMatchValue(containerTemplateLink);

  QueryTask.QuerySpecification querySpecification = new QueryTask.QuerySpecification();
  querySpecification.query.addBooleanClause(kindClause);
  querySpecification.query.addBooleanClause(containerTemplateClause);
  QueryTask query = QueryTask.create(querySpecification).setDirect(true);

  NodeGroupBroadcastResponse queryResponse = testEnvironment.sendBroadcastQueryAndWait(query);
  Set<String> documentLinks = QueryTaskUtils.getBroadcastQueryResults(queryResponse);

  // Verify that count(replicas) == count(dockerVms), i.e. 1 container per vm
  int expectedReplicaCount = isReplicated ? dockerVms.size() : 1;
  assertThat(documentLinks.size(), is(expectedReplicaCount));

  // Verify that each container was assigned to a unique docker vm
  Set<String> uniqueVmLinks = new HashSet<>();
  for (String documentLink : documentLinks) {
    ContainerService.State state =
        testEnvironment.getServiceState(documentLink, ContainerService.State.class);
    uniqueVmLinks.add(state.vmServiceLink);
  }
  assertThat(uniqueVmLinks.size(), is(expectedReplicaCount));
}
/** @generated */
private static boolean buildElement2ViewMap(
    View parentView, Map<EObject, View> element2ViewMap, Set<? extends EObject> elements) {
  if (elements.size() == element2ViewMap.size()) {
    return true;
  }
  if (parentView.isSetElement()
      && !element2ViewMap.containsKey(parentView.getElement())
      && elements.contains(parentView.getElement())) {
    element2ViewMap.put(parentView.getElement(), parentView);
    if (elements.size() == element2ViewMap.size()) {
      return true;
    }
  }
  boolean complete = false;
  for (Iterator<?> it = parentView.getChildren().iterator(); it.hasNext() && !complete; ) {
    complete = buildElement2ViewMap((View) it.next(), element2ViewMap, elements);
  }
  for (Iterator<?> it = parentView.getSourceEdges().iterator(); it.hasNext() && !complete; ) {
    complete = buildElement2ViewMap((View) it.next(), element2ViewMap, elements);
  }
  for (Iterator<?> it = parentView.getTargetEdges().iterator(); it.hasNext() && !complete; ) {
    complete = buildElement2ViewMap((View) it.next(), element2ViewMap, elements);
  }
  return complete;
}
public void testBulkGetAfterLifespanExpire() throws InterruptedException {
  Map<String, String> dataIn = new HashMap<String, String>();
  dataIn.put("aKey", "aValue");
  dataIn.put("bKey", "bValue");
  final long startTime = System.currentTimeMillis();
  final long lifespan = 10000;
  remoteCache.putAll(dataIn, lifespan, TimeUnit.MILLISECONDS);

  Set<Object> dataOut = new HashSet<Object>();
  while (true) {
    dataOut = remoteCache.keySet();
    if (System.currentTimeMillis() >= startTime + lifespan) break;
    assert dataOut.size() == dataIn.size()
        : String.format(
            "Data size not the same, put in %s elements, keySet has %s elements",
            dataIn.size(), dataOut.size());
    for (Object outKey : dataOut) {
      assert dataIn.containsKey(outKey);
    }
    Thread.sleep(100);
  }

  // Make sure that in the next 30 secs data is removed
  while (System.currentTimeMillis() < startTime + lifespan + 30000) {
    dataOut = remoteCache.keySet();
    if (dataOut.size() == 0) return;
  }
  assert dataOut.size() == 0
      : String.format("Data not empty, it contains: %s elements", dataOut.size());
}
public void loadGridNet(String mapGrid) throws IOException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(mapGrid));
  logger.debug("loading {}...", mapGrid);
  CSVReader reader = new CSVReader(new FileReader(mapGrid), ',', '"', 1);
  String[] row;
  while ((row = reader.readNext()) != null) {
    String gridId = row[1].trim();
    String dmRoads = row[2].trim();
    String gjRoads = row[3].trim();
    Set<String> x =
        Sets.newHashSet(Splitter.on('|').trimResults().omitEmptyStrings().split(dmRoads));
    Set<String> y =
        Sets.newHashSet(Splitter.on('|').trimResults().omitEmptyStrings().split(gjRoads));
    if (!x.isEmpty() || !y.isEmpty()) {
      MapGrid grid = new MapGrid();
      grid.dmRoads = x;
      grid.gjRoads = y;
      gridNet.put(gridId, grid);
      // logger.debug("{},{},{}", gridId, x, y);
    }
  }
  reader.close();
}
public static void reportConflicts(Time[] times) {
  Set<String> inProgress = new HashSet<String>();
  List<Set<String>> conflictSets = new LinkedList<Set<String>>();
  Set<String> conflict = null;
  for (int i = 0; i < times.length; i++) {
    Time t = times[i];
    if (t.start) {
      // the show has started: add it to in progress
      inProgress.add(t.name);
    } else {
      // the show has ended: remove it from in progress
      inProgress.remove(t.name);
    }
    if (inProgress.size() > 1) {
      // we have a conflict; let's record it
      if (conflict == null) {
        conflict = new HashSet<String>();
      }
      conflict.addAll(inProgress);
    } else {
      if (conflict != null && conflict.size() > 1) {
        conflictSets.add(conflict);
        conflict = null; // reset
      }
    }
  }
  for (Set<String> conflictSet : conflictSets) {
    System.out.println(conflictSet);
  }
}
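// Hypothetical usage of reportConflicts, assuming Time exposes a (name, start) constructor
// matching its public 'name'/'start' fields, and that the array is sorted chronologically:
// A and B overlap, C stands alone.
public static void main(String[] args) {
  Time[] times = {
    new Time("A", true), new Time("B", true), // A starts, then B starts -> conflict {A, B}
    new Time("B", false), new Time("A", false), // both end -> conflict set is flushed
    new Time("C", true), new Time("C", false), // C never overlaps anything
  };
  reportConflicts(times); // prints a single conflict set: [A, B]
}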
@Override
public boolean equals(Object o) {
  if (this == o) return true;
  if (o == null || getClass() != o.getClass()) return false;

  AllowedValues a2 = (AllowedValues) o;
  if (canBeOred != a2.canBeOred) {
    return false;
  }
  Set<PsiAnnotationMemberValue> v1 =
      new THashSet<PsiAnnotationMemberValue>(Arrays.asList(values));
  Set<PsiAnnotationMemberValue> v2 =
      new THashSet<PsiAnnotationMemberValue>(Arrays.asList(a2.values));
  if (v1.size() != v2.size()) {
    return false;
  }
  for (PsiAnnotationMemberValue value : v1) {
    for (PsiAnnotationMemberValue value2 : v2) {
      if (same(value, value2, value.getManager())) {
        v2.remove(value2);
        break; // break right after removal to avoid a ConcurrentModificationException
      }
    }
  }
  return v2.isEmpty();
}
@Override
public String toString() {
  StringBuilder builder = new StringBuilder();
  for (String testName : testCases.keySet()) {
    builder.append("Test " + testName + ": \n");
    builder.append(" " + testLineCoverage.get(testName));
    builder.append("\n");
    builder.append(" " + testBranchCoverage.get(testName));
    builder.append("\n");
    builder.append(" " + testMutantCoverage.get(testName));
    builder.append("\n");
  }
  builder.append("Uncovered lines: ");
  builder.append(uncoveredLines.toString());
  builder.append("\n");
  builder.append("Uncovered branches: ");
  builder.append(uncoveredBranches.toString());
  builder.append("\n");
  builder.append("Uncovered mutants: " + uncoveredMutants.size());
  builder.append("\n");
  builder.append("Covered mutants: " + coveredMutants.size());
  builder.append("\n");
  builder.append("Timeout mutants: " + exceptionMutants.size());
  builder.append("\n");
  builder.append("Failures: " + contractViolations);
  builder.append("\n");
  return builder.toString();
}
@Test
public void old_entities_are_recycled() {
  Set<Integer> ids = new HashSet<Integer>();

  Entity e1 = world.createEntity();
  Entity e2 = world.createEntity();
  Entity e3 = world.createEntity();

  ids.add(System.identityHashCode(e1));
  ids.add(System.identityHashCode(e2));
  ids.add(System.identityHashCode(e3));

  assertEquals(3, ids.size());

  e1.deleteFromWorld();
  e2.deleteFromWorld();
  e3.deleteFromWorld();

  world.process();

  Entity e1b = world.createEntity();
  Entity e2b = world.createEntity();
  Entity e3b = world.createEntity();

  ids.add(System.identityHashCode(e1b));
  ids.add(System.identityHashCode(e2b));
  ids.add(System.identityHashCode(e3b));

  // recycled entities are the same instances, so no new identity hash codes appear
  assertEquals(3, ids.size());
}
/** @return Collection of IClasses, representing the interfaces this class implements. */
protected Collection<IClass> computeAllInterfacesAsCollection() {
  Collection<? extends IClass> c = getDirectInterfaces();
  Set<IClass> result = HashSetFactory.make();
  for (Iterator<? extends IClass> it = c.iterator(); it.hasNext(); ) {
    IClass klass = it.next();
    if (klass.isInterface()) {
      result.add(klass);
    } else {
      Warnings.add(ClassHierarchyWarning.create("expected an interface " + klass));
    }
  }

  // at this point result holds all interfaces the class directly extends.
  // now expand to a fixed point.
  Set<IClass> last;
  do {
    last = HashSetFactory.make(result);
    for (IClass i : last) {
      result.addAll(i.getDirectInterfaces());
    }
  } while (last.size() < result.size());

  // now add any interfaces implemented by the super class
  IClass sup = getSuperclass();
  if (sup != null) {
    result.addAll(sup.getAllImplementedInterfaces());
  }
  return result;
}
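// Worked trace of the fixed-point loop above (hypothetical hierarchy): suppose the class
// directly implements I, with I extends J and J extends K.
//   pass 1: last = {I},       result grows to {I, J}    -> last.size() < result.size(), repeat
//   pass 2: last = {I, J},    result grows to {I, J, K} -> repeat
//   pass 3: last = {I, J, K}, result unchanged          -> sizes equal, loop terminates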
/**
 * Method called to compute the cohesion between two functions.
 *
 * <p>Computes the percentage of function calls common to both functions over the total number
 * of calls made in the two functions. Call parameters are not taken into account.
 *
 * @param f1 A function from a source code model
 * @param f2 Another function from a source code model
 * @return A double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionCalls(Function f1, Function f2) {
  double result = 0.0;
  Set<Call> calls1 = f1.getTotalCalls();
  Set<Call> calls2 = f2.getTotalCalls();
  Set<Function> fctCalledTotal = new HashSet<Function>();
  Set<Function> fctCalledCommon = new HashSet<Function>();
  for (Call call : calls1) {
    fctCalledTotal.add(call.getFunction());
  }
  // Collect f2's callees into a set first, so a function called twice by f2 alone is not
  // mistakenly counted as common to both functions.
  Set<Function> fctCalled2 = new HashSet<Function>();
  for (Call call : calls2) {
    fctCalled2.add(call.getFunction());
  }
  for (Function called : fctCalled2) {
    // add() returns false when the callee was already seen in f1, i.e. it is common
    if (!fctCalledTotal.add(called)) {
      fctCalledCommon.add(called);
    }
  }
  if (fctCalledTotal.size() > 0) {
    result = (100.0 * fctCalledCommon.size()) / fctCalledTotal.size();
  }
  return result;
}
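// Worked example of the formula above: if f1 calls {a, b, c} and f2 calls {b, c, d}, the
// union of callees is {a, b, c, d} and the common callees are {b, c}, so cohesionCalls
// returns 100.0 * 2 / 4 = 50.0.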
/**
 * Method called to compute the cohesion between two functions.
 *
 * <p>Computes the percentage of similar parameters between the two functions over the combined
 * set of parameters of both functions.
 *
 * @param f1 A function from a source code model
 * @param f2 Another function from a source code model
 * @return A double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionArguments(Function f1, Function f2) {
  double result = 0.0;
  double nbCommon = 0;
  Set<LocalVariable> argsFct1 = f1.getArguments();
  Set<LocalVariable> argsFct2 = f2.getArguments();
  Set<LocalVariable> args = new HashSet<LocalVariable>();
  for (LocalVariable arg1 : argsFct1) {
    for (LocalVariable arg2 : argsFct2) {
      // two parameters are "similar" when their types match and their names are close
      if (arg1.getType().equals(arg2.getType())
          && similarity.similar(arg1.getName(), arg2.getName())) {
        ++nbCommon;
      }
      args.add(arg1);
      args.add(arg2);
    }
  }
  if (args.size() > 0) {
    result = 100.0 * nbCommon / args.size();
  }
  return result;
}
@NotNull
public String getAnalysisSummary() {
  StringBuilder sb = new StringBuilder();
  sb.append("\n" + _.banner("analysis summary"));

  String duration = _.formatTime(System.currentTimeMillis() - stats.getInt("startTime"));
  sb.append("\n- total time: " + duration);
  sb.append("\n- modules loaded: " + loadedFiles.size());
  sb.append("\n- semantic problems: " + semanticErrors.size());
  sb.append("\n- failed to parse: " + failedToParse.size());

  // calculate number of defs, refs, xrefs
  int nDef = 0, nXRef = 0;
  for (Binding b : getAllBindings()) {
    nDef += 1;
    nXRef += b.refs.size();
  }

  sb.append("\n- number of definitions: " + nDef);
  sb.append("\n- number of cross references: " + nXRef);
  sb.append("\n- number of references: " + getReferences().size());

  long nResolved = this.resolved.size();
  long nUnresolved = this.unresolved.size();
  sb.append("\n- resolved names: " + nResolved);
  sb.append("\n- unresolved names: " + nUnresolved);
  sb.append("\n- name resolve rate: " + _.percent(nResolved, nResolved + nUnresolved));
  sb.append("\n" + _.getGCStats());

  return sb.toString();
}
/**
 * Provides a html form rendering of the Questions that the Question Developer can select to
 * deploy.
 *
 * @return
 * @throws QuestionDeploymentException
 * @author Trevor Hinson
 */
private StringBuffer renderQuestionDefinitionCheckBoxes() throws QuestionDeploymentException {
  StringBuffer sb = new StringBuffer();
  Set<String> quest = identifyDeployableQuestions();
  if (quest != null && quest.size() > 0) {
    outputRendering.append(START_SELECTION_FORM);
    // iterate the set directly instead of calling toArray() on every pass
    int i = 0;
    for (String s : quest) {
      if (StringUtils.isNotEmpty(s)) {
        outputRendering
            .append("<div><span class='fields'>")
            .append("<input type=\"checkbox\" name=\"FILE_")
            .append(i)
            .append("\" value=\"")
            .append(s)
            .append("\" />")
            .append("</span>")
            .append("<span>")
            .append(s)
            .append("</span></div>");
      }
      i++;
    }
    outputRendering.append(SUBMIT_BUTTON);
    outputRendering.append(END_SELECTION_FORM);
  } else {
    outputRendering.append(NO_QUESTIONS_TO_DEPLOY);
  }
  // NOTE: the markup is accumulated on the outputRendering field; this buffer stays empty.
  return sb;
}
public Object eval(String source, int lineNo, int columnNo, Object expr) throws BSFException {
  if (!(expr instanceof String)) {
    throw new BSFException("simple-method expression must be a string");
  }

  // right now only supports one method per file, so get all methods and just run the first...
  Map<String, SimpleMethod> simpleMethods = null;
  try {
    simpleMethods = SimpleMethod.getDirectSimpleMethods(source, (String) expr, "<bsf source>");
  } catch (MiniLangException e) {
    throw new BSFException("Error loading/parsing simple-method XML source: " + e.getMessage());
  }

  Set<String> smNames = simpleMethods.keySet();
  if (smNames.isEmpty()) {
    throw new BSFException("Did not find any simple-methods in the file");
  }

  String methodName = smNames.iterator().next();
  if (smNames.size() > 1) {
    Debug.logWarning(
        "Found more than one simple-method in the file, running the ["
            + methodName
            + "] method, you should remove all but one method from this file",
        module);
  }

  SimpleMethod simpleMethod = simpleMethods.get(methodName);
  MethodContext methodContext = new MethodContext(context, null, MethodContext.EVENT);
  try {
    return simpleMethod.exec(methodContext);
  } catch (MiniLangException e) {
    throw new BSFException(e.getMessage());
  }
}
/**
 * Ends all browser sessions.
 *
 * <p>Active and available but inactive sessions are ended.
 */
protected void endAllBrowserSessions(RemoteControlConfiguration configuration) {
  boolean done = false;
  Set<BrowserSessionInfo> allSessions = new HashSet<BrowserSessionInfo>();
  while (!done) {
    // to avoid concurrent modification exceptions...
    synchronized (activeSessions) {
      for (BrowserSessionInfo sessionInfo : activeSessions) {
        allSessions.add(sessionInfo);
      }
    }
    synchronized (availableSessions) {
      for (BrowserSessionInfo sessionInfo : availableSessions) {
        allSessions.add(sessionInfo);
      }
    }
    for (BrowserSessionInfo sessionInfo : allSessions) {
      endBrowserSession(true, sessionInfo.sessionId, configuration);
    }
    done = (0 == activeSessions.size() && 0 == availableSessions.size());
    allSessions.clear();
    if (doCleanup) {
      cleanupTimer.cancel();
    }
  }
}
private static void printMap(Object name, Map map, int level) {
  String spaces = getSpaces(level);
  String spaces2 = getSpaces(level + 1);
  if (map instanceof KeyMap) {
    PadoShell.println(spaces + "KeyType: " + ((KeyMap) map).getKeyTypeName());
  }
  Set<Map.Entry> entrySet = map.entrySet();
  int count = 0;
  for (Map.Entry entry : entrySet) {
    Object key = entry.getKey();
    Object value = entry.getValue();
    if (key instanceof Map) {
      printMap(null, (Map) key, level + 1);
    } else {
      printObject(key.toString(), value, level + 1);
    }
    count++;
    if (count >= getCollectionEntryPrintCount()) {
      break;
    }
  }
  if (count < entrySet.size()) {
    PadoShell.println(spaces2 + "<" + (entrySet.size() - count) + " more ...>");
  }
}
/**
 * Adds the given POP application satisfier set as a subset of this set, with the given
 * exceptions. The exceptions set must be a subset of the given satisfier set. If the given POP
 * application was already a subset of this set, then the new exceptions set is the intersection
 * of the given exceptions set with the old one. Otherwise, the exceptions set is the given one
 * minus any individual elements of this set that satisfy the given POP application.
 */
public void addSatisfiers(NumberVar popApp, ObjectSet satisfiers, Set newExceptions) {
  if (popAppSatisfiers.containsKey(popApp)) {
    // already in set; assume satisfiers the same
    Set curExceptions = (Set) popAppExceptions.get(popApp);
    int oldNumExceptions = curExceptions.size();
    curExceptions.retainAll(newExceptions);
    size += (oldNumExceptions - curExceptions.size());
  } else {
    popAppSatisfiers.put(popApp, satisfiers);

    Set oldIndivs = (Set) popAppIndivs.remove(popApp);
    for (Iterator iter = oldIndivs.iterator(); iter.hasNext(); ) {
      individuals.remove(iter.next());
    }

    Set curExceptions = new HashSet(newExceptions);
    curExceptions.removeAll(oldIndivs);
    popAppExceptions.put(popApp, curExceptions);

    size += (satisfiers.size()
        - oldIndivs.size() // because they were already here
        - curExceptions.size()); // because they weren't added
  }
}
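// Worked trace of the size bookkeeping in the else-branch above (hypothetical numbers):
// popApp has 10 satisfiers; 3 of them were previously stored as explicit individuals;
// newExceptions has 4 elements, 2 of which are among those 3 individuals.
//   oldIndivs.size() == 3   -> those members were already counted in 'size'
//   curExceptions = newExceptions minus oldIndivs, so curExceptions.size() == 2
//   size += 10 - 3 - 2 == 5 -> only the genuinely new, non-excluded members are counted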
/** @return the {@link UserGroupInformation} for the current thread */
public static UserGroupInformation getCurrentUGI() {
  Subject user = getCurrentUser();
  if (user == null) {
    user = currentUser.get();
    if (user == null) {
      return null;
    }
  }

  Set<UserGroupInformation> ugiPrincipals = user.getPrincipals(UserGroupInformation.class);
  UserGroupInformation ugi = null;
  if (ugiPrincipals != null && ugiPrincipals.size() == 1) {
    ugi = ugiPrincipals.iterator().next();
    if (ugi == null) {
      throw new RuntimeException("Cannot find _current user_ UGI in the Subject!");
    }
  } else {
    // guard against an NPE: ugiPrincipals may be null on this branch
    int principalCount = (ugiPrincipals == null) ? 0 : ugiPrincipals.size();
    throw new RuntimeException(
        "Cannot resolve current user from subject, which had "
            + principalCount
            + " UGI principals!");
  }
  return ugi;
}
@Override
public Set<ValueRequirement> getRequirements(
    final FunctionCompilationContext context,
    final ComputationTarget target,
    final ValueRequirement desiredValue) {
  final ValueProperties constraints = desiredValue.getConstraints();
  final Set<String> forwardCurveCalculationMethods =
      constraints.getValues(CURVE_CALCULATION_METHOD);
  if (forwardCurveCalculationMethods == null || forwardCurveCalculationMethods.size() != 1) {
    return null;
  }
  final Set<String> surfaceNames = constraints.getValues(SURFACE);
  if (surfaceNames == null || surfaceNames.size() != 1) {
    return null;
  }
  final String surfaceName = surfaceNames.iterator().next();
  final ValueRequirement forwardCurveRequirement =
      getForwardCurveRequirement(target, desiredValue);
  final ValueRequirement volatilitySurfaceRequirement =
      getVolatilityDataRequirement(
          target, surfaceName, getInstrumentType(), getSurfaceQuoteType(), getSurfaceQuoteUnits());
  final ValueRequirement interpolatorRequirement =
      getInterpolatorRequirement(target, desiredValue);
  return Sets.newHashSet(
      interpolatorRequirement, forwardCurveRequirement, volatilitySurfaceRequirement);
}
/**
 * Method called to compute the cohesion between two functions.
 *
 * <p>Computes the percentage of program-global variables accessed by both functions over the
 * total number of program-global variables accessed in the two functions.
 *
 * @param f1 A function from a source code model
 * @param f2 Another function from a source code model
 * @return A double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionGlobalVars(Function f1, Function f2) {
  double result = 0.0;
  Map<GlobalVariable, Integer> globalVars1 = f1.getCoreGlobalVariables();
  Map<GlobalVariable, Integer> globalVars2 = f2.getCoreGlobalVariables();
  Set<GlobalVariable> globalVarsTotal = new HashSet<GlobalVariable>();
  Set<GlobalVariable> globalVarsCommon = new HashSet<GlobalVariable>();
  for (GlobalVariable var : globalVars1.keySet()) {
    globalVarsTotal.add(var);
  }
  for (GlobalVariable var : globalVars2.keySet()) {
    // add() returns false when the variable was already seen in f1, i.e. it is common
    boolean newVar = globalVarsTotal.add(var);
    if (!newVar) {
      globalVarsCommon.add(var);
    }
  }
  if (globalVarsTotal.size() > 0) {
    result = (100.0 * globalVarsCommon.size()) / globalVarsTotal.size();
  }
  return result;
}
@Test
public void testCreateAndDropTable() throws Exception {
  catalog.createDatabase("tmpdb1", TajoConstants.DEFAULT_TABLESPACE_NAME);
  assertTrue(catalog.existDatabase("tmpdb1"));
  catalog.createDatabase("tmpdb2", TajoConstants.DEFAULT_TABLESPACE_NAME);
  assertTrue(catalog.existDatabase("tmpdb2"));

  TableDesc table1 = createMockupTable("tmpdb1", "table1");
  catalog.createTable(table1);
  TableDesc table2 = createMockupTable("tmpdb2", "table2");
  catalog.createTable(table2);

  Set<String> tmpdb1 = Sets.newHashSet(catalog.getAllTableNames("tmpdb1"));
  assertEquals(1, tmpdb1.size());
  assertTrue(tmpdb1.contains("table1"));

  Set<String> tmpdb2 = Sets.newHashSet(catalog.getAllTableNames("tmpdb2"));
  assertEquals(1, tmpdb2.size());
  assertTrue(tmpdb2.contains("table2"));

  catalog.dropDatabase("tmpdb1");
  assertFalse(catalog.existDatabase("tmpdb1"));

  tmpdb2 = Sets.newHashSet(catalog.getAllTableNames("tmpdb2"));
  assertEquals(1, tmpdb2.size());
  assertTrue(tmpdb2.contains("table2"));

  catalog.dropDatabase("tmpdb2");
  assertFalse(catalog.existDatabase("tmpdb2"));
}
/**
 * Method called to compute the cohesion between two functions.
 *
 * <p>Computes the percentage of types used in common by the two functions over the total number
 * of types used in the two functions.
 *
 * @param f1 A function from a source code model
 * @param f2 Another function from a source code model
 * @return A double between 0.0 and 100.0
 * @see #cohesion(Function, Function)
 */
private double cohesionTypes(Function f1, Function f2) {
  double result = 0.0;
  Map<ComplexType, Integer> usedTypesFct1 = f1.getTotalComplexTypes();
  Map<ComplexType, Integer> usedTypesFct2 = f2.getTotalComplexTypes();
  Set<ComplexType> usedTypesTotal = new HashSet<ComplexType>();
  Set<ComplexType> usedTypesCommon = new HashSet<ComplexType>();
  for (ComplexType t : usedTypesFct1.keySet()) {
    // void and anonymous types carry no cohesion information; skip them
    if (!t.equals(PrimitiveType.voidType) && !t.equals(ComplexType.anonymousType)) {
      usedTypesTotal.add(t);
    }
  }
  for (ComplexType t : usedTypesFct2.keySet()) {
    if (!t.equals(PrimitiveType.voidType) && !t.equals(ComplexType.anonymousType)) {
      // add() returns false when the type was already seen in f1, i.e. it is common
      boolean newType = usedTypesTotal.add(t);
      if (!newType) {
        usedTypesCommon.add(t);
      }
    }
  }
  if (usedTypesTotal.size() > 0) {
    result = (100.0 * usedTypesCommon.size()) / usedTypesTotal.size();
  }
  return result;
}
private Variance calculateArgumentProjectionKindFromSuper(
    @NotNull TypeProjection argument,
    @NotNull List<TypeProjectionAndVariance> projectionsFromSuper) {
  Set<Variance> projectionKindsInSuper = Sets.newLinkedHashSet();
  for (TypeProjectionAndVariance projectionAndVariance : projectionsFromSuper) {
    projectionKindsInSuper.add(projectionAndVariance.typeProjection.getProjectionKind());
  }

  Variance defaultProjectionKind = argument.getProjectionKind();
  if (projectionKindsInSuper.isEmpty()) {
    return defaultProjectionKind;
  } else if (projectionKindsInSuper.size() == 1) {
    Variance projectionKindInSuper = projectionKindsInSuper.iterator().next();
    if (defaultProjectionKind == INVARIANT || defaultProjectionKind == projectionKindInSuper) {
      return projectionKindInSuper;
    } else {
      reportError(
          "Incompatible projection kinds in type arguments of super methods' return types: "
              + projectionsFromSuper
              + ", defined in current: "
              + argument);
      return defaultProjectionKind;
    }
  } else {
    reportError(
        "Incompatible projection kinds in type arguments of super methods' return types: "
            + projectionsFromSuper);
    return defaultProjectionKind;
  }
}
/**
 * Creates a file with the temporary files list. This list will be used to delete temporary
 * files on the next application launch. The method is called from shutdown().
 *
 * @param fileCfg file with temporary files list.
 */
private void persistNewTemp(File fileCfg) {
  if (hsDeleteOnExit.isEmpty()) {
    logDebug(LogArea.CONFIG, "No temp file names to persist on exit.");
    fileCfg.delete(); // do not pollute disk
    return;
  }
  logDebug(
      LogArea.CONFIG,
      "Persisting %d temp file names into %s",
      hsDeleteOnExit.size(),
      fileCfg.getAbsolutePath());
  BufferedWriter writer = null;
  try {
    writer = new BufferedWriter(new FileWriter(fileCfg));
    for (File file : hsDeleteOnExit) {
      if (!file.delete()) {
        String f = file.getCanonicalPath();
        writer.write(f);
        writer.newLine();
        logWarn(LogArea.JAR, "JVM failed to release %s", f);
      }
    }
  } catch (IOException e) {
    // Ignore. In the worst case temp files will accumulate.
  } finally {
    if (writer != null) {
      try {
        writer.close();
      } catch (IOException e) {
        // ignore close failure
      }
    }
  }
} // persistNewTemp()