// Returns the method names of `clazz` ordered according to its @GroupOrder
// annotation, caching the computed set per class in `sortedMethods`.
// Classes without @GroupOrder are cached as (and return) an empty set.
private Set<String> getSortedMethods(final Class<?> clazz, final Method[] methods) {
  final Set<String> sortedMethodNames = sortedMethods.get(clazz);
  if (sortedMethodNames == null) {
    final GroupOrder order = clazz.getAnnotation(GroupOrder.class);
    if (order == null) {
      // No ordering annotation: cache the empty result so the lookup is done once.
      sortedMethods.put(clazz, Collections.<String>emptySet());
      return Collections.emptySet();
    } else {
      // For each group class listed in @GroupOrder (in declaration order),
      // collect the names of the zero-argument methods whose return type is
      // exactly that class. LinkedHashSet preserves the annotation order.
      final Set<String> result = new LinkedHashSet<>();
      for (final Class<?> groupClazz : order.value()) {
        for (final Method method : methods) {
          if (method.getReturnType() == groupClazz && method.getParameterTypes().length == 0) {
            result.add(method.getName());
          }
        }
      }
      sortedMethods.put(clazz, result);
      return result;
    }
  }
  return sortedMethodNames;
}
/** Test method for {@link net.sf.hajdbc.balancer.load.LoadBalancer#clear()}. */
@Test
public void clear() {
  // clear() on an already-empty balancer must leave it empty.
  // NOTE(review): assertEquals compares a Set against a Balancer — this relies
  // on the Balancer's equals/collection semantics; confirm that is intended.
  Balancer<Void, MockDatabase> balancer =
      this.factory.createBalancer(Collections.<MockDatabase>emptySet());
  balancer.clear();
  assertEquals(Collections.<MockDatabase>emptySet(), balancer);

  // Single-database balancer.
  balancer = this.factory.createBalancer(Collections.singleton(this.databases[0]));
  balancer.clear();
  assertEquals(Collections.<MockDatabase>emptySet(), balancer);

  // Two-database balancer.
  balancer =
      this.factory.createBalancer(
          new HashSet<MockDatabase>(Arrays.asList(this.databases[0], this.databases[1])));
  balancer.clear();
  assertEquals(Collections.<MockDatabase>emptySet(), balancer);

  // Balancer holding every test database.
  balancer = this.factory.createBalancer(new HashSet<MockDatabase>(Arrays.asList(this.databases)));
  balancer.clear();
  assertEquals(Collections.<MockDatabase>emptySet(), balancer);
}
// Verifies field-name extraction from Lucene-style query strings.
@Test
public void extractFieldNames() {
  // No "name:" prefix present -> no field names extracted.
  assertEquals(Collections.emptySet(), plugin.extractFieldNames(""));
  assertEquals(Collections.emptySet(), plugin.extractFieldNames("text"));
  assertEquals(Collections.emptySet(), plugin.extractFieldNames("+(^:text"));
  // Whitespace before the colon means "foo" is not a field prefix.
  assertEquals(Collections.emptySet(), plugin.extractFieldNames("foo :bar"));
  // Simple field:value pairs.
  assertEquals(Collections.singleton("foo"), plugin.extractFieldNames("foo:bar"));
  assertEquals(Collections.singleton("both"), plugin.extractFieldNames("both:(one two)"));
  // Underscores (including trailing and locale-style suffixes) are valid.
  assertEquals(Collections.singleton("title__"), plugin.extractFieldNames("title__:text"));
  assertEquals(
      Collections.singleton("title_zh_TW"), plugin.extractFieldNames("title_zh_TW:text"));
  // Dotted property paths, including ones containing escaped characters.
  assertEquals(
      Collections.singleton("property.Blog.BlogPostClass.title"),
      plugin.extractFieldNames("property.Blog.BlogPostClass.title:value"));
  assertEquals(
      Collections.singleton("property.Blog.Blog..Post$5EClass.title"),
      plugin.extractFieldNames("property.Blog.Blog..Post$5EClass.title:value"));
  // Multiple fields inside one boolean query, including a non-ASCII name;
  // the boost suffix (^3) is not part of the field name.
  assertEquals(
      new HashSet<String>(
          Arrays.asList(
              "abc",
              "g_h.i",
              "m$n-o",
              "_\u0103\u00EE\u00E2\u0219\u021B\u00E8\u00E9\u00EA\u00EB")),
      plugin.extractFieldNames(
          "+abc:def AND -g_h.i:jkl AND (m$n-o:pqr OR "
              + "_\u0103\u00EE\u00E2\u0219\u021B\u00E8\u00E9\u00EA\u00EB:stu^3)"));
}
// Verifies that a constrained map rejects entries failing TEST_CONSTRAINT
// (TEST_KEY is an illegal key, TEST_VALUE an illegal value) and that a
// rejected insert leaves both the view and the backing map untouched.
public void testConstrainedMapIllegal() {
  Map<String, Integer> map = Maps.newLinkedHashMap();
  Map<String, Integer> constrained = MapConstraints.constrainedMap(map, TEST_CONSTRAINT);
  // Illegal key together with illegal value.
  try {
    constrained.put(TEST_KEY, TEST_VALUE);
    fail("TestKeyException expected");
  } catch (TestKeyException expected) {
  }
  // Legal key, illegal value.
  try {
    constrained.put("baz", TEST_VALUE);
    fail("TestValueException expected");
  } catch (TestValueException expected) {
  }
  // Illegal key, legal value.
  try {
    constrained.put(TEST_KEY, 3);
    fail("TestKeyException expected");
  } catch (TestKeyException expected) {
  }
  // putAll with one illegal entry must fail as a whole.
  try {
    constrained.putAll(ImmutableMap.of("baz", 3, TEST_KEY, 4));
    fail("TestKeyException expected");
  } catch (TestKeyException expected) {
  }
  // None of the rejected operations may have modified either map.
  assertEquals(Collections.emptySet(), map.entrySet());
  assertEquals(Collections.emptySet(), constrained.entrySet());
}
// Reacts to Java model changes: refreshes the cache for projects whose
// classpath changed, and updates/clears cached data when projects are
// opened, closed, or removed.
@Override
public void elementChanged(ElementChangedEvent event) {
  initializeCache(true);
  Set<IJavaProject> javaProjectsWithClasspathChange =
      javaProjectClasspathChangeAnalyzer.getJavaProjectsWithClasspathChange(event.getDelta());
  if (!javaProjectsWithClasspathChange.isEmpty()) {
    for (IJavaProject project : javaProjectsWithClasspathChange) {
      updateCache(project);
    }
  }
  for (IJavaElementDelta projectDelta : getProjectDeltas(event.getDelta())) {
    IJavaProject project = (IJavaProject) projectDelta.getElement();
    if ((projectDelta.getKind() & IJavaElementDelta.REMOVED) != 0) {
      // Project removed: drop everything cached for it.
      clearCache(project, Collections.<PackageFragmentRootData>emptySet());
    }
    // NOTE(review): getFlags() is a bit mask; this switch only matches when
    // F_OPENED / F_CLOSED is the *only* flag set. Confirm deltas never carry
    // these flags combined with others, otherwise use a bitwise test instead.
    switch (projectDelta.getFlags()) {
      case IJavaElementDelta.F_OPENED:
        updateCache(project);
        break;
      case IJavaElementDelta.F_CLOSED:
        clearCache(project, Collections.<PackageFragmentRootData>emptySet());
        break;
    }
  }
}
// Verifies that get/gets with empty row, column, or row-column sets return
// empty results instead of failing.
@Test
public void testEmptyInputs() {
  TestTransaction tx1 = new TestTransaction(env);
  Set<Bytes> rowSet = Collections.singleton(Bytes.of("foo"));
  Set<Column> colSet = Collections.singleton(new Column("a", "b"));
  Set<Bytes> emptyRowSet = Collections.emptySet();
  Set<Column> emptyColSet = Collections.emptySet();
  Set<RowColumn> emptyRowColSet = Collections.emptySet();
  // Bytes-based API.
  Assert.assertEquals(0, tx1.get(Bytes.of("foo"), emptyColSet).size());
  Assert.assertEquals(0, tx1.get(emptyRowSet, emptyColSet).size());
  Assert.assertEquals(0, tx1.get(emptyRowSet, colSet).size());
  Assert.assertEquals(0, tx1.get(rowSet, emptyColSet).size());
  // NOTE(review): exact duplicate of the previous assertion — possibly a
  // copy-paste meant to cover a different input combination.
  Assert.assertEquals(0, tx1.get(rowSet, emptyColSet).size());
  Assert.assertEquals(0, tx1.get(emptyRowColSet).size());
  // String-based API, same matrix of empty/non-empty inputs.
  Set<String> erss = Collections.emptySet();
  Set<String> rss = Collections.singleton("foo");
  Assert.assertEquals(0, tx1.gets("foo", emptyColSet).size());
  Assert.assertEquals(0, tx1.gets(erss, emptyColSet).size());
  Assert.assertEquals(0, tx1.gets(erss, colSet).size());
  Assert.assertEquals(0, tx1.gets(rss, emptyColSet).size());
  // NOTE(review): duplicate of the previous assertion (see above).
  Assert.assertEquals(0, tx1.gets(rss, emptyColSet).size());
  Assert.assertEquals(0, tx1.gets(emptyRowColSet).size());
}
/**
 * Find the maximum weight matching of a path using dynamic programming.
 *
 * @param path a list of edges. The code assumes that the list of edges is a valid simple path,
 *     and that is not a cycle.
 * @return a maximum weight matching of the path
 */
public Pair<Double, Set<E>> getMaximumWeightMatching(Graph<V, E> g, LinkedList<E> path) {
  int pathLength = path.size();

  // special cases
  switch (pathLength) {
    case 0:
      // special case, empty path
      return Pair.of(Double.valueOf(0d), Collections.emptySet());
    case 1:
      // special case, one edge: take it only if its weight is positive
      E e = path.getFirst();
      double eWeight = g.getEdgeWeight(e);
      if (comparator.compare(eWeight, 0d) > 0) {
        return Pair.of(eWeight, Collections.singleton(e));
      } else {
        return Pair.of(Double.valueOf(0d), Collections.emptySet());
      }
  }

  // make sure work array has enough space (field `a` is reused across calls)
  if (a.length < pathLength + 1) {
    a = new double[pathLength + 1];
  }

  // First pass: a[i] = best matching weight using the first i edges. Either
  // skip edge i (a[i-1]) or take it on top of the best over the first i-2
  // edges (taking it excludes the adjacent previous edge). Negative-weight
  // edges are never forced into the matching (see a[1]).
  Iterator<E> it = path.iterator();
  E e = it.next();
  double eWeight = g.getEdgeWeight(e);
  a[0] = 0d;
  a[1] = (comparator.compare(eWeight, 0d) > 0) ? eWeight : 0d;
  for (int i = 2; i <= pathLength; i++) {
    e = it.next();
    eWeight = g.getEdgeWeight(e);
    if (comparator.compare(a[i - 1], a[i - 2] + eWeight) > 0) {
      a[i] = a[i - 1];
    } else {
      a[i] = a[i - 2] + eWeight;
    }
  }

  // Reverse second pass to build the solution: walking backwards, edge i is
  // in the matching exactly when it improved the optimum (a[i] > a[i-1]);
  // taking it forces the preceding (adjacent) edge to be skipped.
  Set<E> matching = new HashSet<>();
  it = path.descendingIterator();
  int i = pathLength;
  while (i >= 1) {
    e = it.next();
    if (comparator.compare(a[i], a[i - 1]) > 0) {
      matching.add(e);
      // skip next edge
      if (i > 1) {
        e = it.next();
      }
      i--;
    }
    i--;
  }

  // return solution
  return Pair.of(a[pathLength], matching);
}
/**
 * Collects all {@link PsiLanguageInjectionHost} elements that start inside the
 * given text range of the file.
 *
 * @param file file to scan
 * @param range text range limiting the scan; elements whose start offset is at
 *     or past the range's end offset are ignored
 * @return the hosts found (possibly empty, never null)
 */
@NotNull
private static Collection<PsiLanguageInjectionHost> collectInjectionHosts(
    @NotNull PsiFile file, @NotNull TextRange range) {
  Stack<PsiElement> toProcess = new Stack<PsiElement>();
  // Seed the stack with the top-level siblings that start inside the range.
  for (PsiElement e = file.findElementAt(range.getStartOffset());
      e != null;
      e = e.getNextSibling()) {
    if (e.getTextRange().getStartOffset() >= range.getEndOffset()) {
      break;
    }
    toProcess.push(e);
  }
  if (toProcess.isEmpty()) {
    return Collections.emptySet();
  }
  // Lazily allocated so the common "nothing found" case allocates nothing.
  Set<PsiLanguageInjectionHost> result = null;
  while (!toProcess.isEmpty()) {
    PsiElement e = toProcess.pop();
    if (e instanceof PsiLanguageInjectionHost) {
      if (result == null) {
        result = ContainerUtilRt.newHashSet();
      }
      result.add((PsiLanguageInjectionHost) e);
    } else {
      for (PsiElement child = e.getFirstChild(); child != null; child = child.getNextSibling()) {
        // FIX: compare the child's own start offset against the range end.
        // The previous code tested `e` (the parent), which is constant inside
        // this loop, so children starting past the range were still traversed
        // and hosts outside the range could be collected.
        if (child.getTextRange().getStartOffset() >= range.getEndOffset()) {
          break;
        }
        toProcess.push(child);
      }
    }
  }
  return result == null ? Collections.<PsiLanguageInjectionHost>emptySet() : result;
}
@Override public Collection<ARGState> getAbstractSuccessors(AbstractState pElement, Precision pPrecision) throws CPATransferException, InterruptedException { ARGState element = (ARGState) pElement; // covered elements may be in the reached set, but should always be ignored if (element.isCovered()) { return Collections.emptySet(); } element.markExpanded(); AbstractState wrappedState = element.getWrappedState(); Collection<? extends AbstractState> successors = transferRelation.getAbstractSuccessors(wrappedState, pPrecision); if (successors.isEmpty()) { return Collections.emptySet(); } Collection<ARGState> wrappedSuccessors = new ArrayList<>(); for (AbstractState absElement : successors) { ARGState successorElem = new ARGState(absElement, element); wrappedSuccessors.add(successorElem); } return wrappedSuccessors; }
// Collects custom metric names for the given tests from the new metric model.
// Returns an empty set when no metric descriptions exist or the query fails.
private Set<MetricNameDto> getCustomMetricsNamesNewModel(List<TaskDataDto> tests) {
  try {
    Set<Long> taskIds = CommonUtils.getTestsIds(tests);
    // Each row: [0] metric name (String), [1] presumably a display name
    // (String) — TODO confirm, [2] task id (Long).
    List<Object[]> metricDescriptionEntities = getMetricNames(taskIds);
    if (metricDescriptionEntities.isEmpty()) {
      return Collections.emptySet();
    }
    Set<MetricNameDto> metrics = new HashSet<>(metricDescriptionEntities.size());
    for (Object[] mde : metricDescriptionEntities) {
      // Attach each metric to the first test that owns its task id.
      for (TaskDataDto td : tests) {
        if (td.getIds().contains((Long) mde[2])) {
          String metricName = (String) mde[0];
          MetricNameDto metricNameDto =
              new MetricNameDto(td, metricName, (String) mde[1], MetricNameDto.Origin.METRIC);
          // synonyms are required for new model of standard metrics for correct back
          // compatibility
          metricNameDto.setMetricNameSynonyms(StandardMetricsNamesUtil.getSynonyms(metricName));
          metrics.add(metricNameDto);
          break;
        }
      }
    }
    return metrics;
  } catch (PersistenceException e) {
    // Best effort: log at debug level and return no metrics rather than
    // propagating the persistence failure to the caller.
    log.debug(
        "Could not fetch data from MetricSummaryEntity: {}",
        DataProcessingUtil.getMessageFromLastCause(e));
    return Collections.emptySet();
  }
}
public static Set<UUID> getUUIDsFromString(String list) { String[] split = list.split(","); HashSet<UUID> result = new HashSet<>(); for (String name : split) { if (name.isEmpty()) { // Invalid return Collections.emptySet(); } if ("*".equals(name)) { result.add(DBFunc.everyone); continue; } if (name.length() > 16) { try { result.add(UUID.fromString(name)); continue; } catch (IllegalArgumentException ignored) { return Collections.emptySet(); } } UUID uuid = UUIDHandler.getUUID(name, null); if (uuid == null) { return Collections.emptySet(); } result.add(uuid); } return result; }
// Creates an XContentBuilder whose content type comes from `format`, or is
// guessed from the request body, falling back to JSON. When `useFiltering`
// is set, include/exclude response filters are parsed from `filterPath`.
@Override
public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource, boolean useFiltering)
    throws IOException {
  XContentType contentType = XContentType.fromMediaTypeOrFormat(format);
  if (contentType == null) {
    // try and guess it from the auto detect source
    if (autoDetectSource != null) {
      contentType = XContentFactory.xContentType(autoDetectSource);
    }
  }
  if (contentType == null) {
    // default to JSON
    contentType = XContentType.JSON;
  }
  Set<String> includes = Collections.emptySet();
  Set<String> excludes = Collections.emptySet();
  if (useFiltering) {
    // Exclude filters carry a one-character prefix that is stripped here.
    Set<String> filters = Strings.splitStringByCommaToSet(filterPath);
    includes = filters.stream().filter(INCLUDE_FILTER).collect(toSet());
    excludes = filters.stream().filter(EXCLUDE_FILTER).map(f -> f.substring(1)).collect(toSet());
  }
  XContentBuilder builder =
      new XContentBuilder(
          XContentFactory.xContent(contentType), bytesOutput(), includes, excludes);
  if (pretty) {
    builder.prettyPrint().lfAtEnd();
  }
  builder.humanReadable(human);
  return builder;
}
// Joins two equation results. A Final result holds a computed lattice value;
// a Pending result is a sum of products still awaiting dependency resolution.
Result<Id, T> join(Result<Id, T> r1, Result<Id, T> r2) throws AnalyzerException {
  // `top` is absorbing: top ⊔ x = top, so short-circuit.
  if (r1 instanceof Final && ((Final) r1).value == top) {
    return r1;
  }
  if (r2 instanceof Final && ((Final) r2).value == top) {
    return r2;
  }
  // Two finals join directly in the lattice.
  if (r1 instanceof Final && r2 instanceof Final) {
    return new Final<Id, T>(lattice.join(((Final<?, T>) r1).value, ((Final<?, T>) r2).value));
  }
  // Final ⊔ Pending: fold the final value into the pending sum as a product
  // with no remaining dependencies.
  if (r1 instanceof Final && r2 instanceof Pending) {
    Final<?, T> f1 = (Final<?, T>) r1;
    Pending<Id, T> pending = (Pending<Id, T>) r2;
    Set<Product<Id, T>> sum1 = new HashSet<Product<Id, T>>(pending.sum);
    sum1.add(new Product<Id, T>(f1.value, Collections.<Id>emptySet()));
    return new Pending<Id, T>(sum1);
  }
  // Symmetric case: Pending ⊔ Final.
  if (r1 instanceof Pending && r2 instanceof Final) {
    Final<?, T> f2 = (Final<?, T>) r2;
    Pending<Id, T> pending = (Pending<Id, T>) r1;
    Set<Product<Id, T>> sum1 = new HashSet<Product<Id, T>>(pending.sum);
    sum1.add(new Product<Id, T>(f2.value, Collections.<Id>emptySet()));
    return new Pending<Id, T>(sum1);
  }
  // Pending ⊔ Pending: union of the two sums; checkLimit presumably bounds
  // the sum's size (it may throw AnalyzerException) — see its definition.
  Pending<Id, T> pending1 = (Pending<Id, T>) r1;
  Pending<Id, T> pending2 = (Pending<Id, T>) r2;
  Set<Product<Id, T>> sum = new HashSet<Product<Id, T>>();
  sum.addAll(pending1.sum);
  sum.addAll(pending2.sum);
  checkLimit(sum);
  return new Pending<Id, T>(sum);
}
/*
@Test
public void test02() {
  List<TblColRef> groups = buildGroups();
  List<FunctionDesc> aggregations = buildAggregations();
  TupleFilter filter = buildFilter2(groups.get(1));
  int count = search(groups, aggregations, filter, context);
  assertTrue(count > 0);
}

@Test
public void test03() {
  List<TblColRef> groups = buildGroups();
  List<FunctionDesc> aggregations = buildAggregations();
  TupleFilter filter = buildAndFilter(groups);
  int count = search(groups, aggregations, filter, context);
  assertTrue(count > 0);
}

@Test
public void test04() {
  List<TblColRef> groups = buildGroups();
  List<FunctionDesc> aggregations = buildAggregations();
  TupleFilter filter = buildOrFilter(groups);
  int count = search(groups, aggregations, filter, context);
  assertTrue(count > 0);
}

@Test
public void test05() {
  List<TblColRef> groups = buildGroups();
  List<FunctionDesc> aggregations = buildAggregations();
  int count = search(groups, aggregations, null, context);
  assertTrue(count > 0);
}
*/
// NOTE(review): the tests above are commented out — if they are permanently
// retired, consider deleting them rather than keeping dead code.

// Runs a query with the given group-by columns, aggregations and filter
// against the storage engine, printing each tuple and returning the row count.
private int search(
    List<TblColRef> groups,
    List<FunctionDesc> aggregations,
    TupleFilter filter,
    StorageContext context) {
  int count = 0;
  ITupleIterator iterator = null;
  try {
    SQLDigest sqlDigest =
        new SQLDigest(
            "default.test_kylin_fact",
            filter,
            null,
            Collections.<TblColRef>emptySet(),
            groups,
            Sets.<TblColRef>newHashSet(),
            Collections.<TblColRef>emptySet(),
            Collections.<TblColRef>emptySet(),
            aggregations,
            Collections.<SQLCall>emptyList(),
            new ArrayList<TblColRef>(),
            new ArrayList<SQLDigest.OrderEnum>(),
            false);
    iterator = storageEngine.search(context, sqlDigest, mockup.newTupleInfo(groups, aggregations));
    while (iterator.hasNext()) {
      ITuple tuple = iterator.next();
      System.out.println("Tuple = " + tuple);
      count++;
    }
  } finally {
    // Always release the iterator's underlying resources.
    if (iterator != null) iterator.close();
  }
  return count;
}
/** Test method for {@link net.sf.hajdbc.balancer.load.LoadBalancer#remove(java.lang.Object)}. */
@Test
public void remove() {
  // Removing from an empty balancer is a no-op that reports false.
  Balancer<Void, MockDatabase> balancer =
      this.factory.createBalancer(Collections.<MockDatabase>emptySet());
  boolean result = balancer.remove(this.databases[1]);
  assertFalse(result);
  assertCollectionEquals(Collections.<MockDatabase>emptySet(), balancer);

  // Removing an absent element leaves the balancer unchanged.
  balancer = this.factory.createBalancer(Collections.singleton(this.databases[0]));
  result = balancer.remove(this.databases[1]);
  assertFalse(result);
  assertCollectionEquals(Collections.singleton(this.databases[0]), balancer);

  // Removing a present element succeeds and keeps the remaining database.
  balancer =
      this.factory.createBalancer(
          new HashSet<MockDatabase>(Arrays.asList(this.databases[0], this.databases[1])));
  result = balancer.remove(this.databases[1]);
  assertTrue(result);
  assertCollectionEquals(Collections.singleton(this.databases[0]), balancer);

  // Removal from a balancer containing all databases keeps the other two.
  balancer = this.factory.createBalancer(new HashSet<MockDatabase>(Arrays.asList(this.databases)));
  result = balancer.remove(this.databases[1]);
  assertTrue(result);
  assertCollectionEquals(Arrays.asList(this.databases[0], this.databases[2]), balancer);
}
/**
 * Minimizing with empty entry-point and class-file sets must still produce a
 * non-null result containing exactly one class file.
 */
@Test
public void returnsSingleFile() throws IOException {
  final Set<ClassFile> minimizeResult =
      this.bcelMinimizationStrategy.minimize(Collections.emptySet(), Collections.emptySet());
  Assert.assertNotNull(minimizeResult);
  // assertEquals reports the actual size on failure, unlike the previous
  // assertTrue(minimizeResult.size() == 1) which only said "false".
  Assert.assertEquals(1, minimizeResult.size());
}
/**
 * Convenience constructor: delegates to the main constructor with an empty
 * boundary token and empty boundary-token sets, keeping only the given
 * region begin/end patterns.
 */
public WordToSentenceProcessor(Pattern regionBeginPattern, Pattern regionEndPattern) {
  this(
      "",
      Collections.<String>emptySet(),
      Collections.<String>emptySet(),
      regionBeginPattern,
      regionEndPattern);
}
// Stores a batch of flow-rule operations: empty batches complete immediately;
// when the device has no master the batch is applied locally and reported
// complete; when this node is the master it is applied locally; otherwise
// the batch is forwarded to the master node.
@Override
public void storeBatch(FlowRuleBatchOperation operation) {
  // Empty batch: report success right away.
  if (operation.getOperations().isEmpty()) {
    notifyDelegate(
        FlowRuleBatchEvent.completed(
            new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
            new CompletedBatchOperation(true, Collections.emptySet(), operation.deviceId())));
    return;
  }

  DeviceId deviceId = operation.deviceId();
  NodeId master = mastershipService.getMasterFor(deviceId);

  if (master == null) {
    // No master: update the local store, then report completion.
    log.warn("No master for {} : flows will be marked for removal", deviceId);
    updateStoreInternal(operation);
    notifyDelegate(
        FlowRuleBatchEvent.completed(
            new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
            new CompletedBatchOperation(true, Collections.emptySet(), operation.deviceId())));
    return;
  }

  // This node is the master for the device: apply the batch locally.
  if (Objects.equal(local, master)) {
    storeBatchInternal(operation);
    return;
  }

  log.trace(
      "Forwarding storeBatch to {}, which is the primary (master) for device {}",
      master,
      deviceId);

  // Forward asynchronously to the master; if the unicast fails, report
  // every target rule of the batch as failed.
  clusterCommunicator
      .unicast(operation, APPLY_BATCH_FLOWS, SERIALIZER::encode, master)
      .whenComplete(
          (result, error) -> {
            if (error != null) {
              log.warn("Failed to storeBatch: {} to {}", operation, master, error);
              Set<FlowRule> allFailures =
                  operation
                      .getOperations()
                      .stream()
                      .map(op -> op.target())
                      .collect(Collectors.toSet());
              notifyDelegate(
                  FlowRuleBatchEvent.completed(
                      new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                      new CompletedBatchOperation(false, allFailures, deviceId)));
            }
          });
}
/**
 * Creates a JavaScript-backed query bound to the given module: the module
 * name is used as both datasource and entity name, access is public, no
 * read/write roles are required, and the query is not a procedure.
 */
public ScriptedQuery(DatabasesClient aBasesProxy, String aModuleName) {
  super(aBasesProxy);
  publicAccess = true;
  sqlText = JAVASCRIPT_QUERY_CONTENTS;
  datasourceName = aModuleName;
  entityName = aModuleName;
  readRoles = Collections.<String>emptySet();
  writeRoles = Collections.<String>emptySet();
  procedure = false;
}
// Verifies that a MultipleRpcCommand wrapping two commands (a clustered get
// and a put) survives a marshall/unmarshall round trip unchanged.
public void testMultiRpcCommand() throws Exception {
  String cacheName = EmbeddedCacheManager.DEFAULT_CACHE_NAME;
  ClusteredGetCommand c2 = new ClusteredGetCommand("key", cacheName, Collections.<Flag>emptySet());
  PutKeyValueCommand c5 =
      new PutKeyValueCommand("k", "v", false, null, 0, 0, Collections.<Flag>emptySet());
  MultipleRpcCommand c99 =
      new MultipleRpcCommand(Arrays.<ReplicableCommand>asList(c2, c5), cacheName);
  marshallAndAssertEquality(c99);
}
// Filtering the VCS log by branch "feature" combined with the test user must
// return exactly the hashes produced by generateHistoryForFilters.
public void test_filter_by_branch_and_user() throws Exception {
  List<String> hashes = generateHistoryForFilters(false);
  // Branch filter: include "feature", exclude nothing.
  VcsLogBranchFilter branchFilter =
      new VcsLogBranchFilterImpl(singleton("feature"), Collections.<String>emptySet());
  // User filter: match the test user name, no per-root or resolved-user data.
  VcsLogUserFilter userFilter =
      new VcsLogUserFilterImpl(
          singleton(GitTestUtil.USER_NAME),
          Collections.<VirtualFile, VcsUser>emptyMap(),
          Collections.<VcsUser>emptySet());
  List<String> actualHashes = getFilteredHashes(branchFilter, userFilter);
  assertEquals(hashes, actualHashes);
}
/**
 * Collects all exact instances of the given concept in this model by walking
 * every root node. Returns an empty set when find-usages is unsupported or
 * the model cannot be loaded.
 */
public Set<SNode> findExactInstances(SNode concept, IScope scope) {
  if (!myFindUsagesSupported) {
    return Collections.emptySet();
  }
  SModel model = myModelDescriptor.getSModel();
  if (model == null) {
    return Collections.emptySet();
  }
  Set<SNode> instances = new HashSet<SNode>();
  // Accumulate matches from every root of the model.
  for (SNode rootNode : model.roots()) {
    addExactInstances(rootNode, concept, instances, scope);
  }
  return instances;
}
/**
 * Returns the automatically indexed property keys for the given element
 * class (vertex or edge), or an empty set when the corresponding
 * auto-indexer is disabled.
 *
 * @throws IllegalArgumentException when the class is neither a vertex nor an edge type
 */
public <T extends Element> Set<String> getIndexedKeys(final Class<T> elementClass) {
  final boolean isVertexClass = Vertex.class.isAssignableFrom(elementClass);
  final boolean isEdgeClass = Edge.class.isAssignableFrom(elementClass);
  if (!isVertexClass && !isEdgeClass) {
    throw ExceptionFactory.classIsNotIndexable(elementClass);
  }
  if (isVertexClass) {
    return this.rawGraph.index().getNodeAutoIndexer().isEnabled()
        ? this.rawGraph.index().getNodeAutoIndexer().getAutoIndexedProperties()
        : Collections.<String>emptySet();
  }
  return this.rawGraph.index().getRelationshipAutoIndexer().isEnabled()
      ? this.rawGraph.index().getRelationshipAutoIndexer().getAutoIndexedProperties()
      : Collections.<String>emptySet();
}
/** Configuration for item restrictions, loaded from the settings file. */
private static class LocalConfiguration extends ConfigurationBase {
  // When true, only per-item permissions are consulted.
  @Setting("item-permissions-only")
  public boolean useItemPermissionsOnly;

  // Whitelisted item ids; defaults to none.
  @Setting("allowed-items")
  public Set<Integer> allowedItems = Collections.emptySet();

  // Blacklisted item ids; defaults to none.
  // NOTE(review): the key "disllowed-items" is misspelled, but it is
  // load-bearing — existing configuration files use this exact key, so
  // renaming it would silently break them. Fix only with a migration.
  @Setting("disllowed-items")
  public Set<Integer> disallowedItems = Collections.emptySet();

  // Stack size applied when no explicit size is configured.
  @Setting("default-item-stack-size")
  public int defaultItemStackSize = 1;
}
// Verifies the REJECTING role-combination policy: zero or one mapped role is
// accepted, while two or more mapped roles must be rejected with an exception.
@Test
public void testRoleCombinationRejecting() {
  Action action =
      new Action(
          null, null, EnumSet.of(Action.ActionEffect.ADDRESS, Action.ActionEffect.READ_CONFIG));
  TargetResource targetResource =
      TargetResource.forStandalone(PathAddress.EMPTY_ADDRESS, ROOT_RR, null);
  DefaultPermissionFactory permissionFactory = null;
  // No roles: must not throw.
  try {
    permissionFactory =
        new DefaultPermissionFactory(
            new TestRoleMapper(), Collections.<ConstraintFactory>emptySet(), REJECTING);
    permissionFactory.getUserPermissions(caller, environment, action, targetResource);
  } catch (Exception e) {
    fail();
  }
  // A single role: must not throw.
  try {
    permissionFactory =
        new DefaultPermissionFactory(
            new TestRoleMapper(StandardRole.MONITOR),
            Collections.<ConstraintFactory>emptySet(),
            REJECTING);
    permissionFactory.getUserPermissions(caller, environment, action, targetResource);
  } catch (Exception e) {
    fail();
  }
  // Two roles: the combination must be rejected.
  // NOTE(review): this case uses a different constructor overload (no
  // ConstraintFactory set argument) than the others — confirm intentional.
  permissionFactory =
      new DefaultPermissionFactory(
          new TestRoleMapper(StandardRole.MONITOR, StandardRole.DEPLOYER), REJECTING);
  try {
    permissionFactory.getUserPermissions(caller, environment, action, targetResource);
    fail();
  } catch (Exception e) {
    /* expected */
  }
  // Three roles: also rejected.
  permissionFactory =
      new DefaultPermissionFactory(
          new TestRoleMapper(StandardRole.MONITOR, StandardRole.DEPLOYER, StandardRole.AUDITOR),
          Collections.<ConstraintFactory>emptySet(),
          REJECTING);
  try {
    permissionFactory.getUserPermissions(caller, environment, action, targetResource);
    fail();
  } catch (Exception e) {
    /* expected */
  }
}
/**
 * Suggests LDAP group references for the given name: an exact LDAP UUID
 * resolves to that single group (or nothing if unknown); a name with the
 * LDAP prefix triggers an LDAP search; anything else yields no suggestions.
 */
@Override
public Collection<GroupReference> suggest(String name, ProjectControl project) {
  AccountGroup.UUID uuid = new AccountGroup.UUID(name);
  if (isLdapUUID(uuid)) {
    GroupDescription.Basic group = get(uuid);
    return group == null
        ? Collections.<GroupReference>emptySet()
        : Collections.singleton(GroupReference.forGroup(group));
  }
  if (name.startsWith(LDAP_NAME)) {
    // Strip the prefix and delegate to the LDAP-backed suggestion lookup.
    return suggestLdap(name.substring(LDAP_NAME.length()));
  }
  return Collections.emptySet();
}
/**
 * Resolves an external module by locating its JAR in the Gradle distribution.
 * The resulting module has that single classpath entry and no runtime
 * classpath or module dependencies.
 *
 * @throws UnknownModuleException if no JAR for the module can be found
 */
public Module getExternalModule(String name) {
  File jar = findJar(name);
  if (jar == null) {
    String message =
        String.format(
            "Cannot locate JAR for module '%s' in distribution directory '%s'.",
            name, gradleDistributionLocator.getGradleHome());
    throw new UnknownModuleException(message);
  }
  return new DefaultModule(
      name,
      Collections.singleton(jar),
      Collections.<File>emptySet(),
      Collections.<Module>emptySet());
}
/** * Implementation of the OCL * * <ul> * <li><tt>Set::intersection(set : Set(T)) : Set(T)</tt> * <li><tt>Set::intersection(bag : Bag(T)) : Set(T)</tt> * <li><tt>Bag::intersection(set : Set(T)) : Set(T)</tt> * <li><tt>Bag::intersection(bag : Bag(T)) : Set(T)</tt> * </ul> * * operations. * * @param self the source set or bag * @param c another set or bag * @return the intersection of the source set or bag with the other set or bag */ public static <E> Collection<E> intersection( Collection<? extends E> self, Collection<? extends E> c) { int size1 = self.size(); int size2 = c.size(); // if either collection is empty, then so is the result if (size1 == 0 || size2 == 0) { if (self instanceof Set<?> || c instanceof Set<?>) { return Collections.emptySet(); } else { return BagImpl.emptyBag(); } } Collection<E> result = null; if (self instanceof Set<?> || c instanceof Set<?>) { // if either argument is a set, so is the result if (size1 == 0 || size2 == 0) { return Collections.emptySet(); } result = createNewSet(); } else { // both arguments are bags, so is the result if (size1 == 0 || size2 == 0) { return BagImpl.emptyBag(); } result = createNewBag(); } // loop over the smaller collection and add only elements // that are in the larger collection if (self.size() > c.size()) { for (E e : c) { if (includes(self, e)) { result.add(e); } } } else { for (E e : self) { if (includes(c, e)) { result.add(e); } } } return result; }
/**
 * Creates a new {@code SwingTerminalImplementation}
 *
 * @param component JComponent that is the Swing terminal surface
 * @param fontConfiguration Font configuration to use
 * @param initialTerminalSize Initial size of the terminal
 * @param deviceConfiguration Device configuration
 * @param colorConfiguration Color configuration
 * @param scrollController Controller to be used when inspecting scroll status
 */
SwingTerminalImplementation(
    JComponent component,
    SwingTerminalFontConfiguration fontConfiguration,
    TerminalSize initialTerminalSize,
    TerminalEmulatorDeviceConfiguration deviceConfiguration,
    TerminalEmulatorColorConfiguration colorConfiguration,
    TerminalScrollController scrollController) {
  super(initialTerminalSize, deviceConfiguration, colorConfiguration, scrollController);
  this.component = component;
  this.fontConfiguration = fontConfiguration;

  // Prevent us from shrinking beyond one character
  component.setMinimumSize(
      new Dimension(fontConfiguration.getFontWidth(), fontConfiguration.getFontHeight()));
  // Clearing the focus traversal keys stops Swing from consuming TAB (and
  // Shift+TAB) for focus navigation, so the terminal receives them as input.
  //noinspection unchecked
  component.setFocusTraversalKeys(
      KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS, Collections.<AWTKeyStroke>emptySet());
  //noinspection unchecked
  component.setFocusTraversalKeys(
      KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS, Collections.<AWTKeyStroke>emptySet());
  // Make sure the component is double-buffered to prevent flickering
  component.setDoubleBuffered(true);
  component.addKeyListener(new TerminalInputListener());
  // Clicking the terminal surface claims keyboard focus for it.
  component.addMouseListener(
      new TerminalMouseListener() {
        @Override
        public void mouseClicked(MouseEvent e) {
          super.mouseClicked(e);
          SwingTerminalImplementation.this.component.requestFocusInWindow();
        }
      });
  // Invoke onCreated/onDestroyed when the component becomes displayable or
  // stops being displayable.
  component.addHierarchyListener(
      new HierarchyListener() {
        @Override
        public void hierarchyChanged(HierarchyEvent e) {
          if (e.getChangeFlags() == HierarchyEvent.DISPLAYABILITY_CHANGED) {
            if (e.getChanged().isDisplayable()) {
              onCreated();
            } else {
              onDestroyed();
            }
          }
        }
      });
}
// Finds all file-system resources under `rootDirResource` matching `pattern`.
// Returns an empty set (after logging) when the root cannot be resolved to a
// file, does not exist, is not a directory, or is unreadable.
protected Set<Resource> doFindPathMatchingFileResources(
    Resource rootDirResource, String pattern) {
  File rootDir;
  try {
    rootDir = rootDirResource.getFile().getAbsoluteFile();
  } catch (IOException ex) {
    LOGGER.logMessage(
        LogLevel.WARN,
        "Cannot search for matching files underneath "
            + rootDirResource
            + " because it does not correspond to a directory in the file system",
        ex);
    return Collections.emptySet();
  }
  if (!rootDir.exists()) {
    // Silently skip non-existing directories.
    LOGGER.logMessage(
        LogLevel.DEBUG,
        "Skipping [" + rootDir.getAbsolutePath() + "] because it does not exist");
    return Collections.emptySet();
  }
  if (!rootDir.isDirectory()) {
    // Complain louder if it exists but is no directory.
    LOGGER.logMessage(
        LogLevel.WARN,
        "Skipping [" + rootDir.getAbsolutePath() + "] because it does not denote a directory");
    return Collections.emptySet();
  }
  if (!rootDir.canRead()) {
    LOGGER.logMessage(
        LogLevel.WARN,
        "Cannot search for matching files underneath directory ["
            + rootDir.getAbsolutePath()
            + "] because the application is not allowed to read the directory");
    return Collections.emptySet();
  }
  // Build the full slash-separated match pattern: absolute root path plus
  // the (separator-normalized) sub-pattern.
  String fullPattern = StringUtils.replace(rootDir.getAbsolutePath(), File.separator, "/");
  if (!pattern.startsWith("/")) {
    fullPattern += "/";
  }
  String subPattern = StringUtils.replace(pattern, File.separator, "/");
  fullPattern = fullPattern + subPattern;
  Set<Resource> result = new LinkedHashSet<Resource>(8);
  try {
    doRetrieveMatchingFiles(fullPattern, subPattern, rootDir, result);
  } catch (IOException e) {
    // Best effort: log and return whatever was collected before the failure.
    LOGGER.errorMessage(e.getMessage(), e);
  }
  return result;
}