/** Populates the sample hobbit data used by the various select-many examples. */
public SelectMany05Bean() {
  HobbitBean[] beans = {
    new HobbitBean("Bilbo", "Ring Finder"),
    new HobbitBean("Frodo", "Ring Bearer"),
    new HobbitBean("Merry", "Trouble Maker"),
    new HobbitBean("Pippin", "Trouble Maker")
  };

  // Selectable items, one per hobbit name, kept in declaration order.
  Set<SelectItem> selectItems = new LinkedHashSet<SelectItem>();
  for (HobbitBean bean : beans) {
    selectItems.add(new SelectItem(bean.getName()));
  }
  possibleValues = Collections.unmodifiableSet(selectItems);

  hobbitCollection = new TreeSet<HobbitBean>();
  hobbitCollection.addAll(Arrays.asList(beans));

  // Pre-selected values, one flavor per collection type under test.
  initialSortedSetValues = new TreeSet<String>(Collections.reverseOrder());
  initialSortedSetValues.add("Pippin");
  initialSortedSetValues.add("Frodo");

  initialCollectionValues = new LinkedHashSet<String>(2);
  initialCollectionValues.add("Bilbo");
  initialCollectionValues.add("Merry");

  initialSetValues = new CopyOnWriteArraySet<String>(); // not Cloneable
  initialSetValues.add("Frodo");

  initialListValues = new Vector<String>();
  initialListValues.add("Bilbo");
  initialListValues.add("Pippin");
  initialListValues.add("Merry");

  hobbitDataModel =
      new ListDataModel<HobbitBean>(new ArrayList<HobbitBean>(Arrays.asList(beans)));
}
/**
 * Misleading method name: the examples are all instances from the a-part of the
 * atomicRole(a,b); it has nothing to do with the actual Domain class.
 *
 * @param atomicRole the property whose subject-side members become negative examples
 */
public void makeNegativeExamplesFromDomain(OWLObjectProperty atomicRole) {
  fromDomain.clear();
  logger.debug("making Negative Examples from Domain of : " + atomicRole);
  // every individual occurring as a subject of this property
  fromDomain.addAll(reasoningService.getPropertyMembers(atomicRole).keySet());
  // positives must never be reported as negatives
  fromDomain.removeAll(fullPositiveSet);
  logger.debug("|-neg Example size from Domain: " + this.fromDomain.size());
}
/**
 * Validates the configured hosts against the migrations on disk and logs a short
 * report of any pending migrations.
 *
 * @throws MojoExecutionException if validation fails for any reason
 */
public void executeMojo() throws MojoExecutionException {
  getLog().info("Validating " + getHosts() + " using migrations at " + getMigrationsPath() + "");
  try {
    MigrationManager manager = createMigrationManager();
    SortedSet<Migration> pending = manager.pendingMigrations();

    StringBuilder report = new StringBuilder();
    report.append("\n Database: ").append(getHosts());
    report.append("\n Up-to-date: ").append(pending.isEmpty());
    report.append("\n Pending Migrations: ");
    // join the pending filenames, one per line, without a trailing separator
    String separator = "";
    for (Migration migration : pending) {
      report.append(separator).append(migration.getFilename());
      separator = "\n ";
    }
    getLog().info(report.toString());
  } catch (Exception e) {
    throw new MojoExecutionException("Failed to validate " + getHosts(), e);
  }
}
/** {@inheritDoc} */
@Override
public void onMatching(
    String docUri, Type type, SortedSet<Annotation> goldAnnos, SortedSet<Annotation> sysAnnos) {
  // Special case: exactly one gold and one system annotation with identical
  // begin/end offsets is reported as an "Exact" match row.
  if (goldAnnos.size() == 1 && sysAnnos.size() == 1) {
    Annotation goldAnno = goldAnnos.iterator().next();
    Annotation sysAnno = sysAnnos.iterator().next();
    if (goldAnno.getBegin() == sysAnno.getBegin() && goldAnno.getEnd() == sysAnno.getEnd()) {
      printRow(
          type.getShortName(),
          "Exact",
          goldAnno.getCoveredText(),
          String.valueOf(goldAnno.getBegin()),
          sysAnno.getCoveredText(),
          String.valueOf(sysAnno.getBegin()),
          docUri);
      return;
    }
  }
  // Everything else (offset mismatch, or several annotations on either side)
  // is reported as one "Partial" row with joined texts and offsets.
  printRow(
      type.getShortName(),
      "Partial",
      Joiner.on(" /// ").join(transform(goldAnnos, annoToTxt)),
      Joiner.on(", ").join(transform(goldAnnos, annoToOffset)),
      Joiner.on(" /// ").join(transform(sysAnnos, annoToTxt)),
      Joiner.on(", ").join(transform(sysAnnos, annoToOffset)),
      docUri);
}
@VisibleForTesting static StripCapabilitiesResult stripCapabilities( XmlElement configElement, Set<String> allCapabilitiesFromHello) { // collect all namespaces Set<String> foundNamespacesInXML = getNamespaces(configElement); LOG.trace( "All capabilities {}\nFound namespaces in XML {}", allCapabilitiesFromHello, foundNamespacesInXML); // required are referenced both in xml and hello SortedSet<String> requiredCapabilities = new TreeSet<>(); // can be removed SortedSet<String> obsoleteCapabilities = new TreeSet<>(); for (String capability : allCapabilitiesFromHello) { String namespace = capability.replaceAll("\\?.*", ""); if (foundNamespacesInXML.contains(namespace)) { requiredCapabilities.add(capability); } else { obsoleteCapabilities.add(capability); } } LOG.trace( "Required capabilities {}, \nObsolete capabilities {}", requiredCapabilities, obsoleteCapabilities); return new StripCapabilitiesResult(requiredCapabilities, obsoleteCapabilities); }
private void fillMap( Resource s, Model model, Map<Resource, SortedSet<Statement>> resource2Statements) { // get all statements with subject s ExtendedIterator<Statement> it = model.listStatements(s, null, (RDFNode) null); // filter statement if necessary if (!dropFilters.isEmpty()) { Iterator<Filter<Statement>> iter = dropFilters.iterator(); Filter<Statement> keepFilter = iter.next(); it = it.filterKeep(keepFilter); while (iter.hasNext()) { it = it.filterKeep(iter.next()); // keepFilter = keepFilter.and(iter.next()); } // it = it.filterKeep(keepFilter); } SortedSet<Statement> statements = resource2Statements.get(s); if (statements == null) { statements = new TreeSet<Statement>(comparator); resource2Statements.put(s, statements); } while (it.hasNext()) { Statement st = it.next(); statements.add(st); if ((st.getObject().isResource()) && !resource2Statements.containsKey(st.getObject())) { fillMap(st.getObject().asResource(), model, resource2Statements); } } }
/**
 * Builds a map from each discovered {@link Package} to the class loaders it was found
 * through, combining the global loaders with the caller-supplied ones.
 *
 * @param classLoaders additional class loaders to scan for packages
 * @param ignorePackages package names to exclude
 * @return the package-to-classloaders map
 */
public static Map<Package, ClassLoader[]> getPackageMap(
    List<ClassLoader> classLoaders, Set<String> ignorePackages) {
  Map<Package, ClassLoader[]> answer = new HashMap<Package, ClassLoader[]>();

  // Packages visible to the global loaders (thread context + our own).
  ClassLoader[] globalClassLoaders = {
    Thread.currentThread().getContextClassLoader(), ClassScanner.class.getClassLoader()
  };
  add(answer, Package.getPackages(), globalClassLoaders, ignorePackages);

  // Packages visible to each caller-supplied loader.
  ClassLoader[] classLoaderArray = new ClassLoader[classLoaders.size()];
  classLoaders.toArray(classLoaderArray);
  for (ClassLoader classLoader : classLoaders) {
    Package[] loaderPackages = findPackagesForClassLoader(classLoader);
    add(answer, loaderPackages, classLoaderArray, ignorePackages);
  }

  // Log the discovered package names in sorted order. (The previous version
  // iterated a separate, always-empty local Set here, so nothing was ever logged.)
  SortedSet<String> names = new TreeSet<String>();
  for (Package aPackage : answer.keySet()) {
    names.add(aPackage.getName());
  }
  for (String name : names) {
    LOG.info("Got package " + name);
  }
  return answer;
}
/**
 * Queries the given customer's orders.
 *
 * @param alias the customer alias
 * @param passwordHash the customer password-hash
 * @return the customer's orders
 * @throws IllegalStateException if the login data is invalid
 * @throws SQLException if there is a problem with the underlying JDBC connection
 */
public SortedSet<Order> queryOrders(final String alias, final byte[] passwordHash)
    throws SQLException {
  // authenticate first; this throws if the login data is invalid
  final Customer customer = this.queryCustomer(alias, passwordHash);
  final SortedSet<Order> result = new TreeSet<Order>();

  synchronized (this.connection) {
    try (PreparedStatement statement = this.connection.prepareStatement(SQL_SELECT_PURCHASES)) {
      statement.setLong(1, customer.getIdentity());
      try (ResultSet rows = statement.executeQuery()) {
        while (rows.next()) {
          final Order order = new Order();
          order.setIdentity(rows.getLong("identity"));
          order.setCustomerIdentity(rows.getLong("customerIdentity"));
          order.setCreationTimestamp(rows.getLong("creationTimestamp"));
          order.setTaxRate(rows.getDouble("taxRate"));
          result.add(order);
        }
      }
    }
  }

  // items are populated after the statement is closed, outside the lock
  for (final Order order : result) {
    this.populateOrderItems(order);
  }
  return result;
}
/**
 * Delegates to the two-argument {@code addDefaulted} overload using all font
 * definitions registered in the workbench theme registry, and returns the
 * resulting set as an array.
 *
 * @param definitions the font definitions to process
 * @return the defaulted font definitions
 */
private static FontDefinition[] addDefaulted(FontDefinition[] definitions) {
  IThemeRegistry registry = WorkbenchPlugin.getDefault().getThemeRegistry();
  FontDefinition[] allDefs = registry.getFonts();
  // raw SortedSet matches the overload's return type
  SortedSet set = addDefaulted(definitions, allDefs);
  return (FontDefinition[]) set.toArray(new FontDefinition[set.size()]);
}
/**
 * Verifies that raising TSERV_ASSIGNMENT_MAXCONCURRENT makes bringing a heavily-split
 * table online measurably faster than with the default setting.
 */
@Test(timeout = 5 * 60 * 1000)
public void testConcurrentAssignmentPerformance() throws Exception {
  // make a table with a lot of splits
  String tableName = getUniqueNames(1)[0];
  Connector c = getConnector();
  c.tableOperations().create(tableName);
  SortedSet<Text> splits = new TreeSet<Text>();
  for (int i = 0; i < 4000; i++) {
    splits.add(new Text(randomHex(8)));
  }
  c.tableOperations().addSplits(tableName, splits);
  c.tableOperations().offline(tableName, true);
  // time how long it takes to load with the default assignment concurrency
  long now = System.currentTimeMillis();
  c.tableOperations().online(tableName, true);
  long diff = System.currentTimeMillis() - now;
  log.debug("Loaded " + splits.size() + " tablets in " + diff + " ms");
  // raise the concurrent-assignment limit, take the table offline again
  c.instanceOperations().setProperty(Property.TSERV_ASSIGNMENT_MAXCONCURRENT.getKey(), "20");
  now = System.currentTimeMillis();
  c.tableOperations().offline(tableName, true);
  // wait >10 seconds for thread pool to update
  UtilWaitThread.sleep(Math.max(0, now + 11 * 1000 - System.currentTimeMillis()));
  // time the load again with the higher concurrency
  now = System.currentTimeMillis();
  c.tableOperations().online(tableName, true);
  long diff2 = System.currentTimeMillis() - now;
  log.debug("Loaded " + splits.size() + " tablets in " + diff2 + " ms");
  // the concurrent load must beat the initial load
  assertTrue(diff2 < diff);
}
/**
 * Checks to see if there are any "unseen" messages or delivery reports. Shows the most recent
 * notification if there is one.
 *
 * @param context the context to use
 * @param isNew if notify a new message comes, it should be true, otherwise, false.
 * @param isStatusMessage passed through to the delivery-report toast, if any
 */
public static void blockingUpdateNewMessageIndicator(
    Context context, boolean isNew, boolean isStatusMessage) {
  // Candidate notifications ordered by INFO_COMPARATOR; only the first is shown.
  SortedSet<MmsSmsNotificationInfo> accumulator =
      new TreeSet<MmsSmsNotificationInfo>(INFO_COMPARATOR);
  MmsSmsDeliveryInfo delivery = null;
  Set<Long> threads = new HashSet<Long>(4);
  int count = 0;
  // gather unseen MMS and SMS, counting messages and collecting affected threads
  count += accumulateNotificationInfo(accumulator, getMmsNewMessageNotificationInfo(context, threads));
  count += accumulateNotificationInfo(accumulator, getSmsNewMessageNotificationInfo(context, threads));
  cancelNotification(context, NOTIFICATION_ID);
  if (!accumulator.isEmpty()) {
    if (Log.isLoggable(LogTag.APP, Log.VERBOSE)) {
      Log.d(TAG, "blockingUpdateNewMessageIndicator: count=" + count + ", isNew=" + isNew);
    }
    // deliver only the first (highest-priority) notification
    accumulator.first().deliver(context, isNew, count, threads.size());
  }
  // And deals with delivery reports (which use Toasts). It's safe to call in a worker
  // thread because the toast will eventually get posted to a handler.
  delivery = getSmsNewDeliveryInfo(context);
  if (delivery != null) {
    delivery.deliver(context, isStatusMessage);
  }
}
/**
 * Deserializes a container listing from the response stream into a page of
 * {@link ObjectInfo}, attaching the container name and building each object's URI
 * from the request endpoint.
 */
public PageSet<ObjectInfo> apply(InputStream stream) {
  checkState(args != null, "request should be initialized at this point");
  Type listType = new TypeToken<SortedSet<ObjectInfoImpl>>() {}.getType();
  try {
    SortedSet<ObjectInfoImpl> list = apply(stream, listType);
    SortedSet<ObjectInfo> returnVal =
        Sets.newTreeSet(
            Iterables.transform(
                list,
                new Function<ObjectInfoImpl, ObjectInfo>() {
                  public ObjectInfo apply(ObjectInfoImpl from) {
                    // rebuild each entry with the container name and a URI of
                    // endpoint (query stripped) + "/" + object name
                    return from.toBuilder()
                        .container(container)
                        .uri(
                            uriBuilder(request.getEndpoint())
                                .clearQuery()
                                .appendPath(from.getName())
                                .build())
                        .build();
                  }
                }));
    // a full page (size == maxResults) implies more results may follow; the last
    // name becomes the marker for fetching the next page
    boolean truncated = options.getMaxResults() == returnVal.size();
    String marker = truncated ? returnVal.last().getName() : null;
    return new PageSetImpl<ObjectInfo>(returnVal, marker);
  } catch (IOException e) {
    throw new RuntimeException("problem reading response from request: " + request, e);
  }
}
/**
 * Handles the configuration submission.
 *
 * <p>Load view-specific properties here.
 */
@Override
protected void submit(StaplerRequest req) throws ServletException, FormException, IOException {
  // rebuild the explicit job list from the checked items in the form
  jobNames.clear();
  for (TopLevelItem item : Hudson.getInstance().getItems()) {
    if (req.getParameter(item.getName()) != null) jobNames.add(item.getName());
  }
  // optional regex-based inclusion; the pattern is compiled once here
  if (req.getParameter("useincluderegex") != null) {
    includeRegex = Util.nullify(req.getParameter("includeRegex"));
    if (includeRegex == null) includePattern = null;
    else includePattern = Pattern.compile(includeRegex);
  } else {
    includeRegex = null;
    includePattern = null;
  }
  // lazily create the column/filter lists before rebuilding them from the form
  if (columns == null) {
    columns = new DescribableList<ListViewColumn, Descriptor<ListViewColumn>>(Saveable.NOOP);
  }
  columns.rebuildHetero(req, req.getSubmittedForm(), ListViewColumn.all(), "columns");
  if (jobFilters == null) {
    jobFilters = new DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>>(Saveable.NOOP);
  }
  jobFilters.rebuildHetero(req, req.getSubmittedForm(), ViewJobFilter.all(), "jobFilters");
  // tri-state status filter: null = no filtering, otherwise true iff "1" was submitted
  String filter = Util.fixEmpty(req.getParameter("statusFilter"));
  statusFilter = filter != null ? "1".equals(filter) : null;
}
/**
 * Returns a read-only view of all {@link Job}s in this view.
 *
 * <p>This method returns a separate copy each time to avoid concurrent modification issue.
 */
public synchronized List<TopLevelItem> getItems() {
  // explicit job names plus anything matched by the include pattern
  SortedSet<String> names = new TreeSet<String>(jobNames);
  if (includePattern != null) {
    for (TopLevelItem item : Hudson.getInstance().getItems()) {
      String itemName = item.getName();
      if (includePattern.matcher(itemName).matches()) {
        names.add(itemName);
      }
    }
  }
  List<TopLevelItem> items = new ArrayList<TopLevelItem>(names.size());
  for (String n : names) {
    TopLevelItem item = Hudson.getInstance().getItem(n);
    // Add if no status filter or filter matches enabled/disabled status:
    // (XOR: statusFilter TRUE keeps enabled projects, FALSE keeps disabled ones;
    // items that are not AbstractProject always pass)
    if (item != null
        && (statusFilter == null
            || !(item instanceof AbstractProject)
            || ((AbstractProject) item).isDisabled() ^ statusFilter)) items.add(item);
  }
  // check the filters
  Iterable<ViewJobFilter> jobFilters = getJobFilters();
  List<TopLevelItem> allItems = Hudson.getInstance().getItems();
  for (ViewJobFilter jobFilter : jobFilters) {
    items = jobFilter.filter(items, allItems, this);
  }
  return items;
}
/**
 * aggregates all collected neg examples CAVE: it is necessary to call one of the make functions
 * before calling this OTHERWISE it will choose random examples
 *
 * @param neglimit size of negative Example set, 0 means all, which can be quite large
 * @param stable decides whether neg Examples are randomly picked, default false, faster for
 *     developing, since the cache can be used
 * @param forceNegLimit forces that exactly neglimit instances are returned by adding more
 *     instances. NOTE(review): this parameter is never read in the body — confirm intent;
 *     topping-up currently happens unconditionally whenever negatives.size() &lt; neglimit.
 */
public SortedSet<OWLIndividual> getNegativeExamples(
    int neglimit, boolean stable, boolean forceNegLimit) {
  // union of the negatives collected by the various make* functions
  SortedSet<OWLIndividual> negatives = new TreeSet<OWLIndividual>();
  negatives.addAll(fromParallelClasses);
  negatives.addAll(fromRelated);
  negatives.addAll(fromSuperclasses);
  // top up from "all other instances" when below the limit
  if (negatives.size() < neglimit) {
    makeNegativeExamplesFromAllOtherInstances();
    negatives.addAll(SetManipulation.stableShrinkInd(fromAllOther, neglimit - negatives.size()));
  }
  // neglimit <= 0 means: return everything, no shrinking
  if (neglimit <= 0) {
    logger.debug("neg Example size NO shrinking: " + negatives.size());
    return negatives;
  }
  logger.debug("neg Example size before shrinking: " + negatives.size());
  // stable = deterministic pick (cache-friendly), otherwise random pick
  if (stable) {
    negatives = SetManipulation.stableShrinkInd(negatives, neglimit);
  } else {
    negatives = SetManipulation.fuzzyShrinkInd(negatives, neglimit);
  }
  logger.debug("neg Example size after shrinking: " + negatives.size());
  return negatives;
}
/*
 * (non-Javadoc)
 * @see java.lang.Object#toString()
 *
 * Renders every known option as "group \t name \t value", where the value is taken
 * from the highest-priority container that holds one.
 */
public String toString() {
  final StringBuilder sb = new StringBuilder();
  // union of the options known to any of the four containers
  SortedSet<Option> union = new TreeSet<Option>(savedOptionMap.keySet());
  union.addAll(dependenciesResolvedOptionMap.keySet());
  union.addAll(commandLineOptionMap.keySet());
  union.addAll(optionFileOptionMap.keySet());
  for (Option option : union) {
    // look the value up container by container in priority order
    // (SAVEDOPTION .. OPTIONFILE); the first non-null value wins
    Object value = null;
    for (int i = OptionContainer.SAVEDOPTION; i <= OptionContainer.OPTIONFILE; i++) {
      if (i == OptionContainer.SAVEDOPTION) {
        value = savedOptionMap.get(option);
      } else if (i == OptionContainer.DEPENDENCIES_RESOLVED) {
        value = dependenciesResolvedOptionMap.get(option);
      } else if (i == OptionContainer.COMMANDLINE) {
        value = commandLineOptionMap.get(option);
      } else if (i == OptionContainer.OPTIONFILE) {
        value = optionFileOptionMap.get(option);
      }
      if (value != null) {
        break;
      }
    }
    // chained appends instead of string concatenation inside append()
    sb.append(option.getGroup().getName())
        .append('\t')
        .append(option.getName())
        .append('\t')
        .append(value)
        .append('\n');
  }
  return sb.toString();
}
/**
 * Populates the file chooser with file filters for the given pathway importers /
 * exporters, sorted by display name, and pre-selects the GPML (native format) filter.
 *
 * @param set the pathway importers / exporters to create filters for
 */
private void createFileFilters(Set<? extends PathwayIO> set) {
  jfc.setAcceptAllFileFilterUsed(false);

  // sort by display name; Comparator.comparing replaces the former anonymous class
  SortedSet<PathwayIO> exporters =
      new TreeSet<PathwayIO>(Comparator.comparing(PathwayIO::getName));
  exporters.addAll(set);

  PathwayFileFilter selectedFilter = null;
  for (PathwayIO exp : exporters) {
    PathwayFileFilter ff = new PathwayFileFilter(exp);
    jfc.addChoosableFileFilter(ff);
    // GPML is the native format; remember its filter so it can be pre-selected
    if (exp instanceof GpmlFormat) {
      selectedFilter = ff;
    }
  }
  if (selectedFilter != null) {
    jfc.setFileFilter(selectedFilter);
    fileDialog.setFilenameFilter(selectedFilter);
  }
}
/**
 * @return Returns the {@link MonthlyWage}s from this class sorted by the person ids. The person
 *     ids are assumed to be numbers. If there are ids that are not numbers, those are sorted to
 *     the end of the collection (among themselves, in lexicographic id order).
 */
public SortedSet<MonthlyWage> getSortedWages() {
  SortedSet<MonthlyWage> sortedWages =
      new TreeSet<>(
          (wage1, wage2) -> {
            Integer id1 = tryParseId(wage1.getPerson().getId());
            Integer id2 = tryParseId(wage2.getPerson().getId());
            if (id1 != null && id2 != null) {
              return id1.compareTo(id2);
            } else if (id1 != null) {
              return -1; // numeric ids sort before non-numeric ones
            } else if (id2 != null) {
              return 1;
            } else {
              // both non-numeric: fall back to the raw string order
              return wage1.getPerson().getId().compareTo(wage2.getPerson().getId());
            }
          });
  sortedWages.addAll(this.getWages());
  return sortedWages;
}

/**
 * Parses a person id as an integer.
 *
 * @param id the raw person id
 * @return the numeric value, or {@code null} if the id is not a number
 */
private static Integer tryParseId(String id) {
  try {
    return Integer.parseInt(id);
  } catch (NumberFormatException e) {
    return null;
  }
}
/** * @returns the neighbour IDs * @param userId - the numeric ID of the target user * @param itemId - the numerid ID of the target item * @param itemProfileMap - a map containing item profiles * @param simMap - a map containing user-user similarities */ public ArrayList<Integer> getNeighbours( final Integer itemId, final Integer userId, final Map<Integer, Profile> userProfileMap, final SimilarityMap simMap) { SortedSet<ScoredThingDsc> ss = new TreeSet< ScoredThingDsc>(); // store all user IDs in order of descending similarity in a sorted // set if (userProfileMap.containsKey(userId)) { for (Iterator<Integer> it = userProfileMap.get(userId).getIds().iterator(); it.hasNext(); ) // iterate over each user in the item profile { Integer id = it.next(); double sim = simMap.getSimilarity(itemId, id); if (sim > 0) ss.add(new ScoredThingDsc(sim, id)); } } ArrayList<Integer> neighbours = new ArrayList<Integer>(); // get the k most similar neighbours int kCounter = 0; for (Iterator<ScoredThingDsc> it = ss.iterator(); it.hasNext(); ) { kCounter++; ScoredThingDsc st = it.next(); neighbours.add((Integer) st.thing); if (kCounter > k) break; } return neighbours; }
/**
 * Returns the kept matches minus the removed ones, ordered by the invocation
 * comparator (duplicates collapsed by the set).
 */
@Override
public List<Invocation> getMatches() {
  TreeSet<Invocation> result = new TreeSet<Invocation>(Invocation.COMPARATOR);
  result.addAll(keep.getMatches());
  result.removeAll(remove.getMatches());
  return new ArrayList<Invocation>(result);
}
/** Renders a hierarchical summary of hosts, their sites, and each site's fragments. */
public String toString() {
  StringBuilder buffer = new StringBuilder();
  buffer.append("Hosts: ").append(this.getHosts().size()).append("\n");
  buffer.append("Sites: ").append(this.sites.size()).append("\n");
  buffer.append("Fragments: ").append(this.fragments.size()).append("\n");
  // iterate host keys in sorted order so the output is stable
  SortedSet<String> hosts = new TreeSet<String>();
  hosts.addAll(this.getHosts());
  for (String host_key : hosts) {
    buffer.append("\nHost ").append(CatalogKey.getNameFromKey(host_key)).append("\n");
    for (SiteEntry site : this.host_site_xref.get(host_key)) {
      buffer.append(SPACER).append("Site ").append(site.getId()).append("\n");
      for (FragmentEntry fragment : site.getFragments()) {
        buffer
            .append(SPACER)
            .append(SPACER)
            .append("Fragment ")
            .append(fragment)
            .append(" Size=")
            .append(fragment.getEstimatedSize())
            .append(" Heat=")
            .append(fragment.getEstimatedHeat())
            .append("\n");
      } // FOR
    } // FOR
    buffer.append("--------------------");
  } // FOR
  return (buffer.toString());
}
/** Fluffs the given nodeset and accepts it (this filter never rejects). */
public boolean filter(MutableNodeSet nodeSet) {
  IntArray toAdd = new IntArray();
  Graph graph = nodeSet.getGraph();
  /* We won't fluff clusters smaller than five elements,
   * there's not enough evidence to extend it more. */
  if (nodeSet.size() < 5) return true;
  do {
    // a boundary node qualifies if it is adjacent to at least 2/3 of the members
    int minCount = (int) Math.floor(2.0 * nodeSet.size() / 3.0);
    SortedSet<Integer> members = nodeSet.getMembers();
    toAdd.clear();
    for (int node : nodeSet.getExternalBoundaryNodes()) {
      int[] neis = graph.getAdjacentNodeIndicesArray(node, Directedness.ALL);
      int intersectionSize = 0;
      for (int neighbor : neis) if (members.contains(neighbor)) intersectionSize++;
      if (intersectionSize >= minCount) toAdd.add(node);
    }
    nodeSet.add(toAdd.toArray());
    // in iterative mode, keep expanding until no boundary node qualifies
  } while (iterative && toAdd.size() > 0);
  return true;
}
/**
 * Flattens the given files — recursing into directories — into a single set sorted
 * by {@code FileComparator}.
 *
 * @param files files and/or directories to flatten
 * @return every regular file encountered, sorted
 * @throws IOException if walking a directory tree fails
 */
public static SortedSet<File> flattenAndSort(Iterable<File> files) throws IOException {
  final SortedSet<File> result = new TreeSet<>(new FileComparator());
  for (File candidate : files) {
    if (!candidate.isDirectory()) {
      result.add(candidate);
      continue;
    }
    // recursively collect every regular file below the directory
    Files.walkFileTree(
        candidate.toPath(),
        new SimpleFileVisitor<Path>() {
          @Override
          public FileVisitResult visitFile(Path path, BasicFileAttributes attrs)
              throws IOException {
            result.add(path.toFile());
            return FileVisitResult.CONTINUE;
          }
        });
  }
  return result;
}
/** Populates the template context for a person's homepage: owner, courses, contacts. */
@Override
public void handle(Page page, TemplateContext local, TemplateContext global) {
  Homepage homepage = homepage(page.getSite());
  if (homepage == null || !homepage.isHomepageActivated()) {
    return; // TODO we might want 404 here
  }
  global.put("homepage", homepage);
  Person owner = homepage.getPerson();
  global.put("owner", owner);
  // attended courses, sorted by execution-course name (only when enabled on the homepage)
  if (homepage.getShowCurrentAttendingExecutionCourses()) {
    SortedSet<Attends> attendedCoursesByName =
        new TreeSet<Attends>(Attends.ATTENDS_COMPARATOR_BY_EXECUTION_COURSE_NAME);
    attendedCoursesByName.addAll(homepage.getPerson().getCurrentAttends());
    global.put("attendingCourses", attendedCoursesByName);
  }
  // contacts are sorted/filtered for visibility; phones additionally split by type
  List<? extends PartyContact> emails = owner.getEmailAddresses();
  global.put("visibleEmails", getSortedFilteredContacts(emails));
  List<? extends PartyContact> phones = owner.getPhones();
  global.put(
      "visiblePersonalPhones", getSortedFilteredContacts(phones, PartyContactType.PERSONAL));
  global.put("visibleWorkPhones", getSortedFilteredContacts(phones, PartyContactType.WORK));
  List<? extends PartyContact> mobilePhones = owner.getMobilePhones();
  global.put("visibleMobilePhones", getSortedFilteredContacts(mobilePhones));
  List<? extends PartyContact> websites = owner.getWebAddresses();
  global.put("visibleWebsites", getSortedFilteredContacts(websites));
}
/**
 * Loads the training matrix file into the static {@code train_matrix} / {@code ylable}
 * arrays. Each line has the form {@code "<doc id> :: [id1, id2, ...]"}; documents whose
 * id appears in {@code spam_docs} are labelled 1, others 0.
 *
 * <p>NOTE(review): the input path is hard-coded — consider parameterizing it.
 */
public static void RetrieveTrainMatrix() {
  // try-with-resources: the reader is closed even if parsing throws
  // (the previous version leaked it on any exception)
  try (BufferedReader flrdr =
      new BufferedReader(
          new FileReader("D:\\IR\\Assignment7\\Pytest\\part2\\output\\train_matrix.txt"))) {
    String line = "";
    int doc_count = 0;
    while ((line = flrdr.readLine()) != null) {
      SortedSet<Integer> word_ids = new TreeSet<Integer>();
      int val_count = 0;
      String[] key_value = line.split(" :: ");
      // strip the surrounding brackets of the id list
      key_value[1] = key_value[1].substring(1, key_value[1].length() - 2);
      String[] values = key_value[1].split(",");
      // NOTE(review): if the id list contains duplicates, the trailing slots of
      // this array stay null — confirm the input never contains duplicates
      FeatureNode[] node = new FeatureNode[values.length];
      for (String val : values) word_ids.add(Integer.parseInt(val.trim()));
      // features in ascending word-id order, each with value 1 (binary presence)
      for (int val : word_ids) node[val_count++] = new FeatureNode(val, 1);
      // label: 1 = spam, 0 = ham
      if (spam_docs.contains(key_value[0].trim())) ylable[doc_count] = 1;
      else ylable[doc_count] = 0;
      train_matrix[doc_count++] = node;
    }
  } catch (Exception e) {
    e.printStackTrace();
    // exit with a non-zero status on failure (was System.exit(0), which
    // signalled success to the caller even on error)
    System.exit(1);
  }
}
/**
 * Updates which organizations may view the given node.
 *
 * @param node the node whose view permissions are updated
 * @param viewOrgIds ids of organizations that should have view permission; {@code null}
 *     is treated as empty (all existing view permissions are revoked)
 */
@Transactional
public void update(Node node, Integer[] viewOrgIds) {
  if (viewOrgIds == null) {
    viewOrgIds = new Integer[0];
  }
  SortedSet<NodeOrg> nodeOrgs = node.getNodeOrgs();
  if (nodeOrgs == null) {
    nodeOrgs = new TreeSet<NodeOrg>(new NodeOrgComparator());
    node.setNodeOrgs(nodeOrgs);
  }
  // First pass: update existing entries (grant or revoke the view permission)
  for (NodeOrg nodeOrg : nodeOrgs) {
    if (ArrayUtils.contains(viewOrgIds, nodeOrg.getOrg().getId())) {
      nodeOrg.setViewPerm(true);
    } else {
      nodeOrg.setViewPerm(false);
    }
  }
  // Second pass: add entries for orgs that have none yet
  for (Integer viewOrgId : viewOrgIds) {
    boolean contains = false;
    for (NodeOrg nodeOrg : nodeOrgs) {
      if (nodeOrg.getOrg().getId().equals(viewOrgId)) {
        contains = true;
        break;
      }
    }
    if (!contains) {
      nodeOrgs.add(save(node, viewOrgId, true));
    }
  }
}
/**
 * Extracts the class names from the given statistics, sorted naturally.
 *
 * @param classes the per-class statistics
 * @return the sorted class names
 */
public static SortedSet<String> getClassNames(final SortedSet<ClassStats> classes) {
  final SortedSet<String> result = new TreeSet<>();
  for (final ClassStats stats : classes) {
    result.add(stats.classname);
  }
  return result;
}
/** Re-sorts the collected triples in place into SPO collation order. */
public void sortReadTriples() {
  final SortedSet<Triple> sst =
      new TreeSet<Triple>(new TripleComparator(RDF3XIndexScan.CollationOrder.SPO));
  sst.addAll(readTriples);
  // replace the contents of readTriples with the sorted (set-deduplicated) view
  readTriples.clear();
  readTriples.addAll(sst);
}
/**
 * Returns the number of option values amongst all internal option containers.
 *
 * @return the number of distinct options across all four containers
 */
public int getNumberOfOptionValues() {
  // union of the option keys of every container
  SortedSet<Option> allOptions = new TreeSet<Option>();
  allOptions.addAll(savedOptionMap.keySet());
  allOptions.addAll(dependenciesResolvedOptionMap.keySet());
  allOptions.addAll(commandLineOptionMap.keySet());
  allOptions.addAll(optionFileOptionMap.keySet());
  return allOptions.size();
}
/**
 * NOT IMPLEMENTED YET, DO NOT USE see <code> makeNegativeExamplesFromParallelClasses</code>
 *
 * <p>Always throws a RuntimeException at the end; the realization step that should
 * populate {@code classes} is commented out, so the intermediate loops are no-ops.
 *
 * @param positiveSet the positive examples whose classes would seed the search
 */
@SuppressWarnings("unused")
private void makeNegativeExamplesFromClassesOfInstances(SortedSet<OWLIndividual> positiveSet) {
  logger.debug("making neg Examples from parallel classes");
  SortedSet<OWLClassExpression> classes = new TreeSet<OWLClassExpression>();
  this.fromParallelClasses.clear();
  // intended to collect the classes of each positive instance — realization is
  // not implemented in the reasoning service, so `classes` stays empty
  for (OWLIndividual instance : positiveSet) {
    try {
      // realization is not implemented in reasoningservice
      // classes.addAll(reasoningService.realize()
    } catch (Exception e) {
      logger.warn("not implemented in " + this.getClass());
    }
  }
  logger.debug("getting negExamples from " + classes.size() + " parallel classes");
  for (OWLClassExpression oneClass : classes) {
    logger.debug(oneClass);
    // rsc = new
    // JenaResultSetConvenience(queryConcept("\""+oneClass+"\"",limit));
    try {
      this.fromParallelClasses.addAll(reasoningService.getIndividuals(oneClass));
    } catch (Exception e) {
      logger.warn("not implemented in " + this.getClass());
    }
  }
  // positives must never be reported as negatives
  fromParallelClasses.removeAll(fullPositiveSet);
  logger.debug("|-neg Example size from parallelclass: " + fromParallelClasses.size());
  throw new RuntimeException(
      "not implemented in " + this.getClass() + "method makeNegativeExamplesFromParallelClasses");
}