private SortedSet<String> generateAllPrefixes() {
  SortedSet<String> oldPrefs = new TreeSet<String>();
  SortedSet<String> newPrefs = new TreeSet<String>();
  SortedSet<String> dummySet;
  oldPrefs.add("");
  Alphabet alph = motifSearchSpace.getAlphabet();
  for (int i = 0; i < prefixLength; i++) {
    for (Character c : alph) {
      for (String s : oldPrefs) {
        newPrefs.add(s + c);
      }
    }
    dummySet = oldPrefs;
    oldPrefs = newPrefs;
    newPrefs = dummySet;
    newPrefs.clear();
  }
  return oldPrefs;
}
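// The method above enumerates every string of length prefixLength over the alphabet by swapping
// two working sets each round. A minimal, self-contained sketch of the same buffer-swapping idea,
// with a plain Set<Character> standing in for the project's Alphabet type (an assumption made
// purely for illustration):
import java.util.Arrays;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

public class PrefixEnumSketch {
  // Enumerate all strings of the given length over the alphabet, reusing two sets
  // and swapping them after each round instead of allocating new ones.
  static SortedSet<String> allPrefixes(Set<Character> alphabet, int length) {
    SortedSet<String> current = new TreeSet<>();
    SortedSet<String> next = new TreeSet<>();
    current.add(""); // seed with the empty prefix
    for (int i = 0; i < length; i++) {
      for (char c : alphabet) {
        for (String s : current) {
          next.add(s + c);
        }
      }
      // swap the buffers and clear the one that will be refilled next round
      SortedSet<String> tmp = current;
      current = next;
      next = tmp;
      next.clear();
    }
    return current;
  }

  public static void main(String[] args) {
    // prints [aa, ab, ba, bb]
    System.out.println(allPrefixes(new TreeSet<>(Arrays.asList('a', 'b')), 2));
  }
}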
/**
 * Handles the configuration submission.
 *
 * <p>Load view-specific properties here.
 */
@Override
protected void submit(StaplerRequest req) throws ServletException, FormException, IOException {
  jobNames.clear();
  for (TopLevelItem item : Hudson.getInstance().getItems()) {
    if (req.getParameter(item.getName()) != null) jobNames.add(item.getName());
  }

  if (req.getParameter("useincluderegex") != null) {
    includeRegex = Util.nullify(req.getParameter("includeRegex"));
    if (includeRegex == null) includePattern = null;
    else includePattern = Pattern.compile(includeRegex);
  } else {
    includeRegex = null;
    includePattern = null;
  }

  if (columns == null) {
    columns = new DescribableList<ListViewColumn, Descriptor<ListViewColumn>>(Saveable.NOOP);
  }
  columns.rebuildHetero(req, req.getSubmittedForm(), ListViewColumn.all(), "columns");

  if (jobFilters == null) {
    jobFilters = new DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>>(Saveable.NOOP);
  }
  jobFilters.rebuildHetero(req, req.getSubmittedForm(), ViewJobFilter.all(), "jobFilters");

  String filter = Util.fixEmpty(req.getParameter("statusFilter"));
  statusFilter = filter != null ? "1".equals(filter) : null;
}
void clear() {
  pending_entries.clear();
  output_set.clear();
  object_tasks.clear();
  active_threads.clear();
  time_marks.clear();
  thread_entries = null;
  next_time = 0;
  end_time = 0;
  current_thread = null;
  thread_map.clear();
  cpu_time = null;
  thread_counter = 0;
  task_counter = 0;
  max_delta = 1;
}
@Override
public void clearWebsite() {
  LOG.trace("Clear list of websites");
  TreeSet<Website> oldValue = new TreeSet<>(websites);
  websites.clear();
  changes.firePropertyChange(ModelProperties.WEBSITES.getPropertyName(), oldValue, websites);
}
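// The snippet above follows a common pattern for observable models: snapshot the old value, clear
// the live collection, then fire a property-change event carrying both states. A minimal sketch of
// that pattern with java.beans.PropertyChangeSupport (class, property, and method names here are
// illustrative, not taken from the original project):
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.TreeSet;

public class WebsiteModelSketch {
  private final TreeSet<String> websites = new TreeSet<>();
  private final PropertyChangeSupport changes = new PropertyChangeSupport(this);

  public void addListener(PropertyChangeListener l) {
    changes.addPropertyChangeListener(l);
  }

  public void addWebsite(String url) {
    websites.add(url);
  }

  public void clearWebsites() {
    // Snapshot the old value before mutating, so listeners receive both states.
    TreeSet<String> oldValue = new TreeSet<>(websites);
    websites.clear();
    changes.firePropertyChange("websites", oldValue, websites);
  }

  public static void main(String[] args) {
    WebsiteModelSketch model = new WebsiteModelSketch();
    model.addListener(e -> System.out.println(e.getOldValue() + " -> " + e.getNewValue()));
    model.addWebsite("https://example.org");
    model.clearWebsites(); // prints [https://example.org] -> []
  }
}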
/**
 * Takes all other instances from the ontology, except those in the fullPositiveSet (see
 * constructor).
 */
public void makeNegativeExamplesFromAllOtherInstances() {
  logger.debug("making random examples ");
  fromAllOther.clear();
  fromAllOther.addAll(reasoningService.getIndividuals());
  fromAllOther.removeAll(fullPositiveSet);
  logger.debug("|-negExample size from random: " + fromAllOther.size());
}
/**
 * Misleading method name: the examples are all instances from the a-part of atomicRole(a,b); it
 * has nothing to do with the actual domain class.
 *
 * @param atomicRole
 */
public void makeNegativeExamplesFromDomain(OWLObjectProperty atomicRole) {
  fromDomain.clear();
  logger.debug("making Negative Examples from Domain of : " + atomicRole);
  fromDomain.addAll(reasoningService.getPropertyMembers(atomicRole).keySet());
  fromDomain.removeAll(fullPositiveSet);
  logger.debug("|-neg Example size from Domain: " + this.fromDomain.size());
}
@Override
public void clear() {
  resizeLock.writeLock().lock();
  try {
    synchronized (entries) {
      synchronized (freeList) {
        // wait until all readers are done reading file entries
        for (FileEntry fe : entries.values()) fe.waitUnlocked();
        for (FileEntry fe : freeList) fe.waitUnlocked();

        // clear in-memory state
        entries.clear();
        freeList.clear();

        // reset file
        if (trace) log.tracef("Truncating file, current size is %d", filePos);
        channel.truncate(0);
        channel.write(ByteBuffer.wrap(MAGIC), 0);
        filePos = MAGIC.length;
      }
    }
  } catch (Exception e) {
    throw new PersistenceException(e);
  } finally {
    resizeLock.writeLock().unlock();
  }
}
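// Besides clearing its in-memory maps, the store above resets its backing file by truncating it
// and rewriting the magic header. A minimal, self-contained sketch of that file-reset step using
// java.nio.channels.FileChannel (the file name and magic bytes are made up for illustration):
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class FileResetSketch {
  private static final byte[] MAGIC = {'S', 'K', 'E', 'T'}; // illustrative header bytes

  // Truncate the file and write the magic header back; returns the next write position.
  static long resetFile(Path path) throws IOException {
    try (FileChannel channel =
        FileChannel.open(
            path, StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) {
      channel.truncate(0);
      channel.write(ByteBuffer.wrap(MAGIC), 0);
      return MAGIC.length;
    }
  }

  public static void main(String[] args) throws IOException {
    long filePos = resetFile(Path.of("store.dat"));
    System.out.println("next write position: " + filePos); // 4
  }
}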
@Override
public void update(Observable o, Object arg) {
  Set<Integer> oldSelection = new HashSet<Integer>();
  oldSelection.addAll(selection);
  SortedSet<Feature> fs = model.selectionModel().getFeatureSelection();
  // System.out.println(fs);
  // int prevIndex = selectedIndex;
  if (fs.size() > 0) {
    for (Feature f : fs) {
      selection.add(listModel.getRow(f));
    }
  } else {
    selection.clear();
  }
  oldSelection.retainAll(selection);
  if (oldSelection.size() != selection.size()) {
    fireValueChanged(false);
  }
  // getSelectionModel().setSelectionInterval(row, row);
}
@Override
public boolean removeAllChildren(String folderId) {
  // Redis command: ZREMRANGEBYSCORE or ZREM
  SortedSet<FolderChild> children = getChildrenInner(folderId);
  children.clear();
  return true;
}
public void clear() throws MaltChainedException {
  edgePool.checkInAll();
  graphEdges.clear();
  root.clear();
  super.clear();
  numberOfComponents++;
}
@Override
public void clearChapters() {
  LOG.trace("Clear list of chapters");
  TreeSet<Chapter> oldValue = new TreeSet<>(loadedChapters);
  loadedChapters.clear();
  changes.firePropertyChange(
      ModelProperties.CHAPTER_LOADED.getPropertyName(), oldValue, loadedChapters);
}
@Override
public SortedSet<FolderChange> removeAllFolderChanges(String folderId) {
  // Redis command: ZREM
  SortedSet<FolderChange> changes = getFolderChanges(folderId);
  SortedSet<FolderChange> ret = new ConcurrentSkipListSet<FolderChange>(changes);
  changes.clear();
  return ret;
}
/** Clear the list of all events. */
public void clear() {
  synchronized (mLock) {
    mAllEvents.clear();
    mFilteredEvents = new EventDetails[0];
    mPendingEvents.clear();
    fireTableDataChanged();
  }
}
@Override
public void launchDownload() {
  LOG.trace("Launch download of chapters [ToDownloadChapters =" + toDownloadChapters + "]");
  List<Chapter> toDownload = new LinkedList<>(toDownloadChapters);
  Services.getDownloadServices().download(toDownload);
  toDownloadChapters.clear();
  changes.firePropertyChange(
      ModelProperties.CHAPTER_TO_DOWNLOAD.getPropertyName(), toDownload, toDownloadChapters);
}
/**
 * Misleading method name: the examples are all instances from the b-part of atomicRole(a,b); it
 * has nothing to do with the actual range class.
 *
 * @param atomicRole
 */
public void makeNegativeExamplesFromRange(OWLObjectProperty atomicRole) {
  fromRange.clear();
  logger.debug("making Negative Examples from Range of : " + atomicRole);
  Collection<SortedSet<OWLIndividual>> tmp =
      reasoningService.getPropertyMembers(atomicRole).values();
  for (SortedSet<OWLIndividual> set : tmp) {
    fromRange.addAll(set);
  }
  fromRange.removeAll(fullPositiveSet);
  logger.debug("|-neg Example size from Range: " + fromRange.size());
}
public void clear() {
  if (taskMap != null) {
    taskMap.clear();
  }
  if (timeTriggers != null) {
    timeTriggers.clear();
  }
  if (triggerMap != null) {
    triggerMap.clear();
  }
}
/**
 * If positive examples derive from one class, then negative examples are taken from a superclass.
 * CURRENTLY THE SAME METHOD AS makeNegativeExamplesFromSuperClasses(OWLClass concept), but works
 * quite often.
 *
 * @param concept
 * @param depth PARAMETER CURRENTLY NOT USED, ONLY DIRECT SUPERCLASSES
 */
public void makeNegativeExamplesFromSuperClasses(OWLClass concept, int depth) {
  fromSuperclasses.clear();
  SortedSet<OWLClassExpression> superClasses = reasoningService.getSuperClasses(concept);
  logger.debug("making neg Examples from " + superClasses.size() + " superclasses");
  for (OWLClassExpression oneSuperClass : superClasses) {
    logger.debug(oneSuperClass);
    fromSuperclasses.addAll(reasoningService.getIndividuals(oneSuperClass));
  }
  this.fromSuperclasses.removeAll(fullPositiveSet);
  logger.debug("|-neg Example from superclass: " + fromSuperclasses.size());
}
/**
 * Returns the Attr[]s to be outputted for the given element. <br>
 * The code of this method is a copy of {@link #handleAttributes(Element, NameSpaceSymbTable)},
 * whereas it takes into account that subtree-c14n is -- well -- subtree-based. So if the element
 * in question is the root of the c14n, its parent is not in the node set, nor are any other
 * ancestors.
 *
 * @param E
 * @param ns
 * @return the Attr[]s to be outputted
 * @throws CanonicalizationException
 */
Iterator handleAttributesSubtree(Element E, NameSpaceSymbTable ns)
    throws CanonicalizationException {
  if (!E.hasAttributes() && !firstCall) {
    return null;
  }
  // result will contain the attrs which have to be outputted
  final SortedSet result = this.result;
  result.clear();
  NamedNodeMap attrs = E.getAttributes();
  int attrsLength = attrs.getLength();

  for (int i = 0; i < attrsLength; i++) {
    Attr N = (Attr) attrs.item(i);
    String NUri = N.getNamespaceURI();

    if (XMLNS_URI != NUri) {
      // It's not a namespace attr node. Add to the result and continue.
      result.add(N);
      continue;
    }

    String NName = N.getLocalName();
    String NValue = N.getValue();
    if (XML.equals(NName) && XML_LANG_URI.equals(NValue)) {
      // The default mapping for xml must not be output.
      continue;
    }

    Node n = ns.addMappingAndRender(NName, NValue, N);
    if (n != null) {
      // Render the ns definition
      result.add(n);
      if (C14nHelper.namespaceIsRelative(N)) {
        Object exArgs[] = {E.getTagName(), NName, N.getNodeValue()};
        throw new CanonicalizationException("c14n.Canonicalizer.RelativeNamespace", exArgs);
      }
    }
  }

  if (firstCall) {
    // It is the first node of the subtree.
    // Obtain all the namespaces defined in the parents, and add them to the output.
    ns.getUnrenderedNodes(result);
    // output the attributes in the xml namespace.
    xmlattrStack.getXmlnsAttr(result);
    firstCall = false;
  }

  return result.iterator();
}
public void testElementSetSubsetClear() {
  TreeMultiset<String> ms = TreeMultiset.create();
  ms.add("a", 1);
  ms.add("b", 3);
  ms.add("c", 2);
  ms.add("d", 1);
  ms.add("e", 3);
  ms.add("f", 2);
  SortedSet<String> elementSet = ms.elementSet();
  ASSERT.that(elementSet).has().exactly("a", "b", "c", "d", "e", "f").inOrder();
  SortedSet<String> subset = elementSet.subSet("b", "f");
  ASSERT.that(subset).has().exactly("b", "c", "d", "e").inOrder();

  subset.clear();
  ASSERT.that(elementSet).has().exactly("a", "f").inOrder();
  ASSERT.that(subset).isEmpty();
  assertEquals(3, ms.size());
}
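// The test above relies on subSet returning a live view: clearing the view removes those elements
// (and all their occurrences) from the backing multiset, leaving only "a" (1) and "f" (2), hence
// the final size of 3. The same view semantics hold for any JDK NavigableSet; a minimal sketch
// with a plain TreeSet (used here purely for illustration):
import java.util.SortedSet;
import java.util.TreeSet;

public class SubSetClearSketch {
  public static void main(String[] args) {
    TreeSet<String> set = new TreeSet<>();
    for (String s : new String[] {"a", "b", "c", "d", "e", "f"}) {
      set.add(s);
    }
    // subSet returns a view backed by the original set: range ["b", "f")
    SortedSet<String> middle = set.subSet("b", "f");
    middle.clear(); // removes b, c, d, e from the backing set as well
    System.out.println(set); // [a, f]
    System.out.println(middle); // []
  }
}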
/** Moves a temporary file to a final log filename and enrolls it. */
private synchronized long createEntry(File temp, String tag, int flags) throws IOException {
  long t = System.currentTimeMillis();

  // Require each entry to have a unique timestamp; if there are entries
  // >10sec in the future (due to clock skew), drag them back to avoid
  // keeping them around forever.
  SortedSet<EntryFile> tail = mAllFiles.contents.tailSet(new EntryFile(t + 10000));
  EntryFile[] future = null;
  if (!tail.isEmpty()) {
    future = tail.toArray(new EntryFile[tail.size()]);
    tail.clear(); // Remove from mAllFiles
  }

  if (!mAllFiles.contents.isEmpty()) {
    t = Math.max(t, mAllFiles.contents.last().timestampMillis + 1);
  }

  if (future != null) {
    for (EntryFile late : future) {
      mAllFiles.blocks -= late.blocks;
      FileList tagFiles = mFilesByTag.get(late.tag);
      if (tagFiles != null && tagFiles.contents.remove(late)) {
        tagFiles.blocks -= late.blocks;
      }
      if ((late.flags & DropBoxManager.IS_EMPTY) == 0) {
        enrollEntry(
            new EntryFile(late.file, mDropBoxDir, late.tag, t++, late.flags, mBlockSize));
      } else {
        enrollEntry(new EntryFile(mDropBoxDir, late.tag, t++));
      }
    }
  }

  if (temp == null) {
    enrollEntry(new EntryFile(mDropBoxDir, tag, t));
  } else {
    enrollEntry(new EntryFile(temp, mDropBoxDir, tag, t, flags, mBlockSize));
  }
  return t;
}
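// The key move above is that tailSet(...) on the sorted file index is a live view, so clearing it
// drops every entry whose timestamp sorts at or after the cutoff from mAllFiles.contents in one
// call; the entries are first copied into the future array so they can be re-enrolled with
// corrected timestamps. A minimal sketch of that view-clear pattern on a plain TreeSet<Long> of
// timestamps (the names are illustrative only):
import java.util.SortedSet;
import java.util.TreeSet;

public class TailSetClearSketch {
  public static void main(String[] args) {
    long now = 1_000L;
    TreeSet<Long> timestamps = new TreeSet<>();
    timestamps.add(now - 5);
    timestamps.add(now + 3);
    timestamps.add(now + 20_000); // far in the "future", e.g. due to clock skew
    timestamps.add(now + 30_000);

    // View of everything at or after the cutoff, backed by the original set.
    SortedSet<Long> future = timestamps.tailSet(now + 10_000);
    Long[] dragged = future.toArray(new Long[0]); // keep them for re-insertion
    future.clear(); // removes the skewed entries from 'timestamps' itself

    System.out.println(timestamps); // [995, 1003]
    System.out.println(dragged.length); // 2
  }
}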
/**
 * Resort all of the players and teams in the pool
 *
 * @pre true
 * @post All of the players and teams are sorted
 */
public void resort() {
  // Resort players
  for (PoolTeam pt : m_poolTeams) {
    Iterator<Player> it = pt.playersIterator();
    List<Player> players = new ArrayList<Player>();
    while (it.hasNext()) players.add(it.next());
    pt.clearPlayers();
    for (Player p : players) {
      pt.addPlayer(p);
    }
  }

  // Resort Pool Teams
  List<PoolTeam> tempTeamList = new ArrayList<PoolTeam>();
  for (PoolTeam pt : m_poolTeams) {
    tempTeamList.add(pt);
  }
  m_poolTeams.clear();
  for (PoolTeam pt : tempTeamList) {
    m_poolTeams.add(pt);
  }
}
/** headSet returns set with keys in requested range */
public void testDescendingHeadSetContents() {
  NavigableSet set = dset5();
  SortedSet sm = set.headSet(m4);
  assertTrue(sm.contains(m1));
  assertTrue(sm.contains(m2));
  assertTrue(sm.contains(m3));
  assertFalse(sm.contains(m4));
  assertFalse(sm.contains(m5));
  Iterator i = sm.iterator();
  Object k;
  k = (Integer) (i.next());
  assertEquals(m1, k);
  k = (Integer) (i.next());
  assertEquals(m2, k);
  k = (Integer) (i.next());
  assertEquals(m3, k);
  assertFalse(i.hasNext());
  sm.clear();
  assertTrue(sm.isEmpty());
  assertEquals(2, set.size());
  assertEquals(m4, set.first());
}
private void initializeArrowsBackward(Graph graph) {
  sortedArrows.clear();
  lookupArrows.clear();

  for (Edge edge : graph.getEdges()) {
    Node x = edge.getNode1();
    Node y = edge.getNode2();

    if (!knowledgeEmpty()) {
      if (!getKnowledge().noEdgeRequired(x.getName(), y.getName())) {
        continue;
      }
    }

    if (Edges.isDirectedEdge(edge)) {
      calculateArrowsBackward(x, y, graph);
    } else {
      calculateArrowsBackward(x, y, graph);
      calculateArrowsBackward(y, x, graph);
    }
  }
}
public void refreshMenu() throws ArcturusDatabaseException {
  removeAll();

  Person me = adb.findMe();

  Set<Project> mypset = null;
  if (adb.isCoordinator()) mypset = adb.getAllProjects();
  else mypset = adb.getProjectsForOwner(me);

  SortedSet<Project> myProjects = new TreeSet<Project>(comparator);

  if (mypset != null && !mypset.isEmpty()) {
    myProjects.addAll(mypset);

    for (Project project : myProjects)
      if (!project.isBin()) {
        ContigTransferAction action = new ContigTransferAction(source, project);
        action.setEnabled(project.isActive());
        add(action);
      }

    Set<Project> bin = null;
    bin = adb.getBinProjects();

    if (bin != null) {
      myProjects.clear();
      myProjects.addAll(bin);

      addSeparator();

      for (Project project : myProjects) add(new ContigTransferAction(source, project));
    }

    if (getMenuComponentCount() > 40) {
      VerticalGridLayout menuGrid = new VerticalGridLayout(40, 0);
      getPopupMenu().setLayout(menuGrid);
    }
  }
}
/** headSet returns set with keys in requested range */
public void testHeadSetContents() {
  NavigableSet set = set5();
  SortedSet sm = set.headSet(four);
  assertTrue(sm.contains(one));
  assertTrue(sm.contains(two));
  assertTrue(sm.contains(three));
  assertFalse(sm.contains(four));
  assertFalse(sm.contains(five));
  Iterator i = sm.iterator();
  Object k;
  k = (Integer) (i.next());
  assertEquals(one, k);
  k = (Integer) (i.next());
  assertEquals(two, k);
  k = (Integer) (i.next());
  assertEquals(three, k);
  assertFalse(i.hasNext());
  sm.clear();
  assertTrue(sm.isEmpty());
  assertEquals(2, set.size());
  assertEquals(four, set.first());
}
public Graph search(List<Node> nodes) {
  long startTime = System.currentTimeMillis();
  localScoreCache.clear();

  if (!dataSet().getVariables().containsAll(nodes)) {
    throw new IllegalArgumentException("All of the nodes must be in " + "the supplied data set.");
  }

  Graph graph;

  if (initialGraph == null) {
    graph = new EdgeListGraphSingleConnections(nodes);
  } else {
    initialGraph = GraphUtils.replaceNodes(initialGraph, variables);
    graph = new EdgeListGraphSingleConnections(initialGraph);
  }

  topGraphs.clear();

  buildIndexing(graph);
  addRequiredEdges(graph);
  score = 0.0;

  // Do forward search.
  fes(graph, nodes);

  // Do backward search.
  bes(graph);

  long endTime = System.currentTimeMillis();
  this.elapsedTime = endTime - startTime;
  this.logger.log("graph", "\nReturning this graph: " + graph);

  this.logger.log("info", "Elapsed time = " + (elapsedTime) / 1000. + " s");
  this.logger.flush();

  return graph;
}
/**
 * Greedy equivalence search: start from the empty graph, add edges until the model is
 * significant, then delete edges until a minimum is achieved.
 *
 * @return the resulting Pattern.
 */
public Graph search() {
  Graph graph;

  if (initialGraph == null) {
    graph = new EdgeListGraphSingleConnections(getVariables());
  } else {
    graph = new EdgeListGraphSingleConnections(initialGraph);
  }

  fireGraphChange(graph);
  buildIndexing(graph);
  addRequiredEdges(graph);

  topGraphs.clear();
  storeGraph(graph);

  List<Node> nodes = graph.getNodes();

  long start = System.currentTimeMillis();
  score = 0.0;

  // Do forward search.
  fes(graph, nodes);

  // Do backward search.
  bes(graph);

  long endTime = System.currentTimeMillis();
  this.elapsedTime = endTime - start;
  this.logger.log("graph", "\nReturning this graph: " + graph);

  this.logger.log("info", "Elapsed time = " + (elapsedTime) / 1000. + " s");
  this.logger.flush();

  return graph;
}
/** Destroys the queue, discarding all pages. */
public synchronized void destroy() {
  closed.set(true);
  state = QueueState.FINISHED;

  // drop all of the queues
  for (NamedQueue namedQueue : openQueuesBySequenceId) {
    namedQueue.setFinished();
  }
  openQueuesBySequenceId.clear();

  // clear the buffer
  masterQueue.clear();
  bufferedBytes = 0;

  // free queued page waiters
  for (QueuedPage queuedPage : queuedPages) {
    queuedPage.getFuture().set(null);
  }
  queuedPages.clear();

  // notify readers that the buffer has been destroyed
  this.notifyAll();
}
@Test
public void testAddRemoveClear() throws Exception {
  Map map = MapTests.createDefaultMap("typename", 2, true, null); // $NON-NLS-1$
  map.getLayersInternal()
      .add(map.getLayerFactory().createLayer(MapTests.createGeoResource("type2", 3, false))); // $NON-NLS-1$
  CompositeRendererImpl renderer =
      (CompositeRendererImpl) map.getRenderManagerInternal().getRenderExecutor().getRenderer();
  renderer.getContext().clear();

  RendererCreator creator = map.getRenderManagerInternal().getRendererCreator();

  SortedSet<Layer> layers = creator.getLayers();
  layers.clear();
  layers.addAll(map.getLayersInternal());
  layers.add(new SelectionLayer(map.getLayersInternal().get(0)));
  layers.add(new SelectionLayer(map.getLayersInternal().get(1)));
  creator.reset();

  CompositeRenderContextImpl comp = new CompositeRenderContextImpl();
  comp.addContexts(creator.getConfiguration());

  Iterator iter = comp.getContexts().iterator();
  RenderContext executor = (RenderContext) iter.next();
  assertEquals(map.getLayersInternal().get(0), executor.getLayer());
  executor = (RenderContext) iter.next();
  assertEquals(map.getLayersInternal().get(1), executor.getLayer());
  executor = (RenderContext) iter.next();
  SelectionLayer sl = (SelectionLayer) executor.getLayer();
  assertEquals(map.getLayersInternal().get(0), sl.getWrappedLayer());
  executor = (RenderContext) iter.next();
  sl = (SelectionLayer) executor.getLayer();
  assertEquals(map.getLayersInternal().get(1), sl.getWrappedLayer());
}
/**
 * Returns the Attr[]s to be outputted for the given element. <br>
 * IMPORTANT: This method expects to work on a modified DOM tree, i.e. a DOM which has been
 * prepared using {@link
 * com.sun.org.apache.xml.internal.security.utils.XMLUtils#circumventBug2650(
 * org.w3c.dom.Document)}.
 *
 * @param E
 * @param ns
 * @return the Attr[]s to be outputted
 * @throws CanonicalizationException
 */
Iterator handleAttributes(Element E, NameSpaceSymbTable ns) throws CanonicalizationException {
  // result will contain the attrs which have to be outputted
  xmlattrStack.push(ns.getLevel());
  boolean isRealVisible = isVisibleDO(E, ns.getLevel()) == 1;
  NamedNodeMap attrs = null;
  int attrsLength = 0;
  if (E.hasAttributes()) {
    attrs = E.getAttributes();
    attrsLength = attrs.getLength();
  }

  SortedSet result = this.result;
  result.clear();

  for (int i = 0; i < attrsLength; i++) {
    Attr N = (Attr) attrs.item(i);
    String NUri = N.getNamespaceURI();

    if (XMLNS_URI != NUri) {
      // A non namespace definition node.
      if (XML_LANG_URI == NUri) {
        xmlattrStack.addXmlnsAttr(N);
      } else if (isRealVisible) {
        // The node is visible; add the attribute to the list of output attributes.
        result.add(N);
      }
      // keep working
      continue;
    }

    String NName = N.getLocalName();
    String NValue = N.getValue();
    if ("xml".equals(NName) && XML_LANG_URI.equals(NValue)) {
      /* except omit namespace node with local name xml, which defines
       * the xml prefix, if its string value is http://www.w3.org/XML/1998/namespace. */
      continue;
    }

    // add the prefix binding to the ns symb table.
    // ns.addInclusiveMapping(NName,NValue,N,isRealVisible);
    if (isVisible(N)) {
      if (!isRealVisible && ns.removeMappingIfRender(NName)) {
        continue;
      }
      // The xpath selected this node; output it if needed.
      // Node n=ns.addMappingAndRenderXNodeSet(NName,NValue,N,isRealVisible);
      Node n = ns.addMappingAndRender(NName, NValue, N);
      if (n != null) {
        result.add(n);
        if (C14nHelper.namespaceIsRelative(N)) {
          Object exArgs[] = {E.getTagName(), NName, N.getNodeValue()};
          throw new CanonicalizationException("c14n.Canonicalizer.RelativeNamespace", exArgs);
        }
      }
    } else {
      if (isRealVisible && NName != XMLNS) {
        ns.removeMapping(NName);
      } else {
        ns.addMapping(NName, NValue, N);
      }
    }
  }

  if (isRealVisible) {
    // The element is visible; handle the xmlns definition.
    Attr xmlns = E.getAttributeNodeNS(XMLNS_URI, XMLNS);
    Node n = null;
    if (xmlns == null) {
      // No xmlns def; just get the already defined one.
      n = ns.getMapping(XMLNS);
    } else if (!isVisible(xmlns)) {
      // There is a definition but the xmlns is not selected by the xpath,
      // so xmlns=""
      n = ns.addMappingAndRender(XMLNS, "", nullNode);
    }
    // output the xmlns def if needed.
    if (n != null) {
      result.add(n);
    }
    // Float all xml:* attributes of the unselected parent elements to this one.
    // addXmlAttributes(E,result);
    xmlattrStack.getXmlnsAttr(result);
    ns.getUnrenderedNodes(result);
  }

  return result.iterator();
}