/**
 * Find the maximum weight matching of a path using dynamic programming.
 *
 * <p>a[i] holds the best matching weight achievable using only the first i edges of the path;
 * a[i] is either a[i-1] (edge i unused) or a[i-2] + weight(i) (edge i used, which forbids edge
 * i-1 since adjacent path edges share a vertex). Negative-weight edges are never forced in.
 *
 * @param path a list of edges. The code assumes that the list of edges is a valid simple path,
 *     and that is not a cycle.
 * @return a maximum weight matching of the path
 */
public Pair<Double, Set<E>> getMaximumWeightMatching(Graph<V, E> g, LinkedList<E> path) {
  int pathLength = path.size();
  // special cases
  switch (pathLength) {
    case 0:
      // special case, empty path
      return Pair.of(Double.valueOf(0d), Collections.emptySet());
    case 1:
      // special case, one edge: take it only if its weight beats the empty matching
      E e = path.getFirst();
      double eWeight = g.getEdgeWeight(e);
      if (comparator.compare(eWeight, 0d) > 0) {
        return Pair.of(eWeight, Collections.singleton(e));
      } else {
        return Pair.of(Double.valueOf(0d), Collections.emptySet());
      }
  }
  // make sure work array has enough space (the field 'a' is reused across calls)
  if (a.length < pathLength + 1) {
    a = new double[pathLength + 1];
  }
  // first pass to find solution: fill a[0..pathLength] front-to-back
  Iterator<E> it = path.iterator();
  E e = it.next();
  double eWeight = g.getEdgeWeight(e);
  a[0] = 0d;
  a[1] = (comparator.compare(eWeight, 0d) > 0) ? eWeight : 0d;
  for (int i = 2; i <= pathLength; i++) {
    e = it.next();
    eWeight = g.getEdgeWeight(e);
    // either skip edge i, or take it and give up edge i-1
    if (comparator.compare(a[i - 1], a[i - 2] + eWeight) > 0) {
      a[i] = a[i - 1];
    } else {
      a[i] = a[i - 2] + eWeight;
    }
  }
  // reverse second pass to build solution: walk edges back-to-front, and whenever edge i
  // strictly improved on a[i-1] it must be in the matching, so also skip edge i-1
  Set<E> matching = new HashSet<>();
  it = path.descendingIterator();
  int i = pathLength;
  while (i >= 1) {
    e = it.next();
    if (comparator.compare(a[i], a[i - 1]) > 0) {
      matching.add(e);
      // skip next edge
      if (i > 1) {
        e = it.next();
      }
      i--;
    }
    i--;
  }
  // return solution
  return Pair.of(a[pathLength], matching);
}
/**
 * Returns an iterator that yields the elements of {@code values} in reverse order.
 *
 * <p>The input is snapshotted into a fresh list, so later changes to {@code values} are
 * not reflected by the returned iterator.
 */
private static <T> Iterator<T> reverse(Iterable<T> values) {
  LinkedList<T> snapshot = new LinkedList<>();
  // Prepending each element reverses the order, so a plain forward iterator suffices.
  for (T element : values) {
    snapshot.addFirst(element);
  }
  return snapshot.iterator();
}
@Override
public String toString() {
  // Walk from this context up the ancestor chain, prepending keys so the final
  // list reads root-first; placeholder NoKey entries are omitted.
  LinkedList<Key> keys = new LinkedList<>();
  for (DelegatingMarshalContext context = this; context != null; context = context.ancestor) {
    Key key = context.contextKey;
    if (!(key instanceof NoKey)) {
      keys.addFirst(key);
    }
  }
  // Render as: context 'root/child/.../leaf'
  StringBuilder builder = new StringBuilder(256).append("context '");
  String separator = "";
  for (Key key : keys) {
    builder.append(separator).append(key);
    separator = "/";
  }
  return builder.append('\'').toString();
}
/**
 * Evaluates {@code f} against the sequences from last to first and returns the first
 * non-null result, or {@code null} if every evaluation yields null (or the list is empty).
 */
public <T> T getLastExisting(UnaryFunction<T, ExecutionSequence> f) {
  if (parallel.isEmpty()) {
    return null;
  }
  Iterator<ExecutionSequence> backwards = parallel.descendingIterator();
  while (backwards.hasNext()) {
    T result = f.evaluate(backwards.next());
    if (result != null) {
      return result;
    }
  }
  return null;
}
/**
 * Runs {@code proc} on every sequence, front-to-back when {@code ascending} is true,
 * back-to-front otherwise. No-op for an empty list.
 */
public void apply(boolean ascending, UnaryProcedure<ExecutionSequence> proc) {
  if (parallel.isEmpty()) {
    return;
  }
  Iterator<ExecutionSequence> order =
      ascending ? parallel.iterator() : parallel.descendingIterator();
  while (order.hasNext()) {
    proc.execute(order.next());
  }
}
@SuppressWarnings("deprecation") private void autoWire( CompatibilityLevel level, InputPorts inputPorts, LinkedList<OutputPort> readyOutputs) throws PortException { boolean success = false; do { Set<InputPort> complete = new HashSet<InputPort>(); for (InputPort in : inputPorts.getAllPorts()) { success = false; if (!in.isConnected() && !complete.contains(in) && in.getPorts().getOwner().getOperator().shouldAutoConnect(in)) { Iterator<OutputPort> outIterator; // TODO: Simon: Does the same in both cases. Check again. if (in.simulatesStack()) { outIterator = readyOutputs.descendingIterator(); } else { outIterator = readyOutputs.descendingIterator(); } while (outIterator.hasNext()) { OutputPort outCandidate = outIterator.next(); // TODO: Remove shouldAutoConnect() in later versions Operator owner = outCandidate.getPorts().getOwner().getOperator(); if (owner.shouldAutoConnect(outCandidate)) { if (outCandidate.getMetaData() != null) { if (in.isInputCompatible(outCandidate.getMetaData(), level)) { readyOutputs.remove(outCandidate); outCandidate.connectTo(in); // we cannot continue with the remaining input ports // since connecting may have triggered the creation of new input ports // which would result in undefined behavior and a ConcurrentModificationException success = true; break; } } } } // no port found. complete.add(in); if (success) { break; } } } } while (success); }
Geometry toShape(List<LineSegment> path, double h) { // TODO: take into account letter alignment // turn the path into a single polygon by generating points orthogonal // to the individual line segments GeomBuilder gb = new GeomBuilder(); LinkedList<Coordinate> top = new LinkedList<Coordinate>(); for (int i = 0; i < path.size(); i++) { LineSegment seg = path.get(i); Coordinate p0 = seg.p0; Coordinate p1 = seg.p1; double theta = seg.angle(); gb.points(p0.x, p0.y); // generate the perpendicular point at a distance of h Coordinate p2 = new Coordinate(); if (theta > 0) { if (theta <= HALFPI) { // ne double phi = Math.PI - (HALFPI + theta); p2.x = (Math.cos(phi) * h - p0.x) * -1; p2.y = Math.sin(phi) * h + p0.y; } else { // nw double phi = Math.PI - theta; p2.x = Math.cos(phi) * h + p0.x; p2.y = Math.sin(phi) * h + p0.y; } } else { theta = Math.abs(theta); if (theta < HALFPI) { double phi = HALFPI - theta; p2.x = (Math.cos(phi) * h + p0.x); p2.y = (Math.sin(phi) * h + p0.y); } else { double phi = theta = HALFPI; p2.x = Math.cos(phi) * h + p0.x; p2.y = (Math.sin(phi) * h - p0.y) * -1; } } top.add(p2); if (i == path.size() - 1) { gb.points(p1.x, p1.y); top.add(new Coordinate(p1.x + p2.x - p0.x, p1.y + p2.y - p0.y)); } } for (Iterator<Coordinate> it = top.descendingIterator(); it.hasNext(); ) { Coordinate c = it.next(); gb.points(c.x, c.y); } return gb.toPolygon(); }
/**
 * Compute the unique decomposition of the input graph G (atoms of G). Implementation of algorithm
 * Atoms as described in Berry et al. (2010), DOI:10.3390/a3020197, <a
 * href="http://www.mdpi.com/1999-4893/3/2/197">http://www.mdpi.com/1999-4893/3/2/197</a>
 */
private void computeAtoms() {
  // The decomposition works on the minimal triangulation; build it on demand.
  if (chordalGraph == null) {
    computeMinimalTriangulation();
  }
  separators = new HashSet<>();
  // initialize g' as subgraph of graph (same vertices and edges)
  UndirectedGraph<V, E> gprime = copyAsSimpleGraph(graph);
  // initialize h' as subgraph of chordalGraph (same vertices and edges)
  UndirectedGraph<V, E> hprime = copyAsSimpleGraph(chordalGraph);
  atoms = new HashSet<>();
  // Process vertices in reverse minimal elimination order (meo).
  Iterator<V> iterator = meo.descendingIterator();
  while (iterator.hasNext()) {
    V v = iterator.next();
    if (generators.contains(v)) {
      // Candidate separator: v's neighborhood in the shrinking triangulation h'.
      Set<V> separator = new HashSet<>(Graphs.neighborListOf(hprime, v));
      // Only neighborhoods that form a clique in the ORIGINAL graph are separators here.
      if (isClique(graph, separator)) {
        if (separator.size() > 0) {
          if (separators.contains(separator)) {
            // Seen before: one more full component hangs off this separator.
            fullComponentCount.put(separator, fullComponentCount.get(separator) + 1);
          } else {
            // First sighting: a separator yields at least two components.
            fullComponentCount.put(separator, 2);
            separators.add(separator);
          }
        }
        // Removing the separator from g' must disconnect it; the connected component
        // containing v, together with the separator, is one atom.
        UndirectedGraph<V, E> tmpGraph = copyAsSimpleGraph(gprime);
        tmpGraph.removeAllVertices(separator);
        ConnectivityInspector<V, E> con = new ConnectivityInspector<>(tmpGraph);
        if (con.isGraphConnected()) {
          throw new RuntimeException("separator did not separate the graph");
        }
        for (Set<V> component : con.connectedSets()) {
          if (component.contains(v)) {
            // Carve the atom out of g' and record it (separator vertices stay in g').
            gprime.removeAllVertices(component);
            component.addAll(separator);
            atoms.add(new HashSet<>(component));
            assert (component.size() > 0);
            break;
          }
        }
      }
    }
    // v is eliminated from the triangulation whether or not it produced an atom.
    hprime.removeVertex(v);
  }
  // Whatever is left of g' forms the final atom.
  if (gprime.vertexSet().size() > 0) {
    atoms.add(new HashSet<>(gprime.vertexSet()));
  }
}
/**
 * Set history in database: deletes all existing rows for this history table, then inserts
 * the given entries in an order determined by the {@code direction} field.
 *
 * @param list list with history data
 * @throws SQLException if any database operation fails
 */
public void setHistory(LinkedList<T> list) throws SQLException {
  PreparedStatement preparedStatement;
  synchronized (lock) {
    Database database = null;
    try {
      // open database
      database = new Database(HISTORY_DATABASE_NAME);
      // delete old history
      preparedStatement = database.prepareStatement("DELETE FROM " + name);
      preparedStatement.executeUpdate();
      database.commit();
      // add new history; insertion order controls how rows come back later
      Iterator<T> iterator = null;
      switch (direction) {
        case ASCENDING:
          // insert back-to-front
          iterator = list.descendingIterator();
          break;
        case DESCENDING:
          iterator = list.iterator();
          break;
        case SORTED:
          // sort in place via the subclass comparison hook, then insert front-to-back
          Collections.sort(
              list,
              new Comparator<T>() {
                public int compare(T data0, T data1) {
                  return dataCompareTo(data0, data1);
                }
              });
          iterator = list.iterator();
          break;
      }
      // NOTE(review): if 'direction' ever gains another constant, 'iterator' stays null and
      // the loop below throws NullPointerException — confirm the switch is exhaustive.
      while (iterator.hasNext()) {
        T data = iterator.next();
        preparedStatement = prepareInsert(database, data);
        preparedStatement.executeUpdate();
      }
      database.commit();
      // close database (null-ing signals the finally block that close succeeded)
      database.close();
      database = null;
    } finally {
      // best-effort close if anything above threw before the normal close
      if (database != null)
        try {
          database.close();
        } catch (SQLException unusedException) {
          /* ignored */
        }
    }
  }
}
/**
 * Generates JS declarations for all open classes, ordering them so that a class is emitted
 * after the classes it inherits from.
 *
 * @param vars receives a named var for each class that is referenced by a subclass
 * @param propertyInitializers receives one initializer per open class
 */
private void generateOpenClassDeclarations(
    @NotNull List<JsVar> vars, @NotNull List<JsPropertyInitializer> propertyInitializers) {
  // first pass: set up list order — topological sort where a class's DFS neighbors are
  // its open superclasses (looked up in openClassDescriptorToItem)
  LinkedList<OpenClassInfo> sortedOpenClasses =
      (LinkedList<OpenClassInfo>)
          DFS.topologicalOrder(
              openList,
              new DFS.Neighbors<OpenClassInfo>() {
                @NotNull
                @Override
                public Iterable<OpenClassInfo> getNeighbors(OpenClassInfo current) {
                  LinkedList<OpenClassInfo> parents = new LinkedList<OpenClassInfo>();
                  ClassDescriptor classDescriptor =
                      getClassDescriptor(context().bindingContext(), current.declaration);
                  Collection<JetType> superTypes =
                      classDescriptor.getTypeConstructor().getSupertypes();
                  for (JetType type : superTypes) {
                    ClassDescriptor descriptor = getClassDescriptorForType(type);
                    OpenClassInfo item = openClassDescriptorToItem.get(descriptor);
                    if (item == null) {
                      // supertype is not one of the open classes being generated
                      continue;
                    }
                    // mark the parent as referenced so it gets a named var below
                    item.referencedFromOpenClass = true;
                    parents.add(item);
                  }
                  return parents;
                }
              });
  assert sortedOpenClasses.size() == openList.size();
  // second pass: generate, walking the topological order backwards (supertypes first)
  Iterator<OpenClassInfo> it = sortedOpenClasses.descendingIterator();
  while (it.hasNext()) {
    OpenClassInfo item = it.next();
    JsExpression translatedDeclaration =
        new ClassTranslator(item.declaration, item.descriptor, classDescriptorToLabel, context())
            .translate();
    JsExpression value;
    if (item.referencedFromOpenClass) {
      // referenced by a subclass: bind the declaration to a var and use its label
      vars.add(new JsVar(item.label.getName(), translatedDeclaration));
      value = item.label;
    } else {
      value = translatedDeclaration;
    }
    propertyInitializers.add(new JsPropertyInitializer(item.label, value));
  }
}
/** Returns a new LinkedHashSet containing the same vertices in reversed iteration order. */
private LinkedHashSet<Vertex> reverseOrderOfVertices(LinkedHashSet<Vertex> verticesOnPath) {
  // Copy to a list, reverse it, and rebuild a LinkedHashSet to capture the new order.
  LinkedList<Vertex> ordered = new LinkedList<Vertex>(verticesOnPath);
  Collections.reverse(ordered);
  return new LinkedHashSet<Vertex>(ordered);
}
/** * Return the LAST source tag (closest to AIS sentence) * * @return */ public IProprietarySourceTag getSourceTag() { if (tags == null) { return null; } // Iterate backwards for (Iterator<IProprietaryTag> iterator = tags.descendingIterator(); iterator.hasNext(); ) { IProprietaryTag tag = iterator.next(); if (tag instanceof IProprietarySourceTag) { return (IProprietarySourceTag) tag; } } return null; }
/**
 * Returns the last method in {@code allMethodsOfType} that {@code method} overrides
 * (searching back-to-front), or {@code method} itself if it overrides none of them.
 *
 * @throws RuntimeException (via {@code log}) if {@code method} both overrides another
 *     method and carries {@code @ValidateOnExecution}, which is not allowed
 */
public Method replaceWithOverriddenOrInterfaceMethod(
    Method method, List<Method> allMethodsOfType) {
  // Iterate the given list in reverse directly via a ListIterator instead of first
  // copying it into a LinkedList just to get a descendingIterator().
  java.util.ListIterator<Method> iterator =
      allMethodsOfType.listIterator(allMethodsOfType.size());
  while (iterator.hasPrevious()) {
    Method overriddenOrInterfaceMethod = iterator.previous();
    if (executableHelper.overrides(method, overriddenOrInterfaceMethod)) {
      if (method.getAnnotation(ValidateOnExecution.class) != null) {
        throw log.getValidateOnExecutionOnOverriddenOrInterfaceMethodException(method);
      }
      return overriddenOrInterfaceMethod;
    }
  }
  return method;
}
/**
 * Returns the index of the topmost (most recently pushed) page of the given class,
 * or -1 if no such page is on the stack.
 */
public int lastIndexOfPage(Class<? extends Page> pageClass) {
  if (mPageStack.size() == 0) {
    return -1;
  }
  // Scan from the top of the stack so the LAST occurrence is found first.
  int index = mPageStack.size();
  for (Iterator<Page> it = mPageStack.descendingIterator(); it.hasNext(); ) {
    --index;
    if (it.next().getClass() == pageClass) {
      return index;
    }
  }
  return -1;
}
/**
 * Reads the file named by args[0], prints all its lines, then prints the lines containing
 * the word args[1] in reverse (last-to-first) order.
 */
public static void main(String[] args) throws FileNotFoundException, IOException {
  if (args.length != 2) {
    System.out.println("Please provide a file name to read and word to seek");
    System.exit(0);
  }
  LinkedList<String> lines = new LinkedList<String>();
  // try-with-resources guarantees the reader is closed even if readLine() throws
  try (BufferedReader reader = new BufferedReader(new FileReader(args[0]))) {
    String line;
    while ((line = reader.readLine()) != null) {
      lines.add(line);
    }
  }
  System.out.println(lines);
  // print matching lines in reverse file order
  Iterator<String> it = lines.descendingIterator();
  while (it.hasNext()) {
    String line = it.next();
    if (line.contains(args[1])) {
      System.out.println(line);
    }
  }
}
/**
 * Returns an iterator over {@code nodeList}: descending when the list's containing mediator
 * is marked reversed, ascending otherwise.
 */
private static Iterator<EsbNode> getNodeIterator(LinkedList<EsbNode> nodeList) {
  Iterator<EsbNode> iterator = nodeList.iterator();
  if (nodeList.size() > 0) {
    EditPart editpart = getEditpart(nodeList.getFirst());
    // Same nested checks as before, flattened via short-circuit && into guard conditions.
    if (editpart != null
        && editpart.getParent() instanceof AbstractMediatorCompartmentEditPart
        && editpart.getParent().getParent() instanceof ShapeNodeEditPart) {
      EditPart container = editpart.getParent().getParent().getParent();
      if (container instanceof complexFiguredAbstractMediator
          && ((complexFiguredAbstractMediator) container).reversed) {
        iterator = nodeList.descendingIterator();
      }
    }
  }
  return iterator;
}
// @include public static String ShortestEquivalentPath(String path) { LinkedList<String> pathNames = new LinkedList<>(); // Special case: starts with "/", which is an absolute path. if (path.startsWith("/")) { pathNames.push("/"); } for (String token : path.split("/")) { System.out.println(token); if (token.equals("..")) { if (pathNames.isEmpty() || pathNames.peek().equals("..")) { pathNames.push(token); } else { if (pathNames.peek().equals("/")) { throw new RuntimeException("Path error"); } pathNames.pop(); } } else if (!token.equals(".") && !token.isEmpty()) { // Must be a name. pathNames.push(token); } } StringBuilder result = new StringBuilder(); if (!pathNames.isEmpty()) { Iterator<String> it = pathNames.descendingIterator(); String prev = it.next(); result.append(prev); while (it.hasNext()) { if (!prev.equals("/")) { result.append("/"); } prev = it.next(); result.append(prev); } } return result.toString(); }
/**
 * This uses a fixed step, and can have variable window sizing to accommodate widely varying CG
 * density.
 *
 * <p>Maintains a sliding window (queue) of Cpg groups: the new group joins the head, groups
 * farther than maxScanningWindSize are dropped from the tail, and if enough groups remain the
 * trailing sub-window is summarized and processed.
 *
 * @param cpg Must be streamed in serially.
 */
protected void streamCpgVariableWind(Cpg[] cpg) {
  int newPos = cpg[0].chromPos;
  Logger.getLogger(Logger.GLOBAL_LOGGER_NAME)
      .fine(String.format("Variable wind found Cpg: %d\n", newPos));
  // Add this Cpg to the head of the queue
  window.add(cpg);
  // Remove cpgs from the tail until the window span fits within maxScanningWindSize
  boolean done = false;
  Cpg[] endCpg;
  while (!done && ((endCpg = window.peek()) != null)) {
    if ((newPos - endCpg[0].chromPos) < this.walkParams.maxScanningWindSize) {
      done = true;
    } else {
      window.remove();
    }
  }
  // System.err.println("\tChecking " + this.windStr());
  // And process the window
  // System.err.println(this.windStr());
  if (window.size() >= walkParams.minScanningWindCpgs) {
    // First we set our summarizers.
    // Take the last minCpgs as the sub-window. Do it as an iterator since
    // a linked list might be quicker iterating than using "get(i)".
    Iterator<Cpg[]> backIt = window.descendingIterator();
    int i = 0;
    int lastPos = 0;
    if (useSummarizers) this.resetSummarizers();
    boolean minSizeReached = false;
    // Walk backwards from the newest element until both the minimum genomic span and
    // the minimum element count are satisfied (or the window is exhausted).
    while ((i < window.size()) && !(minSizeReached && (i > walkParams.minScanningWindCpgs))) {
      // ***** REMOVE THIS
      if (!backIt.hasNext()) System.err.println("Why did we run out of window elements?!"); // REMOVE
      // ***** REMOVE THIS
      Cpg[] backCpg = backIt.next();
      // System.err.println("\t(i=" + i + ") cpg=" + backCpg.chromPos);
      lastPos = backCpg[0].chromPos;
      if (useSummarizers) {
        // feed this element into every per-table summarizer (combined/fw/rev)
        for (int t = 0; t < this.numTables(); t++) {
          methSummarizer.get(t).streamCpg(backCpg[t]);
          methSummarizerFw.get(t).streamCpg(backCpg[t]);
          methSummarizerRev.get(t).streamCpg(backCpg[t]);
        }
      }
      int windLen = newPos - lastPos;
      minSizeReached = (windLen >= this.walkParams.minScanningWindSize);
      // System.err.printf("Checking size %d (minSizeReached=%s)\n",windLen,minSizeReached);
      i++;
    }
    // Process this window
    // System.err.printf("Checking size %d (minSizeReached=%s)\n",cpg.chromPos-lastPos+1,minSizeReached);
    if (minSizeReached && (i > walkParams.minScanningWindCpgs)) {
      this.processWindow(
          this.window.subList(this.window.size() - (i - 1), this.window.size()), true);
    }
  }
}
/**
 * The method sorts a segment of an isoline into a LineString.
 *
 * <p>Maintains, per elevation level, a binary tree mapping the open endpoints of partially
 * built isolines to the index of their coordinate list in {@code finalIsolines}. Depending on
 * which of the segment's two endpoints are already known (indexA + indexB encodes the case),
 * the segment starts a new line, extends an existing one, or joins/closes two lines.
 *
 * @param coordA - start vertex of isoline's segment
 * @param coordB - stop vertex of isoline's segment
 * @param elevation - elevation of isoline's segment
 */
private void sortIsolines(Coordinate coordA, Coordinate coordB, double elevation) {
  DVertex izoA = null;
  DVertex izoB = null;
  int indexA = 0;
  int indexB = 0;
  BinaryTree tree = null;
  // pick (or lazily create) the endpoint tree for this elevation level
  int elevIndex = new Double((elevation - minIso) / elevatedStep).intValue();
  if (treeIndex.containsKey(elevIndex)) {
    tree = (BinaryTree) treeIndex.get(elevIndex);
  } else {
    tree = new BinaryTree();
    treeIndex.put(elevIndex, tree);
  }
  izoA = (DVertex) tree.search(coordA);
  izoB = (DVertex) tree.search(coordB);
  if (izoA != null) indexA = 1;
  if (izoB != null) indexB = 2;
  switch (indexA + indexB) {
    case 0:
      {
        // neither endpoint known: start a brand-new isoline with both endpoints open
        LinkedList izoList = new LinkedList();
        izoList.add(coordA);
        izoList.add(coordB);
        tree.insert(coordA, new Integer(finalIsolines.size()));
        tree.insert(coordB, new Integer(finalIsolines.size()));
        finalIsolines.add(finalIsolines.size(), izoList);
        break;
      }
    case 1:
      {
        // only coordA known: extend that isoline by coordB at the matching end
        LinkedList izoList = (LinkedList) finalIsolines.get(izoA.data);
        if (izoList == null) break;
        tree.remove(coordA);
        if (((Coordinate) izoList.getFirst()).equals2D(coordA)) {
          izoList.addFirst(coordB);
          tree.insert(coordB, izoA.data);
        } else {
          izoList.addLast(coordB);
          tree.insert(coordB, izoA.data);
        }
        break;
      }
    case 2:
      {
        // only coordB known: extend that isoline by coordA at the matching end
        LinkedList izoList = (LinkedList) finalIsolines.get(izoB.data);
        if (izoList == null) break;
        tree.remove(coordB);
        if (((Coordinate) izoList.getFirst()).equals2D(coordB)) {
          izoList.addFirst(coordA);
          tree.insert(coordA, izoB.data);
        } else {
          izoList.addLast(coordA);
          tree.insert(coordA, izoB.data);
        }
        break;
      }
    case 3:
      {
        // both endpoints known: either close a single isoline or merge two different ones
        LinkedList izoList = (LinkedList) finalIsolines.get(izoA.data);
        if (izoList == null) break;
        if ((izoA.data.intValue() == izoB.data.intValue())) {
          // same isoline: the segment closes it into a ring
          tree.remove(coordA);
          tree.remove(coordB);
          if (((Coordinate) izoList.getFirst()).equals2D(coordA)) {
            izoList.addLast(coordA);
          } else {
            izoList.addFirst(coordA);
          }
        } else {
          // different isolines: splice B's list onto A's, orienting B as needed
          LinkedList izoListB = (LinkedList) finalIsolines.get(izoB.data);
          if (izoListB == null) break;
          if (((Coordinate) izoList.getFirst()).equals2D(coordA)) {
            if (((Coordinate) izoListB.getFirst()).equals2D(coordB)) {
              Iterator iterIzoB = izoListB.iterator();
              while (iterIzoB.hasNext()) {
                izoList.addFirst(iterIzoB.next());
              }
            } else {
              Iterator iterIzoB = izoListB.descendingIterator();
              while (iterIzoB.hasNext()) {
                izoList.addFirst(iterIzoB.next());
              }
            }
          } else {
            if (((Coordinate) izoListB.getFirst()).equals2D(coordB)) {
              Iterator iterIzoB = izoListB.iterator();
              while (iterIzoB.hasNext()) {
                izoList.addLast(iterIzoB.next());
              }
            } else {
              Iterator iterIzoB = izoListB.descendingIterator();
              while (iterIzoB.hasNext()) {
                izoList.addLast(iterIzoB.next());
              }
            }
          }
          // B's list is merged away; re-point the surviving endpoints at A's index
          finalIsolines.set(izoB.data, null);
          ((DVertex) tree.search((Coordinate) izoList.getLast())).data = izoA.data;
          ((DVertex) tree.search((Coordinate) izoList.getFirst())).data = izoA.data;
        }
        tree.remove(coordA);
        tree.remove(coordB);
      }
  }
}
/**
 * Returns the points in clockwise order for the given loop: natural order when the loop's
 * direction sign is positive, reversed order otherwise.
 */
public Iterator<Poynt> iterateClockwise(Loop loop) {
  boolean positiveWinding = directions.get(loop).signum() > 0;
  return positiveWinding ? poynts.iterator() : poynts.descendingIterator();
}
/** Returns an iterator over the argument nodes in reverse (last-to-first) order. */
public Iterator<Node> getReverseIterator() {
  Iterator<Node> reversed = args.descendingIterator();
  return reversed;
}
/** Returns the entries with missing source lines, in reverse insertion order. */
public Iterator<ParsedEntry> getDescendingIteratorMissingSourceLines() {
  final Iterator<ParsedEntry> backwards = missingSourceLines.descendingIterator();
  return backwards;
}
/**
 * Returns an iterator over the potential triggers.
 *
 * @param descending true for reverse (last-to-first) order, false for natural order
 */
public Iterator<ITrigger> getTriggerIterator(boolean descending) {
  if (descending) {
    return _potentialTriggers.descendingIterator();
  }
  return _potentialTriggers.iterator();
}
/**
 * "pop" operation ends when one of the classes specified by pageClasses is found, if none of the
 * classes is found, the method call is a no-op
 *
 * @param pageClasses classes of pages as the destination for this pop operation
 * @param animated true to animate the transition
 */
public void popToClasses(
    Class<? extends Page>[] pageClasses,
    boolean animated,
    PageAnimator.AnimationDirection animationDirection) {
  // Ignore re-entrant calls while a transition is still running.
  if (mAnimating) {
    return;
  }
  if (pageClasses == null || pageClasses.length == 0) {
    throw new IllegalArgumentException(
        "cannot call popToClasses() with null or empty pageClasses.");
  }
  if (mPageStack.size() <= 0) {
    return;
  }
  // do nothing if the topPage is the page we want to navigate to
  Class topPageClass = mPageStack.peekLast().getClass();
  for (Class pageClass : pageClasses) {
    if (pageClass == topPageClass) {
      return;
    }
  }
  // do nothing if the page we want to navigate to does not exist
  boolean hasDestClass = false;
  Iterator<Page> it = mPageStack.descendingIterator();
  LOOP1:
  while (it.hasNext()) {
    Class destPageClass = it.next().getClass();
    for (Class pageClass : pageClasses) {
      if (destPageClass == pageClass) {
        hasDestClass = true;
        break LOOP1;
      }
    }
  }
  if (!hasDestClass) {
    return;
  }
  // Remove the current top page (animated out below), then silently unwind the pages
  // beneath it until a destination class — or a stack of size 1 — is reached.
  Page oldPage = mPageStack.removeLast();
  LOOP2:
  while (mPageStack.size() > 1) {
    Class lastPageClass = mPageStack.peekLast().getClass();
    for (Class pageClass : pageClasses) {
      if (lastPageClass == pageClass) {
        break LOOP2;
      }
    }
    // each silently removed page still gets the full hide/detach callback sequence
    Page page = mPageStack.removeLast();
    page.onHide();
    mContainerView.removeView(page.getView());
    page.onDetached();
    page.onHidden();
  }
  // finish removal of the previous top page (with animation if requested)
  popPageInternal(oldPage, animated, animationDirection);
}
/**
 * Computes a type representing the intersection of the given types: the common supertypes of
 * all inputs, reduced to the most specific ones, combined into a union when more than one
 * remains.
 *
 * @param types the types to intersect; 'dynamic' entries are ignored
 * @return Dynamic if no non-dynamic types were given, Object if no common supertype exists,
 *     the single common supertype, or a union of the remaining common supertypes
 */
public Type intersection(List<Type> types) {
  // exclude 'dynamic' type
  {
    List<Type> newTypes = Lists.newArrayList();
    for (Type type : types) {
      if (TypeKind.of(type) != TypeKind.DYNAMIC) {
        newTypes.add(type);
      }
    }
    types = newTypes;
  }
  // no types, so Dynamic
  if (types.isEmpty()) {
    return typeProvider.getDynamicType();
  }
  // prepare all super types
  List<List<InterfaceType>> superTypesLists = Lists.newArrayList();
  List<Map<InterfaceType, InterfaceType>> superTypesMaps = Lists.newArrayList();
  for (Type type : types) {
    List<InterfaceType> superTypes = getSuperTypes(type);
    superTypesLists.add(superTypes);
    // map raw supertype -> parameterized supertype, for per-input lookup below
    Map<InterfaceType, InterfaceType> superTypesMap = Maps.newHashMap();
    for (InterfaceType superType : superTypes) {
      superTypesMap.put(superType.asRawType(), superType);
    }
    superTypesMaps.add(superTypesMap);
  }
  // find intersection of super types: keep supertypes of the first input that every other
  // input also has (matched by raw type), picking the best type arguments along the way
  LinkedList<InterfaceType> interTypes = Lists.newLinkedList();
  if (superTypesLists.size() > 0) {
    for (InterfaceType superType : superTypesLists.get(0)) {
      boolean inAll = true;
      for (Map<InterfaceType, InterfaceType> otherTypesMap : superTypesMaps) {
        InterfaceType superTypeRaw = superType.asRawType();
        InterfaceType otherType = otherTypesMap.get(superTypeRaw);
        // no such raw type, exclude from intersection
        if (otherType == null) {
          inAll = false;
          break;
        }
        // if not raw, choose type arguments
        if (!superType.getArguments().isEmpty()) {
          InterfaceType t0 = superType;
          InterfaceType t1 = otherType;
          // if two-way sub-type, then has Dynamic(s), choose with least number
          if (isSubtype(t0, t1) && isSubtype(t1, t0)) {
            int dynamics0 = getDynamicArgumentsCount(t0);
            int dynamics1 = getDynamicArgumentsCount(t1);
            if (dynamics0 < dynamics1) {
              superType = t0;
            } else {
              superType = t1;
            }
            continue;
          }
          // use super-type of t0 and t1
          if (isSubtype(t0, t1)) {
            superType = t1;
          }
          if (isSubtype(t1, t0)) {
            superType = t0;
          }
        }
      }
      if (inAll && !interTypes.contains(superType)) {
        interTypes.add(superType);
      }
    }
  }
  // try to remove sub-types already covered by existing types
  // (iterated back-to-front; removal goes through the iterator, which is safe)
  for (Iterator<InterfaceType> i = interTypes.descendingIterator(); i.hasNext(); ) {
    InterfaceType subType = i.next();
    boolean hasSuperType = false;
    for (InterfaceType superType : interTypes) {
      if (superType != subType && isSubtype(superType, subType)) {
        hasSuperType = true;
        break;
      }
    }
    if (hasSuperType) {
      i.remove();
    }
  }
  // use single type
  if (interTypes.size() == 0) {
    return typeProvider.getObjectType();
  }
  if (interTypes.size() == 1) {
    return interTypes.get(0);
  }
  // create union
  return unionTypes(interTypes);
}
/**
 * Inserts the candidate into the proper position of the local or remote candidate list,
 * maintaining the lists' ordering invariants (owner/serializable candidates first, then by
 * ascending version).
 *
 * @param cand Candidate to add.
 * @return {@code False} if failed to add candidate and transaction should be cancelled.
 */
private boolean add0(GridCacheMvccCandidate cand) {
  assert cand != null;
  // Local.
  if (cand.local()) {
    if (locs == null) locs = new LinkedList<>();
    if (!cand.nearLocal()) {
      if (!locs.isEmpty()) {
        if (cand.serializable()) {
          // Serializable candidates append at the tail, but only after validating version
          // ordering against existing candidates, scanned newest-first.
          Iterator<GridCacheMvccCandidate> it = locs.descendingIterator();
          if (cand.read()) {
            while (it.hasNext()) {
              GridCacheMvccCandidate c = it.next();
              if (!c.serializable()) return false;
              if (!c.read()) {
                // first write candidate decides: compatible version -> ok, else reject
                if (compareSerializableVersion(c, cand)) break;
                else return false;
              }
            }
          } else {
            while (it.hasNext()) {
              GridCacheMvccCandidate c = it.next();
              if (!c.serializable() || !compareSerializableVersion(c, cand)) return false;
              if (!c.read()) break;
            }
          }
          locs.addLast(cand);
          return true;
        }
        GridCacheMvccCandidate first = locs.getFirst();
        if (first.owner()) {
          // If reentry, add at the beginning. Note that
          // no reentry happens for DHT-local candidates.
          if (!cand.dhtLocal() && first.threadId() == cand.threadId()) {
            assert !first.serializable();
            cand.setOwner();
            cand.setReady();
            cand.setReentry();
            locs.addFirst(cand);
            return true;
          }
        }
        // Iterate in reverse order.
        for (ListIterator<GridCacheMvccCandidate> it = locs.listIterator(locs.size());
            it.hasPrevious(); ) {
          GridCacheMvccCandidate c = it.previous();
          assert !c.version().equals(cand.version())
              : "Versions can't match [existing=" + c + ", new=" + cand + ']';
          // Add after the owner or serializable tx.
          if (c.owner() || c.serializable()) {
            // Threads are checked above.
            assert cand.dhtLocal() || c.threadId() != cand.threadId();
            // Reposition.
            it.next();
            it.add(cand);
            return true;
          }
          // If not the owner, add after the lesser version.
          if (c.version().isLess(cand.version())) {
            // Reposition.
            it.next();
            it.add(cand);
            return true;
          }
        }
      }
      // Either list is empty or candidate is first.
      locs.addFirst(cand);
    } else
      // For near local candidates just add it to the end of list.
      locs.add(cand);
  }
  // Remote.
  else {
    assert !cand.serializable() && !cand.read() : cand;
    if (rmts == null) rmts = new LinkedList<>();
    assert !cand.owner() || localOwners() == null
        : "Cannot have local and remote owners "
            + "at the same time [cand="
            + cand
            + ", locs="
            + locs
            + ", rmts="
            + rmts
            + ']';
    GridCacheMvccCandidate cur = candidate(rmts, cand.version());
    // For existing candidates, we only care about owners and keys.
    if (cur != null) {
      if (cand.owner()) cur.setOwner();
      return true;
    }
    // Either list is empty or candidate is last.
    rmts.add(cand);
  }
  return true;
}
/**
 * Called as part of connecting a block when the new block results in a different chain having
 * higher total work.
 *
 * <p>if (shouldVerifyTransactions) Either newChainHead needs to be in the block store as a
 * FullStoredBlock, or (block != null && block.transactions != null)
 */
private void handleNewBestChain(
    StoredBlock storedPrev, StoredBlock newChainHead, Block block, boolean expensiveChecks)
    throws BlockStoreException, VerificationException, PrunedException {
  checkState(lock.isHeldByCurrentThread());
  // This chain has overtaken the one we currently believe is best. Reorganize is required.
  //
  // Firstly, calculate the block at which the chain diverged. We only need to examine the
  // chain from beyond this block to find differences.
  StoredBlock head = getChainHead();
  final StoredBlock splitPoint = findSplit(newChainHead, head, blockStore);
  log.info("Re-organize after split at height {}", splitPoint.getHeight());
  log.info("Old chain head: {}", head.getHeader().getHashAsString());
  log.info("New chain head: {}", newChainHead.getHeader().getHashAsString());
  log.info("Split at block: {}", splitPoint.getHeader().getHashAsString());
  // Then build a list of all blocks in the old part of the chain and the new part.
  final LinkedList<StoredBlock> oldBlocks = getPartialChain(head, splitPoint, blockStore);
  final LinkedList<StoredBlock> newBlocks = getPartialChain(newChainHead, splitPoint, blockStore);
  // Disconnect each transaction in the previous main chain that is no longer in the new main
  // chain
  StoredBlock storedNewHead = splitPoint;
  if (shouldVerifyTransactions()) {
    for (StoredBlock oldBlock : oldBlocks) {
      try {
        disconnectTransactions(oldBlock);
      } catch (PrunedException e) {
        // We threw away the data we need to re-org this deep! We need to go back to a peer
        // with full block contents and ask them for the relevant data then rebuild the
        // indexs. Or we could just give up and ask the human operator to help get us
        // unstuck (eg, rescan from the genesis block).
        // TODO: Retry adding this block when we get a block with hash e.getHash()
        throw e;
      }
    }
    StoredBlock cursor;
    // Walk in ascending chronological order. (newBlocks is newest-first, hence descending.)
    for (Iterator<StoredBlock> it = newBlocks.descendingIterator(); it.hasNext(); ) {
      cursor = it.next();
      Block cursorBlock = cursor.getHeader();
      if (expensiveChecks
          && cursorBlock.getTimeSeconds()
              <= getMedianTimestampOfRecentBlocks(cursor.getPrev(blockStore), blockStore))
        throw new VerificationException("Block's timestamp is too early during reorg");
      TransactionOutputChanges txOutChanges;
      // The chain head may carry full transactions in 'block' rather than in the store.
      if (cursor != newChainHead || block == null) txOutChanges = connectTransactions(cursor);
      else txOutChanges = connectTransactions(newChainHead.getHeight(), block);
      storedNewHead = addToBlockStore(storedNewHead, cursorBlock.cloneAsHeader(), txOutChanges);
    }
  } else {
    // (Finally) write block to block store
    storedNewHead = addToBlockStore(storedPrev, newChainHead.getHeader());
  }
  // Now inform the listeners. This is necessary so the set of currently active transactions (that
  // we can spend) can be updated to take into account the re-organize. We might also have
  // received new coins we didn't have before and our previous spends might have been undone.
  for (final ListenerRegistration<BlockChainListener> registration : listeners) {
    if (registration.executor == Threading.SAME_THREAD) {
      // Short circuit the executor so we can propagate any exceptions.
      // TODO: Do we really need to do this or should it be irrelevant?
      registration.listener.reorganize(splitPoint, oldBlocks, newBlocks);
    } else {
      registration.executor.execute(
          new Runnable() {
            @Override
            public void run() {
              try {
                registration.listener.reorganize(splitPoint, oldBlocks, newBlocks);
              } catch (VerificationException e) {
                log.error("Block chain listener threw exception during reorg", e);
              }
            }
          });
    }
  }
  // Update the pointer to the best known block.
  setChainHead(storedNewHead);
}
/**
 * Returns an iterator over the potential actions.
 *
 * @param descending true for reverse (last-to-first) order, false for natural order
 */
public Iterator<IAction> getActionIterator(boolean descending) {
  if (descending) {
    return _potentialActions.descendingIterator();
  }
  return _potentialActions.iterator();
}
/**
 * Creates the color annotations from the FSTDirectives.
 *
 * <p>For every line covered by a directive this adds an image annotation plus, depending on
 * the {@code highlighting} flag, highlight and/or overview annotations. When a line already
 * has an annotated position, only the uncovered parts of the line are highlighted and the
 * recorded position for that line is widened accordingly.
 */
private void createAnnotations() {
  AnnotationModelEvent event = new AnnotationModelEvent(this);
  // Walk directives last-to-first.
  Iterator<FSTDirective> it = validDirectiveList.descendingIterator();
  while (it.hasNext()) {
    FSTDirective dir = it.next();
    try {
      int startline = dir.getStartLine();
      int endline = dir.getEndLine();
      for (int i = startline; i <= endline; i++) {
        // skip the end line entirely when the directive ends at column 0
        if (i < endline || dir.getEndLength() > 0) {
          int lineLength = document.getLineLength(i);
          int lineOffset = document.getLineOffset(i);
          // trim the covered range on the first and last lines of the directive
          if (i == endline) {
            lineLength = dir.getEndLength();
          }
          if (i == startline) {
            lineOffset += dir.getStartOffset();
            lineLength -= dir.getStartOffset();
          }
          Position newPos = new Position(lineOffset, lineLength);
          if (!annotatedPositions.containsKey(i)) {
            if (!ColorList.isValidColor(dir.getColor())) break;
            ColorAnnotation ca =
                new ColorAnnotation(
                    dir.getColor(),
                    new Position(lineOffset, lineLength),
                    ColorAnnotation.TYPE_IMAGE);
            annotations.add(ca);
            event.annotationAdded(ca);
            if (highlighting) {
              // highlight the line; the directive's first line also shows in the overview
              ca =
                  new ColorAnnotation(
                      dir.getColor(),
                      newPos,
                      i == startline
                          ? ColorAnnotation.TYPE_HIGHLIGHT_OVERVIEW
                          : ColorAnnotation.TYPE_HIGHLIGHT);
              annotations.add(ca);
              event.annotationAdded(ca);
            } else if (i == startline) {
              ca = new ColorAnnotation(dir.getColor(), newPos, ColorAnnotation.TYPE_OVERVIEW);
              annotations.add(ca);
              event.annotationAdded(ca);
            }
            annotatedPositions.put(i, newPos);
          } else if (highlighting) {
            // line already annotated: highlight only the parts before and after the
            // existing position, then record the widened union position for the line
            Position oldPos = annotatedPositions.get(i);
            int oldOffset = oldPos.getOffset();
            int oldLength = oldPos.getLength();
            int wholeOffset = oldOffset;
            int wholeLength = oldLength;
            if (oldOffset > lineOffset) {
              ColorAnnotation ca =
                  new ColorAnnotation(
                      dir.getColor(),
                      new Position(lineOffset, oldOffset - lineOffset),
                      ColorAnnotation.TYPE_HIGHLIGHT);
              annotations.add(ca);
              event.annotationAdded(ca);
              wholeOffset = lineOffset;
              wholeLength += oldOffset - lineOffset;
            }
            int newOffset = oldOffset + oldLength;
            int newLength = lineLength - (newOffset - lineOffset);
            if (newLength > 0) {
              newPos.setOffset(newOffset);
              newPos.setLength(newLength);
              ColorAnnotation ca =
                  new ColorAnnotation(dir.getColor(), newPos, ColorAnnotation.TYPE_HIGHLIGHT);
              annotations.add(ca);
              event.annotationAdded(ca);
              wholeLength += newLength;
            }
            annotatedPositions.put(i, new Position(wholeOffset, wholeLength));
          }
        }
      }
    } catch (BadLocationException e) {
      UIPlugin.getDefault().logError(e);
    }
  }
  fireModelChanged(event);
}
/** Returns the lines still to translate, in reverse insertion order. */
public Iterator<ParsedEntry> getDescendingIteratorLineToTranslate() {
  final Iterator<ParsedEntry> backwards = linesToTranslate.descendingIterator();
  return backwards;
}