public void testGrowReplaceShrink() {
    Random r = new Random();
    int k = 10000;
    String s = "A";
    double t = 0;
    FibonacciHeap<String> h = new FibonacciHeap<>();
    for (int i = 0; i < (k * 3); ++i) {
        // during first two-thirds, insert
        if (i < (k * 2)) {
            double d = r.nextDouble();
            t += d;
            FibonacciHeapNode<String> n = new FibonacciHeapNode<>(s);
            h.insert(n, d);
        }
        // during last two-thirds, delete (so during middle
        // third, we'll do both insert and delete, interleaved)
        if (i >= k) {
            FibonacciHeapNode<String> n2 = h.removeMin();
            t -= n2.getKey();
        }
    }
    assertTrue(h.isEmpty());
    // tally should come back down to zero, or thereabouts (due to roundoff)
    assertEquals(0.0, t, 0.00001);
}
public static FibonacciHeap<NodeInfoHelper> initialStartSet(
        long startNode, long endNode, HashMap<Long, FibonacciHeapNode<NodeInfoHelper>> nodeHelperCache) {
    FibonacciHeap<NodeInfoHelper> openSet = new FibonacciHeap<NodeInfoHelper>();
    NodeInfo start = OSMData.nodeHashMap.get(startNode);
    NodeInfoHelper initial;
    FibonacciHeapNode<NodeInfoHelper> fInitial;
    // initialize the start set
    if (start.isIntersect()) {
        // the start node is an intersection: push it directly
        initial = new NodeInfoHelper(startNode);
        initial.setCost(0);
        initial.setHeuristic(estimateHeuristic(startNode, endNode));
        fInitial = new FibonacciHeapNode<NodeInfoHelper>(initial);
        openSet.insert(fInitial, initial.getTotalCost()); // push the start node
        nodeHelperCache.put(initial.getNodeId(), fInitial); // add cache
    } else {
        // the start node lies on an edge: seed the open set with the edge endpoint(s)
        EdgeInfo edge = start.getOnEdgeList().getFirst();
        double speed = edge.getTravelSpeed();
        int travelTime = 1; // second
        int distance;
        if (!edge.isOneway()) {
            // distance from start to middle
            distance = edge.getStartDistance(startNode);
            travelTime = (int) Math.round(distance / speed * OSMParam.MILLI_PER_SECOND);
            initial = new NodeInfoHelper(edge.getStartNode());
            initial.setCost(travelTime);
            initial.setHeuristic(estimateHeuristic(edge.getStartNode(), endNode));
            fInitial = new FibonacciHeapNode<NodeInfoHelper>(initial);
            openSet.insert(fInitial, initial.getTotalCost()); // push the start node
            nodeHelperCache.put(initial.getNodeId(), fInitial); // add cache
        }
        distance = edge.getEndDistance(startNode);
        travelTime = (int) Math.round(distance / speed * OSMParam.MILLI_PER_SECOND);
        initial = new NodeInfoHelper(edge.getEndNode());
        initial.setCost(travelTime);
        initial.setHeuristic(estimateHeuristic(edge.getEndNode(), endNode));
        fInitial = new FibonacciHeapNode<NodeInfoHelper>(initial);
        openSet.insert(fInitial, initial.getTotalCost()); // push the start node
        nodeHelperCache.put(initial.getNodeId(), fInitial); // add cache
    }
    return openSet;
}
public void relaxEdges(FibonacciHeap<Vertex> unvisited) {
    Vertex[] vertices = Graph.this.vertices;
    for (Edge edge : edges) {
        Vertex to = vertices[edge.to];
        int newDistance = distance + edge.cost;
        if (newDistance < to.distance) {
            // found a shorter path to 'to': record it and lower its priority in the heap
            to.distance = newDistance;
            unvisited.decreaseKey(to.heapEntry, newDistance);
        }
    }
}
public int getDistance(int from, int to) {
    FibonacciHeap<Vertex> unvisited = new FibonacciHeap<Vertex>();
    for (int i = 1; i <= vertexCount; i++) {
        vertices[i].distance = vertexCount + 1;
        vertices[i].visited = false;
    }
    vertices[from].distance = 0;
    for (int i = 1; i <= vertexCount; i++)
        vertices[i].heapEntry = unvisited.enqueue(vertices[i], vertices[i].distance);
    while (unvisited.size() > 0) {
        Vertex v = unvisited.dequeueMin().getValue();
        if (v.index == to || v.distance > vertexCount)
            return v.distance;
        v.relaxEdges(unvisited);
    }
    return vertices[to].distance;
}
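/*
 * A minimal sketch of the heap API used by getDistance/relaxEdges above
 * (enqueue / dequeueMin / decreaseKey / size), kept separate from the graph code.
 * Assumptions: the method name demoEnqueueDecreaseKey and the sample values are
 * illustrative only, and the entry type name FibonacciHeap.Entry<T> is a guess;
 * the snippets only show that enqueue() returns a handle accepted by decreaseKey()
 * and that dequeueMin() returns an entry exposing getValue().
 */
public void demoEnqueueDecreaseKey() {
    FibonacciHeap<String> pq = new FibonacciHeap<String>();
    FibonacciHeap.Entry<String> a = pq.enqueue("a", 5);
    pq.enqueue("b", 3);
    pq.decreaseKey(a, 1); // "a" now has the smallest priority
    while (pq.size() > 0) {
        System.out.println(pq.dequeueMin().getValue()); // prints "a", then "b"
    }
}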
// in honor of sf.net bug #1845376
public void testAddRemoveOne() {
    String s = "A";
    FibonacciHeapNode<String> n = new FibonacciHeapNode<>(s);
    FibonacciHeap<String> h = new FibonacciHeap<>();
    assertTrue(h.isEmpty());
    h.insert(n, 1.0);
    assertFalse(h.isEmpty());
    FibonacciHeapNode<String> n2 = h.removeMin();
    assertEquals(s, n2.getData());
    assertTrue(h.isEmpty());
}
/**
 * Given two Fibonacci heaps, returns a new Fibonacci heap that contains all of the elements of
 * the two heaps. Each of the input heaps is destructively modified by having all its elements
 * removed. You can continue to use those heaps, but be aware that they will be empty after this
 * call completes.
 *
 * @param one The first Fibonacci heap to merge.
 * @param two The second Fibonacci heap to merge.
 * @return A new FibonacciHeap containing all of the elements of both heaps.
 */
public static <T> FibonacciHeap<T> merge(FibonacciHeap<T> one, FibonacciHeap<T> two) {
    /* Create a new FibonacciHeap to hold the result. */
    FibonacciHeap<T> result = new FibonacciHeap<T>();

    /* Merge the two Fibonacci heap root lists together. This helper function
     * also computes the min of the two lists, so we can store the result in
     * the mMin field of the new heap.
     */
    result.mMin = mergeLists(one.mMin, two.mMin);

    /* The size of the new heap is the sum of the sizes of the input heaps. */
    result.mSize = one.mSize + two.mSize;

    /* Clear the old heaps. */
    one.mSize = two.mSize = 0;
    one.mMin = null;
    two.mMin = null;

    /* Return the newly-merged heap. */
    return result;
}
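/*
 * A short usage sketch for merge(), assuming it lives in the same FibonacciHeap
 * implementation as the enqueue()/dequeueMin()/size() calls used in getDistance above;
 * the method name demoMerge and the sample values are illustrative only.
 */
public void demoMerge() {
    FibonacciHeap<String> one = new FibonacciHeap<String>();
    FibonacciHeap<String> two = new FibonacciHeap<String>();
    one.enqueue("x", 2);
    two.enqueue("y", 1);
    FibonacciHeap<String> merged = FibonacciHeap.merge(one, two);
    // per the javadoc above, the inputs are drained into the result
    System.out.println(one.size() == 0 && two.size() == 0); // true
    System.out.println(merged.dequeueMin().getValue());     // "y" (smallest priority)
    System.out.println(merged.dequeueMin().getValue());     // "x"
}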
/**
 * Routing using the A* algorithm with a Fibonacci heap; basically the same as the
 * routingAStar function.
 *
 * @param startNode
 * @param endNode
 * @param startTime
 * @param dayIndex
 * @param pathNodeList return path
 * @return total cost of the path, or -1 if no path is found
 */
public static double routingAStarFibonacci(
        long startNode, long endNode, int startTime, int dayIndex, ArrayList<Long> pathNodeList) {
    System.out.println("start finding the path...");
    int debug = 0;
    double totalCost = -1;
    try {
        // test store transversal nodes
        // HashSet<Long> transversalSet = new HashSet<Long>();
        if (!OSMData.nodeHashMap.containsKey(startNode) || !OSMData.nodeHashMap.containsKey(endNode)) {
            System.err.println("cannot find start or end node!");
            return -1;
        }
        if (startNode == endNode) {
            System.out.println("start node is the same as end node.");
            return 0;
        }
        HashSet<Long> closedSet = new HashSet<Long>();
        HashMap<Long, FibonacciHeapNode<NodeInfoHelper>> nodeHelperCache =
            new HashMap<Long, FibonacciHeapNode<NodeInfoHelper>>();
        FibonacciHeap<NodeInfoHelper> openSet = initialStartSet(startNode, endNode, nodeHelperCache);
        HashSet<Long> endSet = initialEndSet(endNode);
        NodeInfoHelper current = null;
        FibonacciHeapNode<NodeInfoHelper> fCurrent = null;

        while (!openSet.isEmpty()) {
            // remove current from the open set
            fCurrent = openSet.min();
            openSet.removeMin();
            current = fCurrent.getData();
            // if (!transversalSet.contains(current.getNodeId()))
            //     transversalSet.add(current.getNodeId());
            long nodeId = current.getNodeId();
            // add current to the closed set
            closedSet.add(nodeId);
            if (endSet.contains(nodeId)) { // found the destination
                current = current.getEndNodeHelper(endNode);
                totalCost = current.getCost();
                break;
            }
            // for time dependent routing
            int timeIndex = startTime
                + (int) (current.getCost() / OSMParam.SECOND_PER_MINUTE / OSMRouteParam.TIME_INTERVAL);
            if (timeIndex > OSMRouteParam.TIME_RANGE - 1)
                // time [6am - 9pm], we regard times after 9pm as constant edge weights
                timeIndex = OSMRouteParam.TIME_RANGE - 1;
            LinkedList<ToNodeInfo> adjNodeList = OSMData.adjListHashMap.get(nodeId);
            if (adjNodeList == null)
                continue; // this node cannot go anywhere
            double arriveTime = current.getCost();
            // for each neighbor in neighbor_nodes(current)
            for (ToNodeInfo toNode : adjNodeList) {
                debug++;
                long toNodeId = toNode.getNodeId();
                int travelTime;
                if (toNode.isFix()) // fixed travel time
                    travelTime = toNode.getTravelTime();
                else // fetch from the time array
                    travelTime = toNode.getSpecificTravelTime(dayIndex, timeIndex);
                // tentative_g_score := g_score[current] + dist_between(current, neighbor)
                double costTime = arriveTime + (double) travelTime / OSMParam.MILLI_PER_SECOND;
                // tentative_f_score := tentative_g_score + heuristic_cost_estimate(neighbor, goal)
                double heuristicTime = estimateHeuristic(toNodeId, endNode);
                double totalCostTime = costTime + heuristicTime;
                // if neighbor in closedset and tentative_f_score >= f_score[neighbor]
                if (closedSet.contains(toNodeId)
                    && nodeHelperCache.get(toNodeId).getData().getTotalCost() <= totalCostTime) {
                    continue;
                }
                NodeInfoHelper node = null;
                FibonacciHeapNode<NodeInfoHelper> fNode = null;
                // if neighbor not in openset or tentative_f_score < f_score[neighbor]
                if (!nodeHelperCache.containsKey(toNodeId)) { // neighbor not in openset
                    // create a new one
                    node = new NodeInfoHelper(toNodeId);
                    node.setCost(costTime);
                    node.setHeuristic(heuristicTime);
                    node.setParentId(nodeId);
                    fNode = new FibonacciHeapNode<NodeInfoHelper>(node);
                    openSet.insert(fNode, node.getTotalCost());
                    nodeHelperCache.put(node.getNodeId(), fNode);
                } else if (nodeHelperCache.get(toNodeId).getData().getTotalCost() > totalCostTime) {
                    // neighbor already cached with a worse f_score
                    fNode = nodeHelperCache.get(toNodeId);
                    node = fNode.getData();
                    // update information
                    node.setCost(costTime);
                    node.setHeuristic(heuristicTime);
                    node.setParentId(nodeId);
                    if (closedSet.contains(toNodeId)) { // neighbor in the closed set
                        closedSet.remove(toNodeId); // remove neighbor from the closed set
                        openSet.insert(fNode, node.getTotalCost());
                    } else { // neighbor in the open set, decreaseKey
                        openSet.decreaseKey(fNode, node.getTotalCost());
                    }
                }
            }
        }
        if (totalCost != -1) {
            long traceNodeId = current.getNodeId();
            pathNodeList.add(traceNodeId); // add end node
            traceNodeId = current.getParentId();
            while (traceNodeId != 0) {
                pathNodeList.add(traceNodeId); // add node
                fCurrent = nodeHelperCache.get(traceNodeId);
                current = fCurrent.getData();
                traceNodeId = current.getParentId();
            }
            Collections.reverse(pathNodeList); // reverse the path list
            System.out.println("find the path successful!");
        } else {
            System.out.println("can not find the path!");
        }
        // OSMOutput.generateTransversalNodeKML(transversalSet, nodeHashMap);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println(
            "tdsp: debug code " + debug + ", start node " + startNode + ", end node " + endNode);
    }
    return totalCost;
}
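/*
 * A hedged usage sketch for routingAStarFibonacci. The method name demoRouting is
 * illustrative, and startNode/endNode/startTime/dayIndex are placeholders rather than
 * values taken from the snippets above; only the call signature and the "-1 means no
 * path" convention come from the code itself.
 */
public static void demoRouting(long startNode, long endNode, int startTime, int dayIndex) {
    ArrayList<Long> pathNodeList = new ArrayList<Long>();
    double totalCost = routingAStarFibonacci(startNode, endNode, startTime, dayIndex, pathNodeList);
    if (totalCost >= 0) {
        System.out.println("path with " + pathNodeList.size() + " nodes, cost " + totalCost);
    }
}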