@Override
public Solution pack(double upperBound) {
  Set<Cycle> matching = new HashSet<Cycle>();
  double objVal = 0.0;
  Set<Vertex> matchedVerts = new HashSet<Vertex>();

  long start = System.nanoTime();

  // First, use every altruist by packing chains
  if (maxChainSize > 1) {
    for (Vertex alt : pool.getAltruists()) {

      // Can't sample any chains from isolated altruists
      if (pool.outgoingEdgesOf(alt).isEmpty()) {
        continue;
      }

      Cycle chain = sampleAChain(alt, matchedVerts, maxChainSize, usingFailureProbabilities);
      // Couldn't find a legal path from this altruist
      if (null == chain) {
        continue;
      }

      // We check legality of the chain during generation, so add all verts and chain to matching
      Set<Vertex> cVerts = Cycle.getConstituentVertices(chain, pool);
      matchedVerts.addAll(cVerts);
      objVal += chain.getWeight();
      matching.add(chain);

      // If we hit the upper bound, break out
      if (objVal >= upperBound) {
        break;
      }
    }
  }

  // Second, pack remaining vertices in cycles (using a VertexShufflePacker)
  VertexShufflePacker cyclePacker =
      new VertexShufflePacker(this.pool, this.cycles, this.membership, this.shuffleType, matchedVerts);
  Solution cyclesOnly = cyclePacker.pack(upperBound - objVal);

  // Add these packed cycles to our full matching
  matching.addAll(cyclesOnly.getMatching());
  objVal += cyclesOnly.getObjectiveValue();

  long end = System.nanoTime();
  long totalTime = end - start;

  // Construct formal matching, return
  Solution sol = new Solution();
  sol.setMatching(matching);
  sol.setObjectiveValue(objVal);
  sol.setSolveTime(totalTime);
  return sol;
}
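Since pack(double) is randomized (chains are grown by a weighted random walk and leftover vertices are packed into cycles after a shuffle), repeated calls return different matchings, so a caller can keep the best of several independent packings. A minimal sketch of that pattern, assuming randomPacker references an instance of this packer class and using Double.MAX_VALUE to mean "no upper bound" (both the variable name and that convention are illustrative assumptions, not names from this codebase):

Solution best = null;
for (int rep = 0; rep < 100; rep++) {
  // Each call re-samples chains and re-shuffles cycles, so objective values vary across reps
  Solution candidate = randomPacker.pack(Double.MAX_VALUE);  // hypothetical reference to this packer
  if (best == null || candidate.getObjectiveValue() > best.getObjectiveValue()) {
    best = candidate;
  }
}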
public static void removeKidneyToLiverEdges(
    Pool pool, Random r, double probKidneyToLiver, double pctKidney) {

  IOUtil.dPrintln("Removing edges from some kidney donors to liver pairs.");

  // First, label the vertices as either kidney- or liver-needing (all altruists are
  // assumed kidney)
  Set<Vertex> kidneyPairedDonors = new HashSet<Vertex>();
  Set<Vertex> liverPairedDonors = new HashSet<Vertex>();
  for (VertexPair vp : pool.getPairs()) {
    if (r.nextDouble() < pctKidney) {
      kidneyPairedDonors.add(vp);
    } else {
      liverPairedDonors.add(vp);
    }
  }
  for (VertexAltruist alt : pool.getAltruists()) {
    kidneyPairedDonors.add(alt);
  }

  // Next, for each kidney-paired donor, determine if that donor is willing
  // to give a liver. If not, remove all outgoing edges to liver-paired donors
  Set<Edge> edgesToRemove = new HashSet<Edge>();
  for (Vertex kidneyV : kidneyPairedDonors) {
    boolean willingToGive = (r.nextDouble() < probKidneyToLiver);
    willingToGive &= !(kidneyV.isAltruist());  // disallow any kidney altruists from giving to liver pairs
    if (willingToGive) {
      continue;
    }

    for (Edge e : pool.outgoingEdgesOf(kidneyV)) {
      if (liverPairedDonors.contains(pool.getEdgeTarget(e))) {
        edgesToRemove.add(e);
      }
    }
  }

  int removedEdgeCt = 0;
  for (Edge e : edgesToRemove) {
    pool.removeEdge(e);
    removedEdgeCt++;
  }

  IOUtil.dPrintln(
      "Removed " + removedEdgeCt + " edges from kidney donors to liver pairs ("
          + pool.edgeSet().size() + " remain).");
}
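A quick sanity check on these parameters: a (non-altruist) pair is labeled kidney-needing with probability pctKidney, and its donor is then unwilling to also give a liver with probability (1 - probKidneyToLiver), so roughly pctKidney * (1 - probKidneyToLiver) of the pairs have their outgoing edges to liver-paired pairs (if any) removed. A back-of-the-envelope sketch, using the parameter values from the JAIR driver below (illustrative arithmetic only, not measured output):

double pctKidney = 0.85377;        // fraction of pairs labeled kidney-needing
double probKidneyToLiver = 0.3;    // chance a kidney donor is also willing to give a liver
int numPairs = 6000;
// Expected number of kidney-paired donors whose edges into liver-paired pairs are removed
double expectedRefusers = numPairs * pctKidney * (1.0 - probKidneyToLiver);
System.out.println("Expected refusing kidney-paired donors: " + expectedRefusers);  // ~3586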
/**
 * Random walks a chain from altruistic Vertex alt, sampling hops based on our weighting scheme.
 *
 * @param alt Starting altruist for the chain
 * @param matchedVerts Set of off-limits vertices for this chain
 * @param maxChainSize Chain cap (will sample a chain <= maxChainSize)
 * @param usingFailureProbabilities True if the chain's weight should be discounted, false if raw
 * @return A chain starting at alt of size <= maxChainSize that random walks from alt through the
 *     legal remaining vertices in the pool, sampling neighbors inversely proportional to the
 *     number of cycles containing those neighbors. Weight is discounted
 *     (usingFailureProbabilities=true) or raw (=false)
 */
protected Cycle sampleAChain(
    Vertex alt, Set<Vertex> matchedVerts, int maxChainSize, boolean usingFailureProbabilities) {

  if (null == alt) {
    throw new IllegalArgumentException("Altruist cannot be null.");
  }
  if (null == matchedVerts) {
    throw new IllegalArgumentException("Set of matched vertices cannot be null.");
  }
  if (maxChainSize < 2) {
    throw new IllegalArgumentException(
        "Cannot sample chains if maxChainSize<2 (maxChainSize=" + maxChainSize + ")");
  }
  if (pool.outgoingEdgesOf(alt).size() < 1) {
    throw new IllegalArgumentException(
        "Altruist " + alt + " has no outgoing edges. Cannot call sampleAChain.");
  }

  // Accumulate our chain's edges
  Deque<Edge> path = new ArrayDeque<Edge>();
  Set<Vertex> inPath = new HashSet<Vertex>();
  double pathSuccProb = 1.0;
  double discountedPathWeight = 0.0;
  double rawPathWeight = 0.0;

  Vertex currentV = alt;
  do {
    // Want to choose next hop inversely proportional to #cycles/chains containing it
    WeightedRandomSample<Edge> neighborSet = new WeightedRandomSample<Edge>();
    for (Edge edge : pool.outgoingEdgesOf(currentV)) {
      Vertex candidateV = pool.getEdgeTarget(edge);

      // If this neighbor has already been matched (or is in our chain), skip
      if (matchedVerts.contains(candidateV) || inPath.contains(candidateV)) {
        continue;
      }
      // If this neighbor is an altruist who isn't the starting altruist, skip
      if (candidateV.isAltruist() && !candidateV.equals(alt)) {
        continue;
      }
      // Never want to sample vertices that are not in any cycles (no chance of matching)
      double cycleCount = membership.getMembershipSet(candidateV).size();
      if (cycleCount == 0) {
        continue;
      }

      // Not worrying about overflow for now, since we won't be using this on big |cycle| counts
      double weight = (double) cycles.size() / cycleCount;
      neighborSet.add(weight, edge);
    }

    // Get our next hop in the chain, based on the weights computed above
    Edge nextE = null;
    if (path.size() >= maxChainSize - 1 || neighborSet.size() < 1) {
      // If we're at the last step of the chain due to a chain cap, or if no vertices
      // are both neighbors of this vertex AND unmatched, then try to hop back to the
      // starting altruist
      nextE = pool.getEdge(currentV, alt);
      if (null == nextE) {
        return null;
        // throw new RuntimeException("Starting with altruist " + alt
        //     + ", found a vertex that did not connect (dummy edge or otherwise) back to the altruist.\n"
        //     + "Vertex: " + currentV + ", neighbors: " + pool.outgoingEdgesOf(currentV));
      }
    } else {
      nextE = neighborSet.sampleWithoutReplacement();
    }

    path.push(nextE);
    Vertex nextV = pool.getEdgeTarget(nextE);
    inPath.add(nextV);

    // If we're ending the chain, make a formal Cycle and return
    if (nextV.isAltruist() && nextV.equals(alt)) {
      // Add the discounted weight from this chain executing in its entirety
      discountedPathWeight += ((1.0 - nextE.getFailureProbability()) * pathSuccProb * rawPathWeight);
      break;
    } else {
      // Add discounted utility of the chain executing to EXACTLY this edge and then failing
      // (so \sum weights so far * \prod successes so far * failure prob of this edge)
      discountedPathWeight += (rawPathWeight * pathSuccProb * nextE.getFailureProbability());
      // Probability of chain executing to very end (and maybe continuing)
      pathSuccProb *= (1.0 - nextE.getFailureProbability());
      // We assume the chain gets this far, add edge to raw weight
      rawPathWeight += pool.getEdgeWeight(nextE);
      // We've hopped!
      currentV = nextV;
    }
  } while (true);

  // Construct a formal Cycle from the sampled path, and weight it accordingly
  if (!usingFailureProbabilities) {
    return Cycle.makeCycle(path, rawPathWeight);
  } else {
    return Cycle.makeCycle(path, discountedPathWeight);
  }
}
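The discounting above computes the expected matched weight of the sampled chain under independent edge failures: each non-terminal hop i contributes (sum of edge weights executed before hop i) * (probability those earlier hops all succeeded) * (failure probability of hop i), and the final hop back to the altruist contributes the full raw weight times the probability that every hop succeeds. A standalone sketch of that accumulation (a hypothetical helper, not part of this codebase, with toy weights and failure probabilities):

static double discountedChainWeight(double[] weight, double[] failure) {
  // weight[i] / failure[i] describe hop i; the last entry is the return edge to the altruist
  double pathSuccProb = 1.0;
  double rawPathWeight = 0.0;
  double discounted = 0.0;
  int last = weight.length - 1;
  for (int i = 0; i < weight.length; i++) {
    if (i == last) {
      // Chain executes in its entirety (mirrors the nextV.equals(alt) branch above)
      discounted += (1.0 - failure[i]) * pathSuccProb * rawPathWeight;
    } else {
      // Chain executes exactly up to edge i, then edge i fails
      discounted += rawPathWeight * pathSuccProb * failure[i];
      pathSuccProb *= (1.0 - failure[i]);
      rawPathWeight += weight[i];
    }
  }
  return discounted;
}

// Example: three unit-weight hops with failure probability 0.2 and a zero-weight return edge
// that is assumed never to fail (toy numbers, not from any real pool):
// discountedChainWeight(new double[] {1, 1, 1, 0}, new double[] {0.2, 0.2, 0.2, 0.0})
//   = 1*0.8*0.2 + 2*0.64*0.2 + 3*0.512 = 1.952, versus a raw path weight of 3.0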
/**
 * Outputs the set of graphs we used for the JAIR Revisions for the Multi-Organ paper
 *
 * @param args (ignored)
 */
public static void main(String[] args) {

  // Use a constant edge failure distribution for these graphs
  ProbabilityDistribution failureDist = ProbabilityDistribution.CONSTANT;
  double constantFailureRate = 0.2;

  // Where are the (unzipped, raw) UNOS files located?
  String basePath = IOUtil.getBaseUNOSFilePath();

  // Generate draws from all UNOS match runs currently on the machine
  Random r = new Random();  // add a seed if you want
  UNOSGenerator gen = UNOSGenerator.makeAndInitialize(basePath, ',', r);
  IOUtil.dPrintln(
      "UNOS generator operating on #donors: " + gen.getDonors().size()
          + " and #recipients: " + gen.getRecipients().size());

  // Iterate over tuples of (#pairs, kidney vs liver pct, and #kidney altruists)
  List<Integer> numPairsList = Arrays.asList(new Integer[] {6000});  // , 5122, 877});
  List<Double> pctKidneyList = Arrays.asList(new Double[] {0.85377});  // , 1.0, 0.0});
  List<Integer> numKidneyAltsList = Arrays.asList(new Integer[] {100});  // , 100, 0});
  assert (numPairsList.size() == pctKidneyList.size());
  assert (pctKidneyList.size() == numKidneyAltsList.size());

  // Probability that a kidney donor is willing to give a liver to a liver patient
  List<Double> probKidneyToLiverList =
      Arrays.asList(new Double[] {0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0});

  // Number of base graphs to generate; just doing 32 for now (for one 64-core Steamroller node)
  int numGraphReps = 32;

  for (int typeIdx = 0; typeIdx < numPairsList.size(); typeIdx++) {
    int numPairs = numPairsList.get(typeIdx);
    double pctKidney = pctKidneyList.get(typeIdx);
    int numKidneyAlts = numKidneyAltsList.get(typeIdx);

    for (int graphRep = 0; graphRep < numGraphReps; graphRep++) {
      IOUtil.dPrintln("Graph repetition group: " + graphRep + "/" + numGraphReps + "...");

      // Want to use the same seed for each of the probKidneyToLiver graph generations,
      // but a new seed every time we loop through the entire list
      long seed = System.currentTimeMillis();

      for (double probKidneyToLiver : probKidneyToLiverList) {
        r.setSeed(seed);

        IOUtil.dPrintln(
            "Looking at numPairs=" + numPairs + ", pctKidney=" + pctKidney
                + ", numKidneyAlts=" + numKidneyAlts
                + ", probKidneyToLiver=" + probKidneyToLiver);

        // Base output filename, leads to files ${baseOut}.input, ${baseOut}-details.input
        String baseOut =
            "unos_v" + numPairs + "_p" + pctKidney + "_a" + numKidneyAlts + "_f0.0_kl"
                + probKidneyToLiver + "_i" + graphRep;

        // Generates base pool: unit edge weights, no failure probabilities
        Pool pool = gen.generate(numPairs, numKidneyAlts);

        // Assign failure probabilities to edges (can be ignored by optimizer);
        // constantFailureRate (0.2 here) is used as the constant rate for these runs
        FailureProbabilityUtil.setFailureProbability(pool, failureDist, r, constantFailureRate);

        // Post-process: remove some edges from kidney donors to liver patients, depending
        // on the probability of the former giving to the latter
        if (pctKidney > 0.0) {
          DriverJAIRRevisions.removeKidneyToLiverEdges(pool, r, probKidneyToLiver, pctKidney);
        }

        // Write to .input and -details.input files for the C++ optimizer
        pool.writeToUNOSKPDFile(baseOut);
      }  // probKidneyToLiver in probKidneyToLiverList
    }  // graphRep
  }  // typeIdx
}
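For the single (numPairs, pctKidney, numKidneyAlts) tuple left uncommented above, each inner iteration writes a pair of files named from baseOut; for example, with probKidneyToLiver = 0.3 and graphRep = 0 the outputs would be (pattern illustration only, not a record of an actual run):

unos_v6000_p0.85377_a100_f0.0_kl0.3_i0.input
unos_v6000_p0.85377_a100_f0.0_kl0.3_i0-details.input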