@Override public List<IBool> checkConsistency(IModel model) { if (!lazy_scheduling) { return Collections.emptyList(); } else { List<IBool> constraints = new ArrayList<>(); IConstraintNetwork network = solver.getConstraintNetwork(); final Map<IObject, Collection<IFormula>> formulas = getFormulas(model); IStaticCausalGraph causal_graph = solver.getStaticCausalGraph(); if (!closed_batteries && solver .getCurrentNode() .getFlaws() .stream() .map( flaw -> { if (flaw instanceof IGoal) { return causal_graph.getNode(((IGoal) flaw).getFormula().getType()); } else if (flaw instanceof IFact) { return causal_graph.getNode(((IFact) flaw).getFormula().getType()); } else if (flaw instanceof IDisjunctionFlaw) { return causal_graph.getNode(((IDisjunctionFlaw) flaw).getDisjunction()); } else if (flaw instanceof IPreferenceFlaw) { return causal_graph.getNode(((IPreferenceFlaw) flaw).getPreference()); } else { throw new AssertionError( "Flaw " + flaw.getClass().getName() + " is supported yet.."); } }) .noneMatch( node -> causal_graph.existsPath(node, causal_graph.getNode(charge_predicate)) || causal_graph.existsPath( node, causal_graph.getNode(consume_predicate)))) { // We need a resolver in order to re-open the resource when backtracking solver .getCurrentNode() .addResolver( new IResolver() { private boolean resolved = false; @Override public double getKnownCost() { return 0; } @Override public void resolve() { assert !resolved; // Let's close the batteries closed_batteries = true; resolved = true; } @Override public boolean isResolved() { return resolved; } @Override public void retract() { assert resolved; closed_batteries = false; resolved = false; } }); } if (closed_batteries) { instances.forEach( battery -> { Collection<IFormula> c_formulas = formulas.get(battery); BatteryTimeline timeline = new BatteryTimeline(solver, model, battery, c_formulas); for (int i = 0; i < timeline.values.size(); i++) { // <editor-fold defaultstate="collapsed" desc="battery overcharge"> if 
(model.evaluate( network.gt(timeline.values.get(i).max_amount, timeline.capacity))) { // We have a battery overcharge so we need to anticipate consumptions to charges Collection<IFormula> good_charges = new ArrayList<>(c_formulas.size()); Collection<IFormula> good_consumptions = new ArrayList<>(c_formulas.size()); for (IFormula f : c_formulas) { switch (f.getType().getName()) { case CHARGE_PREDICATE_NAME: if (model.evaluate( network.leq( (INumber) f.get(Constants.START), network.newReal(timeline.pulses.get(i).toString())))) { // Charges that affect current overcharge are all those that start before // this timeline value good_charges.add(f); } break; case CONSUME_PREDICATE_NAME: if (model.evaluate( network.geq( (INumber) f.get(Constants.END), network.newReal(timeline.pulses.get(i + 1).toString())))) { // Consumptions that might resolve the current overcharge are all those // that end after this timeline value good_consumptions.add(f); } break; default: throw new AssertionError(f.getType().getName()); } } List<IBool> or = new ArrayList<>(good_charges.size() * good_consumptions.size()); good_consumptions.forEach( (cons) -> { good_charges.forEach( (charge) -> { or.add( network.leq( cons.get(Constants.END), charge.get(Constants.START))); or.add(network.not(cons.getScope().eq(charge.getScope()))); }); }); or.add(network.geq(battery.get(CAPACITY), timeline.values.get(i).max_amount)); constraints.add(network.or(or.toArray(new IBool[or.size()]))); } // </editor-fold> // <editor-fold defaultstate="collapsed" desc="battery overconsumption"> if (model.evaluate( network.lt(timeline.values.get(i).min_amount, network.newReal("0")))) { // We have a battery overconsumption so we need to anticipate charges to // consumption Collection<IFormula> good_charges = new ArrayList<>(c_formulas.size()); Collection<IFormula> good_consumptions = new ArrayList<>(c_formulas.size()); for (IFormula f : c_formulas) { switch (f.getType().getName()) { case CHARGE_PREDICATE_NAME: if (model.evaluate( 
network.geq( (INumber) f.get(Constants.END), network.newReal(timeline.pulses.get(i + 1).toString())))) { // Charges that might resolve the current overconsumption are all those // that end after this timeline value good_charges.add(f); } break; case CONSUME_PREDICATE_NAME: if (model.evaluate( network.leq( (INumber) f.get(Constants.START), network.newReal(timeline.pulses.get(i).toString())))) { // Consumptions that affect current overconsumption are all those that // start before this timeline value good_consumptions.add(f); } break; default: throw new AssertionError(f.getType().getName()); } } List<IBool> or = new ArrayList<>(good_charges.size() * good_consumptions.size()); good_consumptions.forEach( (cons) -> { good_charges.forEach( (charge) -> { or.add( network.leq( charge.get(Constants.END), cons.get(Constants.START))); or.add(network.not(charge.getScope().eq(cons.getScope()))); }); }); or.add(network.leq(network.newReal("0"), timeline.values.get(i).min_amount)); constraints.add(network.or(or.toArray(new IBool[or.size()]))); } // </editor-fold> } if (timeline.values.isEmpty() ? 
model.evaluate(network.not(timeline.initial_amount.eq(timeline.final_amount))) : model.evaluate( network.not( timeline .values .get(timeline.values.size() - 1) .final_amount .eq(timeline.final_amount)))) { // The initial amount plus the sum of charges and consumptions is not equal to the // final amount final List<INumber> sum = new ArrayList<>(c_formulas.size() + 1); sum.add(battery.get(INITIAL_AMOUNT)); sum.addAll( c_formulas .stream() .map( f -> { switch (f.getType().getName()) { case CHARGE_PREDICATE_NAME: return f.get(C_AMOUNT); case CONSUME_PREDICATE_NAME: return network.negate((INumber) f.get(AMOUNT)); default: throw new AssertionError(f.getType().getName()); } }) .collect(Collectors.toList())); final List<IBool> or = new ArrayList<>(); c_formulas.forEach( formula -> { or.add(network.not(battery.eq(formula.getScope()))); }); instances .stream() .filter(instance -> (instance != battery)) .forEach( instance -> { formulas .get(instance) .forEach( formula -> { or.add(battery.eq(formula.getScope())); }); }); if (sum.size() == 1) { or.add(network.eq(sum.get(0), (INumber) battery.get(FINAL_AMOUNT))); } else { or.add( network.eq( network.add(sum.toArray(new INumber[sum.size()])), (INumber) battery.get(FINAL_AMOUNT))); } constraints.add(network.or(or.toArray(new IBool[or.size()]))); } }); } return constraints; } }
/**
 * Extracts landmarks (facts that must hold on every solution path) from the static causal graph
 * and computes natural orderings between them.
 *
 * <p>The procedure seeds the candidate set with the top-level goals, then repeatedly promotes a
 * candidate to a landmark, computes the preconditions of its first achievers (pruned by a relaxed
 * planning graph built without the candidate), and derives new unary candidates from the
 * intersection of those preconditions plus one disjunctive candidate from the leftovers.
 * Results are accumulated in the {@code candidates}, {@code landmarks} and {@code rpgs} fields.
 */
@Override
public void extractLandmarks() {
  // Reset state left over from any previous extraction.
  candidates.clear();
  landmarks.clear();
  rpgs.clear();
  Set<IStaticCausalGraph.INode> nodes =
      causal_graph.getNodes().stream().collect(Collectors.toSet());
  // We define the initial state: causal-graph nodes of predicates with an Active formula ..
  Set<IStaticCausalGraph.IPredicateNode> init_state =
      nodes
          .stream()
          .filter(node -> node instanceof IStaticCausalGraph.IPredicateNode)
          .map(node -> (IStaticCausalGraph.IPredicateNode) node)
          .flatMap(
              predicate ->
                  predicate
                      .getPredicate()
                      .getInstances()
                      .stream()
                      .map(instance -> (IFormula) instance)
                      .filter(formula -> formula.getFormulaState() == FormulaState.Active))
          .map(formula -> causal_graph.getNode(formula.getType()))
          .collect(Collectors.toSet());
  // .. and the goal state: nodes of Inactive formulas not already in the initial state
  Set<IStaticCausalGraph.IPredicateNode> goals =
      nodes
          .stream()
          .filter(node -> node instanceof IStaticCausalGraph.IPredicateNode)
          .map(node -> (IStaticCausalGraph.IPredicateNode) node)
          .flatMap(
              predicate ->
                  predicate
                      .getPredicate()
                      .getInstances()
                      .stream()
                      .map(instance -> (IFormula) instance)
                      .filter(formula -> formula.getFormulaState() == FormulaState.Inactive)
                      .map(formula -> causal_graph.getNode(formula.getType()))
                      .filter(node -> !init_state.contains(node)))
          .collect(Collectors.toSet());
  // We add high level goals to initial landmark candidates
  goals
      .stream()
      .filter(node -> !init_state.contains(node))
      .forEach(node -> candidates.add(new Landmark(node)));
  // Main landmark extraction procedure loop
  while (!candidates.isEmpty()) {
    // The landmark candidate to analyze
    ILandmark candidate = candidates.stream().findAny().get();
    // We remove the landmark candidate from the candidates..
    candidates.remove(candidate);
    // .. and we add it to the landmarks
    landmarks.put(candidate, new HashSet<>());
    // These are the (disjunctive) causal preconditions of the first achievers..
    Set<Set<IStaticCausalGraph.INode>> first_achievers_preconditions = new HashSet<>();
    candidate
        .getNodes()
        .forEach(node -> first_achievers_preconditions.addAll(getPreconditions(node)));
    // We compute the relaxed planning graph excluding the candidate ..
    RelaxedPlanningGraph rpg = new RelaxedPlanningGraph(solver, candidate.getNodes());
    rpg.extract();
    rpg.propagate();
    rpgs.put(candidate, rpg);
    // .. and extract the causal preconditions of the first achievers according to the relaxed
    // planning graph
    // specifically, we remove those causal preconditions which are not reachable according to the
    // relaxed planning graph without the candidate
    first_achievers_preconditions.removeIf(
        preconditions ->
            preconditions.stream().anyMatch(pre -> Double.isInfinite(rpg.level(pre))));
    // We compute the intersection of the preconditions (all of them must be true..)
    // NOTE(review): findAny().get() throws NoSuchElementException if every precondition set was
    // pruned above — confirm a reachable achiever always remains for any candidate.
    Set<IStaticCausalGraph.INode> intersection =
        new HashSet<>(first_achievers_preconditions.stream().findAny().get());
    first_achievers_preconditions.forEach(
        conjunction -> {
          intersection.retainAll(conjunction);
        });
    // Nodes already true in the initial state cannot be informative landmarks.
    intersection.removeIf(node -> init_state.contains(node));
    if (!intersection.isEmpty()) {
      // We remove from candidates those which are strictly worst than the current ones..
      candidates.removeIf(
          c ->
              c.getNodes().size() > 1
                  && intersection.stream().anyMatch(lm -> c.getNodes().contains(lm)));
      // We remove from landmarks those which are strictly worst than the current ones..
      landmarks
          .entrySet()
          .removeIf(
              c ->
                  c.getKey().getNodes().size() > 1
                      && intersection
                          .stream()
                          .anyMatch(lm -> c.getKey().getNodes().contains(lm)));
      // We add new candidates to candidates (if they are not already in candidates nor in
      // landmarks..)
      intersection
          .stream()
          .filter(
              node ->
                  candidates.stream().noneMatch(c -> c.getNodes().contains(node))
                      && landmarks
                          .entrySet()
                          .stream()
                          .noneMatch(c -> c.getKey().getNodes().contains(node)))
          .forEach(node -> candidates.add(new Landmark(node)));
    }
    // We compute a disjunctive landmark with oddments.. (at least one of them must be true..)
    Set<IStaticCausalGraph.INode> symmetric_difference =
        first_achievers_preconditions
            .stream()
            .flatMap(
                conjunction ->
                    conjunction.stream().filter(node -> !intersection.contains(node)))
            .collect(Collectors.toSet());
    if (!symmetric_difference.isEmpty()
        && symmetric_difference.stream().noneMatch(node -> init_state.contains(node))) {
      // We remove from candidates those which are strictly worst than the current disjunctive
      // landmark..
      candidates.removeIf(
          c ->
              c.getNodes().containsAll(symmetric_difference)
                  && c.getNodes().size() > symmetric_difference.size());
      // We remove from landmarks those which are strictly worst than the current disjunctive
      // landmark..
      landmarks
          .entrySet()
          .removeIf(
              c ->
                  c.getKey().getNodes().containsAll(symmetric_difference)
                      && c.getKey().getNodes().size() > symmetric_difference.size());
      // We consider the current disjunctive landmark only if there are not better candidates nor
      // better landmarks..
      if (candidates.stream().noneMatch(c -> symmetric_difference.containsAll(c.getNodes()))
          && landmarks
              .entrySet()
              .stream()
              .noneMatch(c -> symmetric_difference.containsAll(c.getKey().getNodes()))) {
        // We can add the disjunctive landmark to candidates
        candidates.add(new Landmark(symmetric_difference));
      }
    }
  }
  // We compute natural orders between landmarks: lms[i] precedes lms[j] when lms[j] is
  // unreachable in the relaxed planning graph that excludes lms[i].
  ILandmark[] lms = landmarks.keySet().stream().toArray(ILandmark[]::new);
  for (int i = 0; i < lms.length; i++) {
    if (lms[i].getNodes().size() == 1) {
      // We extract orderings between unary landmarks..
      RelaxedPlanningGraph c_rpg = rpgs.get(lms[i]);
      for (int j = i + 1; j < lms.length; j++) {
        // NOTE(review): the size check below re-tests lms[i], which the enclosing if already
        // guarantees — possibly lms[j].getNodes().size() == 1 was intended; confirm.
        if (lms[i].getNodes().size() == 1
            && lms[j]
                .getNodes()
                .stream()
                .allMatch(node -> Double.isInfinite(c_rpg.level(node)))) {
          // there is a natural order between landmarks lms[i] and lms[j]
          landmarks.get(lms[i]).add(lms[j]);
        }
      }
    }
  }
}