/**
 * Discard all context sensitive tuples which are covered by insensitive ones.
 * Delegates the actual pruning to each object's interval manager.
 */
 @Override
 public void drop_duplicates() {
   // The keys are never used, so iterate the managers directly instead of
   // walking keySet() and paying an extra get() lookup per entry.
   for (HeapInsIntervalManager im : pt_objs.values()) {
     im.remove_useless_intervals();
   }
 }
  /** Remember to clean the is_new flag */
  @Override
  public void do_after_propagation() {
    // Commit every freshly recorded tuple (per the contract above, this
    // cleans the is_new marks kept by the interval managers).
    for (HeapInsIntervalManager mgr : new_pts.values()) {
      mgr.flush();
    }
    // Start the next propagation round with an empty delta set.
    new_pts = new HashMap<AllocNode, HeapInsIntervalManager>();
  }
  /**
   * Records the interval-annotated flow edge (I1, I2, L) from this node to qv.
   * The interval manager for qv is created lazily on first use.
   *
   * @param qv the destination variable of the flow edge
   * @param I1 interval parameter passed through to the interval manager
   * @param I2 interval parameter passed through to the interval manager
   * @param L interval length parameter
   * @return true if the edge added new information (i.e. was not already covered)
   */
  @Override
  public boolean add_simple_constraint_3(IVarAbstraction qv, long I1, long I2, long L) {
    HeapInsIntervalManager mgr = flowto.get(qv);
    if (mgr == null) {
      // First edge towards qv: set up its interval manager.
      mgr = new HeapInsIntervalManager();
      flowto.put((HeapInsNode) qv, mgr);
    }
    return null != mgr.add_new_interval(I1, I2, L);
  }
  /**
   * Inserts the points-to tuple (obj, I1, I2, L) into this node's points-to set.
   * Tuples that carry new information are also remembered in new_pts so the next
   * propagation round only processes the delta.
   *
   * @param obj the allocation site being pointed to
   * @param I1 interval parameter passed through to the interval manager
   * @param I2 interval parameter passed through to the interval manager
   * @param L interval length parameter
   * @return true if the tuple was not already covered by existing intervals
   */
  @Override
  public boolean add_points_to_3(AllocNode obj, long I1, long I2, long L) {
    HeapInsIntervalManager mgr = pt_objs.get(obj);
    if (mgr == null) {
      // First tuple for this allocation site: set up its interval manager.
      mgr = new HeapInsIntervalManager();
      pt_objs.put(obj, mgr);
    }

    if (mgr.add_new_interval(I1, I2, L) == null) {
      return false;
    }

    // Track the manager so differential propagation sees the new tuple.
    new_pts.put(obj, mgr);
    return true;
  }
  /**
   * Resets this node to a pristine state: the delta set is replaced, the complex
   * constraints are dropped, and every interval manager is cleared.
   */
  @Override
  public void reconstruct() {
    new_pts = new HashMap<AllocNode, HeapInsIntervalManager>();

    if (complex_cons != null) {
      complex_cons.clear();
    }

    // Wipe all flow-to edge intervals.
    if (flowto != null) {
      for (HeapInsIntervalManager mgr : flowto.values()) {
        mgr.clear();
      }
    }

    // Wipe all points-to tuple intervals.
    if (pt_objs != null) {
      for (HeapInsIntervalManager mgr : pt_objs.values()) {
        mgr.clear();
      }
    }
  }
 /** Ask every flow-edge manager that still has unprocessed segments to merge its edges. */
 private void do_flow_edge_interval_merge() {
   for (HeapInsIntervalManager mgr : flowto.values()) {
     if (mgr.isThereUnprocessedObject()) {
       mgr.merge_flow_edges();
     }
   }
 }
 /** Merge the context sensitive tuples, and make a single insensitive tuple */
 private void do_pts_interval_merge() {
   // Only the managers holding new tuples (the delta set) need merging.
   for (HeapInsIntervalManager mgr : new_pts.values()) {
     mgr.merge_points_to_tuples(compact_budget_rep);
   }
 }
 /**
  * Looks up the interval representation of the points-to set for obj.
  *
  * @param obj the allocation site queried
  * @return the interval array, or null if obj has no recorded tuples
  */
 private SegmentNode[] find_points_to(AllocNode obj) {
   HeapInsIntervalManager mgr = pt_objs.get(obj);
   return mgr == null ? null : mgr.get_intervals();
 }
 // ---------------------------------Private Functions----------------------------------------
 /**
  * Looks up the interval representation of the flow edges towards qv.
  *
  * @param qv the destination node queried
  * @return the interval array, or null if no edge to qv is recorded
  */
 private SegmentNode[] find_flowto(HeapInsNode qv) {
   HeapInsIntervalManager mgr = flowto.get(qv);
   return mgr == null ? null : mgr.get_intervals();
 }
  /**
   * An efficient implementation of differential propagation.
   *
   * <p>Two phases: (1) for every NEW points-to tuple, instantiate the complex
   * (load/store) constraints into fresh simple flow edges; (2) pair points-to
   * tuples with flow edges, but only process pairs where at least one side is
   * new (is_new) — old x old pairs were handled in earlier rounds.
   *
   * @param ptAnalyzer analysis context; resolves instance fields and cast feasibility
   * @param worklist nodes whose information grew are pushed here for re-processing
   */
  @Override
  public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
    int i, j;
    AllocNode obj;
    SegmentNode pts, pe, int_entry1[], int_entry2[];
    HeapInsIntervalManager him;
    HeapInsNode qn, objn;
    boolean added, has_new_edges;

    // We first build the new flow edges via the field dereferences
    if (complex_cons != null) {
      // Only the delta (new_pts) needs to be matched against the complex constraints.
      for (Map.Entry<AllocNode, HeapInsIntervalManager> entry : new_pts.entrySet()) {
        obj = entry.getKey();
        int_entry1 = entry.getValue().get_intervals();

        for (PlainConstraint pcons : complex_cons) {
          // Construct the two variables in assignment
          objn = (HeapInsNode) ptAnalyzer.findAndInsertInstanceField(obj, pcons.f);
          qn = (HeapInsNode) pcons.otherSide;

          for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
            pts = int_entry1[i];
            // NOTE(review): stops at the first non-new segment — appears to rely
            // on the manager keeping is_new segments at the head of each list.
            while (pts != null && pts.is_new) {
              switch (pcons.type) {
                case GeomPointsTo.STORE_CONS:
                  // Store, qv -> pv.field
                  // pts.I2 may be zero, pts.L may be less than zero
                  if (qn.add_simple_constraint_3(
                      objn,
                      pcons.code == GeomPointsTo.ONE_TO_ONE ? pts.I1 : 0,
                      pts.I2,
                      pts.L < 0 ? -pts.L : pts.L)) worklist.push(qn);
                  break;

                case GeomPointsTo.LOAD_CONS:
                  // Load, pv.field -> qv
                  if (objn.add_simple_constraint_3(
                      qn,
                      pts.I2,
                      pcons.code == GeomPointsTo.ONE_TO_ONE ? pts.I1 : 0,
                      pts.L < 0 ? -pts.L : pts.L)) worklist.push(objn);
                  break;

                default:
                  throw new RuntimeException("Wrong Complex Constraint");
              }

              pts = pts.next;
            }
          }
        }
      }
    }

    for (Map.Entry<HeapInsNode, HeapInsIntervalManager> entry1 : flowto.entrySet()) {
      // Second get the flow-to intervals
      added = false;
      qn = entry1.getKey();
      him = entry1.getValue();
      int_entry2 = him.get_intervals();
      has_new_edges = him.isThereUnprocessedObject();
      // Delta selection: a flow edge with new segments must be paired against ALL
      // points-to tuples; an unchanged edge only needs the new tuples (new_pts).
      Map<AllocNode, HeapInsIntervalManager> objs = (has_new_edges ? pt_objs : new_pts);

      for (Map.Entry<AllocNode, HeapInsIntervalManager> entry2 : objs.entrySet()) {
        // First get the points-to intervals
        obj = entry2.getKey();
        // Skip objects whose type can never reach qn's declared type.
        if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getWrappedNode().getType())) continue;

        int_entry1 = entry2.getValue().get_intervals();

        // We pair up all the interval points-to tuples and interval flow edges
        for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
          pts = int_entry1[i];
          while (pts != null) {
            // With no new edges, only new tuples matter; old x old was done before.
            if (!has_new_edges && !pts.is_new) break;

            for (j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
              pe = int_entry2[j];
              while (pe != null) {
                if (pts.is_new || pe.is_new) {
                  // Propagate this object
                  if (add_new_points_to_tuple(pts, pe, obj, qn)) added = true;
                } else break;

                pe = pe.next;
              }
            }

            pts = pts.next;
          }
        }
      }

      // qn gained at least one tuple, so its own constraints must be revisited.
      if (added) worklist.push(qn);

      // Now, we clean the new edges if necessary
      if (has_new_edges) {
        him.flush();
      }
    }
  }