Example #1
  /** Returns true if any object in this points-to set has one of the given types. */
  @Override
  public boolean test_points_to_has_types(Set<Type> types) {
    for (AllocNode an : pt_objs.keySet()) {
      if (types.contains(an.getType())) {
        return true;
      }
    }

    return false;
  }
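The same check can be phrased with the Stream API. The sketch below is self-contained and purely illustrative: the generic key/type parameters and the toy main method stand in for AllocNode and Type, which are not reproduced here.

import java.util.Map;
import java.util.Set;
import java.util.function.Function;

public class HasTypeSketch {
  // Generic form of the test above: does any key of the map have a type contained in 'wanted'?
  static <K, T> boolean anyKeyHasType(Map<K, ?> map, Set<T> wanted, Function<K, T> typeOf) {
    return map.keySet().stream().anyMatch(k -> wanted.contains(typeOf.apply(k)));
  }

  public static void main(String[] args) {
    Map<String, Integer> objs = Map.of("allocA", 1, "allocB", 2);
    // Pretend an allocation's "type" is its last character
    System.out.println(anyKeyHasType(objs, Set.of('A'), k -> k.charAt(k.length() - 1))); // true
  }
}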
Example #2
  /**
   * Models the assignment of the abstract object {@code obj} to {@code lhs}: an allocation
   * node is created for the object and connected to the target variable in the PAG. A field
   * reference on the left-hand side is routed through a fresh global variable node.
   */
  protected void assignObjectToImpl(ReferenceVariable lhs, AbstractObject obj) {
    AllocNode objNode =
        pag.makeAllocNode(new Pair("AbstractObject", obj.getType()), obj.getType(), null);

    VarNode var;
    if (lhs instanceof FieldRefNode) {
      // Route the field reference through an intermediate global variable node
      var = pag.makeGlobalVarNode(objNode, objNode.getType());
      pag.addEdge((Node) lhs, var);
    } else {
      var = (VarNode) lhs;
    }
    pag.addEdge(objNode, var);
  }
Example #3
  /** Dumps every points-to interval as a "(object, I1, I2, L)" tuple, one segment per line. */
  @Override
  public void print_context_sensitive_points_to(PrintStream outPrintStream) {
    for (AllocNode obj : pt_objs.keySet()) {
      SegmentNode[] int_entry = find_points_to(obj);
      for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
        // Walk the linked list of interval segments stored in each division
        for (SegmentNode p = int_entry[j]; p != null; p = p.next) {
          outPrintStream.println(
              "(" + obj.toString() + ", " + p.I1 + ", " + p.I2 + ", " + p.L + ")");
        }
      }
    }
  }
Example #4
  /**
   * Increments the per-class allocation count for the given node. Array allocations are
   * attributed to the class of their element type; nodes of non-reference type are skipped.
   */
  private void countNode(Map<SootClass, Integer> nodeCount, AllocNode node) {
    SootClass clz = null;
    if (node.getType() instanceof RefType) {
      clz = ((RefType) node.getType()).getSootClass();
    } else if (node.getType() instanceof ArrayType
        && ((ArrayType) node.getType()).getArrayElementType() instanceof RefType) {
      clz = ((RefType) ((ArrayType) node.getType()).getArrayElementType()).getSootClass();
    }

    if (clz != null) {
      // Start the counter at zero on first sight, then increment
      if (!nodeCount.containsKey(clz)) {
        nodeCount.put(clz, 0);
      }

      nodeCount.put(clz, nodeCount.get(clz) + 1);
    }
  }
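Since Java 8, the "put 0 if absent, then increment" idiom at the end of countNode can be collapsed into a single Map.merge call. A minimal, self-contained sketch, with String keys standing in for SootClass:

import java.util.LinkedHashMap;
import java.util.Map;

public class CountSketch {
  public static void main(String[] args) {
    Map<String, Integer> nodeCount = new LinkedHashMap<>();
    // merge() stores 1 on the first occurrence and adds 1 on every later one
    nodeCount.merge("java.lang.String", 1, Integer::sum);
    nodeCount.merge("java.lang.String", 1, Integer::sum);
    nodeCount.merge("java.lang.Object", 1, Integer::sum);
    System.out.println(nodeCount); // {java.lang.String=2, java.lang.Object=1}
  }
}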
Example #5
  /**
   * Creates the bi-map of NewExpr <-> AllocNode and collects all allocation nodes
   * (both insensitive and context-sensitive).
   */
  private void createNewToAllocMap() {
    newToAllocNodeMap = HashBiMap.create();
    allAllocNodes = new LinkedHashSet<AllocNode>();

    Map<SootClass, Integer> nodeCount = new LinkedHashMap<SootClass, Integer>();

    int realSize = 0;

    for (AllocNode node : ptsProvider.getAllocNodes()) {
      if (!(node instanceof InsensitiveAllocNode)) {
        logger.error("Found non-insensitive node in ptsProvider.getAllocNodes()");
        droidsafe.main.Main.exit(1);
      }

      InsensitiveAllocNode insNode = (InsensitiveAllocNode) node;

      newToAllocNodeMap.put(node.getNewExpr(), insNode);
      realSize++;
      allAllocNodes.add(node);

      // countNode(nodeCount, node);

      for (Map.Entry<Context, ObjectSensitiveAllocNode> entry :
          insNode.getContextNodeMap().entrySet()) {
        allAllocNodes.add(entry.getValue());
        // countNode(nodeCount, node);
      }
    }

    System.out.println("Alloc node size (insensitive objects): " + realSize);

    /* used to print a sorted list of alloc nodes created
    Map<SootClass, Integer> sortedNodeCount = SootUtils.sortByValue(nodeCount);
    for (Map.Entry<SootClass, Integer> entry : sortedNodeCount.entrySet()) {
        System.out.println(entry.getValue() + " " + entry.getKey());
    }
     */
  }
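Example #5 keeps the NewExpr <-> AllocNode mapping in a Guava HashBiMap, which enforces uniqueness in both directions and provides a reverse view. A minimal sketch of that behavior, with String and Integer as placeholder key/value types:

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;

public class BiMapSketch {
  public static void main(String[] args) {
    // Keys and values must both be unique; inverse() exposes the value -> key view
    BiMap<String, Integer> newToAlloc = HashBiMap.create();
    newToAlloc.put("new A()", 1);
    newToAlloc.put("new B()", 2);
    System.out.println(newToAlloc.get("new A()"));   // 1
    System.out.println(newToAlloc.inverse().get(2)); // new B()
  }
}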
Example #6
  /** An efficient implementation of differential propagation. */
  @Override
  public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
    int i, j;
    AllocNode obj;
    SegmentNode pts, pe;
    SegmentNode[] int_entry1, int_entry2;
    HeapInsIntervalManager him;
    HeapInsNode qn, objn;
    boolean added, has_new_edges;

    // We first build the new flow edges via the field dereferences
    if (complex_cons != null) {
      for (Map.Entry<AllocNode, HeapInsIntervalManager> entry : new_pts.entrySet()) {
        obj = entry.getKey();
        int_entry1 = entry.getValue().get_intervals();

        for (PlainConstraint pcons : complex_cons) {
          // Construct the two variables in assignment
          objn = (HeapInsNode) ptAnalyzer.findAndInsertInstanceField(obj, pcons.f);
          qn = (HeapInsNode) pcons.otherSide;

          for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
            pts = int_entry1[i];
            while (pts != null && pts.is_new) {
              switch (pcons.type) {
                case GeomPointsTo.STORE_CONS:
                  // Store, qv -> pv.field
                  // pts.I2 may be zero, pts.L may be less than zero
                  if (qn.add_simple_constraint_3(
                      objn,
                      pcons.code == GeomPointsTo.ONE_TO_ONE ? pts.I1 : 0,
                      pts.I2,
                      pts.L < 0 ? -pts.L : pts.L)) worklist.push(qn);
                  break;

                case GeomPointsTo.LOAD_CONS:
                  // Load, pv.field -> qv
                  if (objn.add_simple_constraint_3(
                      qn,
                      pts.I2,
                      pcons.code == GeomPointsTo.ONE_TO_ONE ? pts.I1 : 0,
                      pts.L < 0 ? -pts.L : pts.L)) worklist.push(objn);
                  break;

                default:
                  throw new RuntimeException("Wrong Complex Constraint");
              }

              pts = pts.next;
            }
          }
        }
      }
    }

    for (Map.Entry<HeapInsNode, HeapInsIntervalManager> entry1 : flowto.entrySet()) {
      // Second get the flow-to intervals
      added = false;
      qn = entry1.getKey();
      him = entry1.getValue();
      int_entry2 = him.get_intervals();
      has_new_edges = him.isThereUnprocessedObject();
      Map<AllocNode, HeapInsIntervalManager> objs = (has_new_edges ? pt_objs : new_pts);

      for (Map.Entry<AllocNode, HeapInsIntervalManager> entry2 : objs.entrySet()) {
        // First get the points-to intervals
        obj = entry2.getKey();
        if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getWrappedNode().getType())) continue;

        int_entry1 = entry2.getValue().get_intervals();

        // We pair up all the interval points-to tuples and interval flow edges
        for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
          pts = int_entry1[i];
          while (pts != null) {
            if (!has_new_edges && !pts.is_new) break;

            for (j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
              pe = int_entry2[j];
              while (pe != null) {
                if (pts.is_new || pe.is_new) {
                  // Propagate this object
                  if (add_new_points_to_tuple(pts, pe, obj, qn)) added = true;
                } else break;

                pe = pe.next;
              }
            }

            pts = pts.next;
          }
        }
      }

      if (added) worklist.push(qn);

      // Now, we clean the new edges if necessary
      if (has_new_edges) {
        him.flush();
      }
    }
  }
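Example #6 is the differential half of the solver: only segments flagged is_new (new points-to tuples or new flow edges) are paired up, and a node is pushed back onto the worklist only when its points-to set actually grows. The sketch below shows that general worklist idea on plain sets; it deliberately ignores the interval encoding, field constraints, and type filtering of the real HeapInsNode, and every name in it is illustrative.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DiffPropagationSketch {
  public static void main(String[] args) {
    // pointsTo holds the full solution; delta holds facts not yet propagated (the "new_pts" role)
    Map<String, Set<String>> pointsTo = new LinkedHashMap<>();
    Map<String, Set<String>> delta = new LinkedHashMap<>();
    Map<String, List<String>> flowTo = Map.of("a", List.of("b"), "b", List.of("c"));

    delta.put("a", new HashSet<>(Set.of("o1")));
    Deque<String> worklist = new ArrayDeque<>(List.of("a"));

    while (!worklist.isEmpty()) {
      String n = worklist.poll();
      Set<String> newObjs = delta.remove(n);
      if (newObjs == null || newObjs.isEmpty()) continue;
      pointsTo.computeIfAbsent(n, k -> new HashSet<>()).addAll(newObjs);

      // Pair only the *new* objects with the outgoing flow edges
      for (String succ : flowTo.getOrDefault(n, List.of())) {
        for (String obj : newObjs) {
          boolean unseen = !pointsTo.getOrDefault(succ, Set.of()).contains(obj);
          if (unseen && delta.computeIfAbsent(succ, k -> new HashSet<>()).add(obj)) {
            worklist.add(succ); // re-queue only when something actually changed
          }
        }
      }
    }
    System.out.println(pointsTo); // {a=[o1], b=[o1], c=[o1]}
  }
}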