public static void writeReport() {
    // make sure collapsed call graph has been run
    CollaspedCallGraph.v();

    // App name falls back to a generic label when none was configured.
    String name = "".equals(Config.v().appName) ? "android-app" : Config.v().appName;

    // Extra command-line info becomes a "_"-separated suffix of the file name.
    String additionalInfo = Config.v().additionalInfo;
    if (!"".equals(additionalInfo)) {
      additionalInfo = "_" + additionalInfo.replaceAll(" ", "_");
    }

    String fileName =
        name + "_" + getConfiguration().replaceAll(" ", "_") + additionalInfo + "_pta-report.txt";

    // try-with-resources guarantees the writer is closed even if a write fails
    // (the original leaked the FileWriter on any IOException mid-report).
    try (FileWriter fw = new FileWriter(Project.v().getOutputDir() + File.separator + fileName)) {
      // write configuration details
      fw.write("App Name: " + name + "\n");
      fw.write("Config: " + getConfiguration() + "\n");
      fw.write("Cmdline supplied extra info: " + Config.v().additionalInfo + "\n");

      fw.write(refinementStats.toString());

      // write final run of pta
      fw.write(SparkEvaluator.v().toString());

      // write total lines of code
      fw.write("\nTotal Reachable LOC: " + getReachableLines() + "\n\n");

      // write information flow
      fw.write(infoFlowResults());

      // fw.write(finegrainedFlowResults());
    } catch (IOException e) {
      // Report generation is best-effort, but never fail silently: surface the
      // error so a missing report file can be diagnosed.
      System.err.println("Error writing pta report: " + e.getMessage());
    }
  }
  @Override
  protected void runInternal() {
    // Run the Spark points-to analysis and cache its results on this object.
    // NOTE: the statement order here matters — Soot singletons (G, Scene) are
    // mutated in sequence, so do not reorder.

    // don't print crap to screen!
    // Redirect Soot's global output stream to a null sink for the duration of
    // the Spark run, then restore it below.
    G.v().out = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
    Scene.v().loadDynamicClasses();

    setSparkPointsToAnalysis();

    // other passes can print crap now
    G.v().out = System.out;

    // Cache the pointer-assignment graph produced by Spark.
    ptsProvider = (PAG) Scene.v().getPointsToAnalysis();

    typeManager = ptsProvider.getTypeManager();

    // cache the call graph
    callGraph = Scene.v().getCallGraph();

    createNewToAllocMap();

    /*
    for (SootMethod method : getReachableMethods()) {
        Set<MethodOrMethodContext> mcs = getMethodContexts(method);
        if (mcs.size() > 30)
            System.out.println(method + " " + mcs.size());
    }
     */

    // dumpReachablesAndAllocNodes();
    // dumpCallGraphReachablesCSV();
    // dumpOutdegreesCSV();

    // Optional debug dumps controlled by the global Config flags.
    if (Config.v().dumpPta) {
      dumpPTA(Project.v().getOutputDir() + File.separator + "pta.txt");
    }

    if (Config.v().dumpCallGraph) {
      // dumpCallGraph(Project.v().getOutputDir() + File.separator + "callgraph.dot");
      // runCount distinguishes call graph dumps across repeated runs.
      String fileName = String.format("callgraph%d.txt", runCount++);
      dumpTextGraph(Project.v().getOutputDir() + File.separator + fileName);
    }

    // System.out.println(SparkEvaluator.v().toString());
  }
  /**
   * Builds a short human-readable summary of the analysis configuration:
   * the object-sensitivity depth followed by a space-separated list of the
   * non-default option flags. Used both in the report body and, with spaces
   * replaced, in the report file name.
   *
   * @return configuration summary string, e.g. {@code "2-static-init-context "}
   */
  private static String getConfiguration() {
    // StringBuilder instead of StringBuffer: no synchronization needed here.
    StringBuilder buf = new StringBuilder();

    buf.append(Config.v().kobjsens).append("-");

    if (Config.v().ignoreNoContextFlows) buf.append("ignore-no-context-flows ");

    if (Config.v().fullContextForStrings) buf.append("full-context-for-strings ");

    if (Config.v().fullContextForGUI) buf.append("full-context-for-gui ");

    if (Config.v().staticinitcontext) buf.append("static-init-context ");

    if (!Config.v().cloneStaticCalls) {
      buf.append("noclonestatics ");
    }

    if (!Config.v().addFallbackModeling) {
      buf.append("nofallback ");
    }

    return buf.toString();
  }
  /**
   * Configures and runs the Spark points-to analysis.
   *
   * <p>Builds the default Spark option map, applies the k-object-sensitivity
   * settings derived from {@link Config}, then overlays any caller-supplied
   * overrides from {@code opts} before invoking the transformer.
   */
  void setSparkPointsToAnalysis() {
    logger.info("[spark] Starting analysis ...");

    HashMap<String, String> opt = new HashMap<String, String>();
    opt.put("enabled", "true");
    opt.put("verbose", "false");
    opt.put("ignore-types", "false");
    opt.put("force-gc", "false");
    opt.put("pre-jimplify", "false");
    opt.put("vta", "false");
    opt.put("rta", "false");
    opt.put("field-based", "false");

    opt.put("simulate-natives", "false");
    opt.put("simple-edges-bidirectional", "false");
    opt.put("on-fly-cg", "true");
    opt.put("simplify-offline", "false");
    opt.put("simplify-sccs", "false");
    opt.put("ignore-types-for-sccs", "false");
    opt.put("propagator", "worklist");
    opt.put("set-impl", "double");
    opt.put("double-set-old", "hybrid");
    opt.put("double-set-new", "hybrid");
    opt.put("dump-html", "false");
    opt.put("dump-pag", "false");
    opt.put("dump-solution", "false");
    opt.put("topo-sort", "false");
    opt.put("dump-types", "true");
    opt.put("class-method-var", "true");
    opt.put("dump-answer", "false");
    opt.put("add-tags", "false");
    opt.put("set-mass", "false");
    opt.put("types-for-sites", "false");

    opt.put("merge-stringbuffer", Boolean.toString(Config.v().impreciseStrings));
    opt.put("string-constants", "true");

    // k-object-sensitivity configuration, driven by the global Config.
    opt.put("kobjsens", Integer.toString(Config.v().kobjsens));

    opt.put("kobjsens-context-for-static-inits", Boolean.toString(Config.v().staticinitcontext));

    opt.put("kobjsens-no-context-list", buildNoContextList());

    opt.put("kobjsens-types-for-context", Boolean.toString(Config.v().typesForContext));

    opt.put("kobjsens-important-allocators", "");

    // BUGFIX: was opts.put(...), which mutated the caller-supplied override map
    // instead of the local defaults — and would have clobbered any explicit
    // "kobjsens-extra-array-context" override before the copy loop below.
    if (Config.v().extraArrayContext) opt.put("kobjsens-extra-array-context", "true");

    // Classes whose allocations should carry limited heap context.
    StringBuffer limitHeapContext = new StringBuffer();

    if (!Config.v().fullContextForGUI) {
      addGUIClasses(limitHeapContext);
    }

    if (!Config.v().fullContextForStrings) {
      addStringClasses(limitHeapContext);
    }

    logger.info("limit heap context list: {}", limitHeapContext.toString());
    opt.put("kobjsens-limit-heap-context", limitHeapContext.toString());

    // now overwrite options with options that are passed in
    for (Map.Entry<String, String> entry : opts.entrySet()) {
      opt.put(entry.getKey(), entry.getValue());
    }

    /*
    //some context sensitivity
    opt.put("cs-demand", "false");
    opt.put("lazy-pts", "true");
    opt.put("passes", "10");
    opt.put("traversal", "75000");
     */

    // set k for the run...
    // "kobjsens" is always set above, but an override from opts could supply a
    // non-numeric value, in which case this throws NumberFormatException.
    k = Integer.parseInt(opt.get("kobjsens"));

    SparkTransformer.v().transform("", opt);

    logger.info("[spark] Done!");
  }