Example #1
  private static void startCDC() throws Exception {
    long remainder = 7;
    for (int i = startBoundary; i <= endBoundary; i += increment) {
      long minBoundary = min_multiplier * i; // we will set the mod value as the minimum boundary
      long maxBoundary = max_multiplier * i; // we will set this as the maximum boundary
      long divisor1 = i; // this will be used to mod the results
      long divisor2 = i / 2 + 1; // the backup divisor is half the original divisor
      long divisor3 = i / 4 + 1;
      // note: we only care about the size of the second file, since that is the file we are measuring
      totalSize = fileArray.get(1).length;
      System.out.print(divisor1 + " " + divisor2 + " " + divisor3 + " ");
      runBytes(
          window,
          divisor1,
          divisor2,
          divisor3,
          remainder,
          minBoundary,
          maxBoundary); // run the Rabin-Karp algorithm
      // average block size for this boundary setting
      double blockSize = (double) totalSize / (double) numOfPieces;
      double ratio = (double) coverage / (double) totalSize;
      System.out.println(blockSize + " " + ratio);

      // clear the hash table and counters so the values reset for the next round of boundaries
      coverage = 0;
      numOfPieces = 0;
      table.clear();
      HashClass.duplicate_counter = 0;
      HashClass.max_list_length = 0;
    }
  }
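
A note on the divisors and remainder above: they drive a Rabin-Karp style content-defined chunking test, where a cut point is declared when the rolling hash of the current window, taken modulo a divisor, equals the remainder, and the smaller backup divisors make cuts easier to find once a chunk grows past the maximum boundary. The sketch below illustrates that boundary test only; the class and method names, and the fallback policy, are assumptions for illustration, not the actual runBytes implementation.

// Hypothetical sketch of the boundary test implied by divisor1/2/3, remainder,
// minBoundary, and maxBoundary; the real runBytes() may differ in detail.
final class ChunkBoundarySketch {

  // A cut is only allowed once the chunk has reached the minimum size. The
  // primary divisor is tried first; past the maximum size the easier-to-hit
  // backup divisors are also tried so overly long chunks still terminate.
  static boolean shouldCut(
      long windowHash,
      long chunkLength,
      long divisor1,
      long divisor2,
      long divisor3,
      long remainder,
      long minBoundary,
      long maxBoundary) {
    if (chunkLength < minBoundary) {
      return false;
    }
    if (windowHash % divisor1 == remainder) {
      return true;
    }
    if (chunkLength >= maxBoundary) {
      return windowHash % divisor2 == remainder || windowHash % divisor3 == remainder;
    }
    return false;
  }
}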
Example #2
  public static int close() {
    int count = 0;

    Iterator iterator = m_notUsedConnection.iterator();
    while (iterator.hasNext()) {
      try {
        ((ConnectionWrapper) iterator.next()).close();
        count++;
      } catch (Exception e) {
        // ignore failures while closing; continue with the remaining connections
      }
    }
    m_notUsedConnection.clear();

    iterator = m_usedUsedConnection.iterator();
    while (iterator.hasNext()) {
      try {
        ConnectionWrapper wrapper = (ConnectionWrapper) iterator.next();
        wrapper.close();
        if (DEBUG) {
          // in debug mode, print the stack trace recorded for this still-in-use connection
          wrapper.debugInfo.printStackTrace();
        }
        count++;
      } catch (Exception e) {
        // ignore failures while closing; continue with the remaining connections
      }
    }
    m_usedUsedConnection.clear();

    return count;
  }
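
A possible call site for this shutdown routine, shown only as a sketch: ConnectionPool is a stand-in name here, since the snippet does not show which class owns m_notUsedConnection and m_usedUsedConnection.

// Hypothetical usage sketch; ConnectionPool stands in for whichever class
// declares the static close() above.
public class PoolShutdownExample {
  public static void main(String[] args) {
    int closed = ConnectionPool.close();
    System.out.println("Closed " + closed + " pooled connections on shutdown");
  }
}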
Example #3
  /*
   * - Overloaded method just for the Internet Archive dataset
   * - The first two params accumulate the block size and ratio size, respectively, across all runs
   * - The remaining params are the raw file bytes and the hashed versions of the files the code will run against
   * - current_  -- the lists that contain the most recent file version
   * - previous_ -- the lists that contain the previous versions
   */
  private static void startCDC(
      double[] block_size_list,
      double[] ratio_size_list,
      byte[] current_array,
      byte[] previous_array,
      ArrayList<Long> current_md5Hashes,
      ArrayList<Long> previous_md5Hashes)
      throws Exception {
    long remainder = 7; // this is the remainder that we will be comparing with
    int index = 0; // used to traverse the two lists
    for (int i = startBoundary; i <= endBoundary; i += increment) {
      long minBoundary = min_multiplier * i; // we will set the mod value as the minimum boundary
      long maxBoundary = max_multiplier * i; // we will set this as the maximum boundary
      long divisor1 = i; // this will be used to mod the results
      long divisor2 = i / 2 + 1; // the backup divisor is half the original divisor
      long divisor3 = i / 4 + 1;
      // System.out.print( i+" ");
      storeChunks(
          previous_array,
          previous_md5Hashes,
          divisor1,
          divisor2,
          divisor3,
          remainder,
          minBoundary,
          maxBoundary); // cut up the first file and store it
      runTddd(
          current_array,
          current_md5Hashes,
          divisor1,
          divisor2,
          divisor3,
          remainder,
          minBoundary,
          maxBoundary); // now run the chunking on the second (current) file
      // average block size for this boundary setting
      double blockSize = (double) totalSize / (double) numOfPieces;
      double ratio = (double) coverage / (double) totalSize;

      // extra step: accumulate the results into the per-boundary arrays
      block_size_list[index] += blockSize;
      ratio_size_list[index] += ratio;
      ++index;
      // clear the hash table and counters so the values reset for the next round of boundaries
      coverage = 0;
      numOfPieces = 0;
      table.clear();
      HashClass.duplicate_counter = 0;
      HashClass.max_list_length = 0;
    }
  }
Example #4
 public boolean hashSetContainsSameOperations(HashSet set1, HashSet set2) {
   boolean result = false;
   HashSet s1 = (HashSet) set1.clone();
   HashSet s2 = (HashSet) set2.clone();
   // first part, are all elements of s1 also in s2?
   boolean one = false;
   Iterator it = set1.iterator();
   while (it.hasNext()) {
     PDMOperation d = (PDMOperation) it.next();
     if (s2.contains(d)) {
       s1.remove(d);
     }
   }
   if (s1.isEmpty()) {
     one = true;
   }
   // second part, are all elements of s2 also in s1?
   boolean two = false;
   HashSet s3 = (HashSet) set1.clone();
   HashSet s4 = (HashSet) set2.clone();
   Iterator it2 = set2.iterator();
   while (it2.hasNext()) {
     PDMOperation d = (PDMOperation) it2.next();
     if (s3.contains(d)) {
       s4.remove(d);
     }
   }
   if (s4.isEmpty()) {
     two = true;
   }
   // administrative stuff: clear the temporary working copies
   s1.clear();
   s2.clear();
   s3.clear();
   s4.clear();
   result = one && two;
   return result;
 }
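
The two passes above amount to checking mutual containment of the two sets. A compact equivalent is sketched below; it is not the code as it stands, and it relies on PDMOperation's equals()/hashCode() exactly as the HashSet lookups above do.

 // Sketch of an equivalent check via mutual containment.
 public boolean hashSetContainsSameOperations(HashSet<PDMOperation> set1, HashSet<PDMOperation> set2) {
   return set1.containsAll(set2) && set2.containsAll(set1);
 }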
Example #5
  /* process unification constraints */
  protected void process(UnifyConstraint c) {
    Node n1 = c.node1.getRep();
    Node n2 = c.node2.getRep();
    assert (n1.graph == this);
    assert (n2.graph == this);
    if (n1 == n2) return;

    if (heapfix && n2.isheap && !n1.isheap) {
      Node sw = n1;
      n1 = n2;
      n2 = sw;
    }

    n1.mergeTags(n2);
    n1.mergeFlags(n2);
    n2.parent = n1;
    n2.tag = null;
    n2.region = null;

    if (secondary_index[0]) {
      for (FieldEdge e : n2.outfields.values()) {
        assert (e.src == n2);
        if (e.dst == n2) w.addLast(new FieldConstraint(n1, n1, e.field));
        else {
          w.addLast(new FieldConstraint(n1, e.dst, e.field));
          e.dst.infields.get(e.field).remove(e);
        }
        fedges.get(e.field).remove(e);
      }

      for (Field f : n2.infields.keySet())
        for (FieldEdge e : n2.infields.get(f)) {
          assert (e.dst == n2);
          e.dst = n1;
          addInField(n1, f, e);
        }

      n2.outfields = null;
      n2.infields = null;
    } else {
      HashSet<FieldEdge> fremove = new HashSet<FieldEdge>();
      for (Field f : fedges.keySet()) {
        Collection<FieldEdge> fed = fedges.get(f);
        int size = fed.size();
        fremove.clear();
        for (FieldEdge e : fed) {
          if (e.src == n2) {
            if (e.dst == n2) w.addLast(new FieldConstraint(n1, n1, e.field));
            else w.addLast(new FieldConstraint(n1, e.dst, e.field));
          } else {
            if (e.dst == n2) e.dst = n1;
            continue;
          }
          if (!fremove.contains(e)) {
            fremove.add(e);
            size--;
          }
        }
        fed.removeAll(fremove);
        assert (fed.size() == size);
      }
    }

    HashSet<CallEdge> cremove = new HashSet<CallEdge>();
    for (Call ce : cedges.keySet()) {
      Collection<CallEdge> ced = cedges.get(ce);
      cremove.clear();
      int size = ced.size();
      for (CallEdge e : ced) {
        if (e.dst == n2) {
          if (e.src == n2) w.addLast(new CallConstraint(n1, n1, e.call));
          else w.addLast(new CallConstraint(e.src, n1, e.call));
        } else {
          if (e.src == n2) e.src = n1;
          continue;
        }
        if (!cremove.contains(e)) {
          cremove.add(e);
          size--;
        }
      }
      ced.removeAll(cremove);
      assert (ced.size() == size);
    }
  }
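  // The unification above relies on getRep() and the parent field forming a
  // union-find structure: "n2.parent = n1" makes n1 the representative of the
  // merged equivalence class. Below is a minimal sketch of how such a getRep()
  // is commonly written, with path compression; it is an assumption about Node,
  // not the actual method from this code base.
  Node getRepSketch() {
    Node rep = this;
    while (rep.parent != null) {
      rep = rep.parent; // walk up to the class representative
    }
    // path compression: point every node on the walk directly at the representative
    Node cur = this;
    while (cur != rep) {
      Node next = cur.parent;
      cur.parent = rep;
      cur = next;
    }
    return rep;
  }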
  /**
   * This does the work of main, but it never calls System.exit, so it is appropriate to be called
   * programmatically. Termination of the program with a message to the user is indicated by throwing
   * Daikon.TerminationMessage.
   *
   * @see #main(String[])
   * @see daikon.Daikon.TerminationMessage
   */
  public static void mainHelper(final String[] args)
      throws FileNotFoundException, StreamCorruptedException, OptionalDataException, IOException,
          ClassNotFoundException {
    daikon.LogHelper.setupLogs(daikon.LogHelper.INFO);

    LongOpt[] longopts =
        new LongOpt[] {
          new LongOpt(Daikon.config_option_SWITCH, LongOpt.REQUIRED_ARGUMENT, null, 0),
          new LongOpt(output_SWITCH, LongOpt.REQUIRED_ARGUMENT, null, 0),
          new LongOpt(dir_SWITCH, LongOpt.REQUIRED_ARGUMENT, null, 0),
          new LongOpt(conf_SWITCH, LongOpt.NO_ARGUMENT, null, 0),
          new LongOpt(filter_SWITCH, LongOpt.NO_ARGUMENT, null, 0),
          new LongOpt(Daikon.debugAll_SWITCH, LongOpt.NO_ARGUMENT, null, 0),
          new LongOpt(Daikon.debug_SWITCH, LongOpt.REQUIRED_ARGUMENT, null, 0),
          new LongOpt(Daikon.ppt_regexp_SWITCH, LongOpt.REQUIRED_ARGUMENT, null, 0),
          new LongOpt(Daikon.track_SWITCH, LongOpt.REQUIRED_ARGUMENT, null, 0),
        };
    Getopt g = new Getopt("daikon.tools.InvariantChecker", args, "h", longopts);
    int c;
    while ((c = g.getopt()) != -1) {
      switch (c) {
        case 0:
          // got a long option
          String option_name = longopts[g.getLongind()].getName();
          if (Daikon.help_SWITCH.equals(option_name)) {
            System.out.println(usage);
            throw new Daikon.TerminationMessage();
          } else if (conf_SWITCH.equals(option_name)) {
            doConf = true;
          } else if (filter_SWITCH.equals(option_name)) {
            doFilter = true;
          } else if (dir_SWITCH.equals(option_name)) {
            dir_file = new File(g.getOptarg());
            if (!dir_file.exists() || !dir_file.isDirectory())
              throw new Daikon.TerminationMessage("Error reading the directory " + dir_file);

          } else if (output_SWITCH.equals(option_name)) {
            output_file = new File(g.getOptarg());
            output_stream = new PrintStream(new FileOutputStream(output_file));
          } else if (Daikon.config_option_SWITCH.equals(option_name)) {
            String item = g.getOptarg();
            daikon.config.Configuration.getInstance().apply(item);
            break;
          } else if (Daikon.debugAll_SWITCH.equals(option_name)) {
            Global.debugAll = true;
          } else if (Daikon.debug_SWITCH.equals(option_name)) {
            LogHelper.setLevel(g.getOptarg(), LogHelper.FINE);
          } else if (Daikon.track_SWITCH.equals(option_name)) {
            LogHelper.setLevel("daikon.Debug", LogHelper.FINE);
            String error = Debug.add_track(g.getOptarg());
            if (error != null) {
              throw new Daikon.TerminationMessage(
                  "Error parsing track argument '" + g.getOptarg() + "' - " + error);
            }
          } else {
            throw new RuntimeException("Unknown long option received: " + option_name);
          }
          break;
        case 'h':
          System.out.println(usage);
          throw new Daikon.TerminationMessage();
        case '?':
          break; // getopt() already printed an error
        default:
          System.out.println("getopt() returned " + c);
          break;
      }
    }

    // Loop through each filename specified
    for (int i = g.getOptind(); i < args.length; i++) {

      // Get the file and make sure it exists
      File file = new File(args[i]);
      if (!file.exists()) {
        throw new Error("File " + file + " not found.");
      }

      // These aren't "endsWith()" because there might be a suffix on the end
      // (e.g., a date).
      String filename = file.toString();
      if (filename.indexOf(".inv") != -1) {
        if (inv_file != null) {
          throw new Daikon.TerminationMessage("multiple inv files specified", usage);
        }
        inv_file = file;
      } else if (filename.indexOf(".dtrace") != -1) {
        dtrace_files.add(filename);
      } else {
        throw new Error("Unrecognized argument: " + file);
      }
    }
    if (dir_file == null) {
      checkInvariants();
      return;
    }

    // Yoav additions:
    File[] filesInDir = dir_file.listFiles();
    if (filesInDir == null || filesInDir.length == 0)
      throw new Daikon.TerminationMessage("The directory " + dir_file + " is empty", usage);
    ArrayList<File> invariants = new ArrayList<File>();
    for (File f : filesInDir) if (f.toString().indexOf(".inv") != -1) invariants.add(f);
    if (invariants.size() == 0)
      throw new Daikon.TerminationMessage(
          "Did not find any invariant files in the directory " + dir_file, usage);
    ArrayList<File> dtraces = new ArrayList<File>();
    for (File f : filesInDir) if (f.toString().indexOf(".dtrace") != -1) dtraces.add(f);
    if (dtraces.size() == 0)
      throw new Daikon.TerminationMessage(
          "Did not find any dtrace files in the directory " + dir_file, usage);

    System.out.println(
        "Collecting data for invariants files " + invariants + " and dtrace files " + dtraces);

    dtrace_files.clear();
    for (File dtrace : dtraces) {
      dtrace_files.add(dtrace.toString());
    }

    String commaLine = "";
    for (File inFile : invariants) {
      String name = inFile.getName().replace(".inv", "").replace(".gz", "");
      commaLine += "," + name;
    }
    outputComma.add(commaLine);

    commaLine = "";
    for (File inFile : invariants) {
      inv_file = inFile;
      failedInvariants.clear();
      testedInvariants.clear();
      error_cnt = 0;

      output_stream =
          new PrintStream(
              new FileOutputStream(
                  inFile.toString().replace(".inv", "").replace(".gz", "")
                      + ".false-positives.txt"));
      checkInvariants();
      output_stream.close();

      int failedCount = failedInvariants.size();
      int testedCount = testedInvariants.size();
      String percent = toPercentage(failedCount, testedCount);
      commaLine += "," + percent;
    }
    outputComma.add(commaLine);

    System.out.println();
    for (String output : outputComma) System.out.println(output);
  }