public void trace(float x, float y) {

      println(x);

      if (frameCounter > 2 && mousePressed) {
        coords.add(new PVector(x, y));
        frameCounter = 0;
        if (coordCounter > 0) {
          PVector tempCoord = (PVector) coords.get(coordCounter - 1);
          line(x, y, tempCoord.x, tempCoord.y);
        }
        coordCounter++;
      }
      // println(coordCounter);

      for (int i = 0; i < coords.size(); i++) {
        PVector tempCoordnear = (PVector) coords.get(i);
        float coordDist = dist(x, y, tempCoordnear.x, tempCoordnear.y);
        if (coordDist < distance) {
          stroke(255, 0, 0);
          line(x, y, tempCoordnear.x, tempCoordnear.y);
        }
      }
      frameCounter++;
    }
Example #2
  /*
  	-- This is a helper method to run datasets such as emacs, gcc, etc.
  */
  private static void runOtherDataSets() throws Exception {
    System.out.println("Running tddd " + directory);
    ReadFile.readFile(directory, fileList); // read the two files
    System.out.println(fileList.get(0) + " " + fileList.get(1));
    preliminaryStep(directory);
    startCDC();
  }
  public void draw() {
    background(255);

    // Turn off highlighting for all obstacles
    for (int i = 0; i < obstacles.size(); i++) {
      Obstacle o = (Obstacle) obstacles.get(i);
      o.highlight(false);
    }

    // Act on all boids
    for (int i = 0; i < boids.size(); i++) {
      Boid b = (Boid) boids.get(i);
      b.avoid(obstacles);
      b.run();
    }

    // Display the obstacles
    for (int i = 0; i < obstacles.size(); i++) {
      Obstacle o = (Obstacle) obstacles.get(i);
      o.display();
    }

    // Instructions
    textFont(f);
    fill(0);
    text(
        "Hit space bar to toggle debugging lines.\nClick the mouse to generate a new boids.",
        10,
        height - 30);
  }
Example #4
  /*
  	-- This is a helper method to run the morph files
  */
  private static void runMorphDataSet() throws Exception {

    String morph_directory =
        "../../thesis-datasets/morph/"; // directory where all the morph code is stored
    File d = new File(morph_directory);
    // get all the files from a directory
    File[] fList = d.listFiles();
    List<String> dir_list = new ArrayList<String>();
    for (File file : fList) {
      if (file.isDirectory()) {
        dir_list.add(file.getName());
      }
    }
    for (String dir : dir_list) {
      directory = morph_directory + dir + "/";
      System.out.println("Running TDDD " + directory);
      ReadFile.readFile(directory, fileList); // read the two files
      System.out.println(fileList.get(0) + " " + fileList.get(1));
      preliminaryStep(directory);
      startCDC();
      fileList.clear();
      fileArray.clear();
      hashed_File_List.clear();
    }
  }
Example #5
  /*
  	Read in all the files and loop through all the files
  	We already have the hashed version of the documents
  	First, we cut up the first document into chunks (using the CDC algorithm) and store it
  	Then we cut up the second document (usually a different version of the same document) and see how many chunks match
  */
  private static void runBytes(
      int window,
      long divisor1,
      long divisor2,
      long divisor3,
      long remainder,
      Long minBoundary,
      Long maxBoundary)
      throws Exception {

    storeChunks(
        fileArray.get(0),
        hashed_File_List.get(0),
        divisor1,
        divisor2,
        divisor3,
        remainder,
        minBoundary,
        maxBoundary); // chunk the first document and store its chunks in the system
    runTddd(
        fileArray.get(1),
        hashed_File_List.get(1),
        divisor1,
        divisor2,
        divisor3,
        remainder,
        minBoundary,
        maxBoundary); // chunk the second document and check how similar it is to the chunks
                      // already stored in the system
  } // end of the function
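The comment above runBytes describes the overall flow: chunk the first document and store its chunks, then chunk the second document and count how many of its bytes are covered by chunks we have already seen. Purely as a rough, self-contained illustration of that idea (not the actual storeChunks/runTddd implementations; the chunking step is assumed to have already happened), a sketch might look like:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ChunkMatchSketch {
  // Given the chunks of two documents, count how many bytes of doc2 are covered
  // by chunks that already appeared in doc1 (analogous to the 'coverage' counter).
  static int matchedBytes(List<String> doc1Chunks, List<String> doc2Chunks) {
    Set<String> stored = new HashSet<String>(doc1Chunks); // "store" doc1's chunks
    int covered = 0;
    for (String chunk : doc2Chunks) {
      if (stored.contains(chunk)) covered += chunk.length(); // bytes we would not need to store again
    }
    return covered;
  }
}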
Example #6
 public void drawOrbits(ArrayList alObjectsArchive) {
   //  println("SIZE:" + alObjectsArchive.size());
   //  ArrayList alObjectsArchive = timeline.getObjectStateArchive();
   ArrayList alPrevPos = new ArrayList();
   ArrayList alColors = new ArrayList();
   alColors.add(color(255, 0, 0));
   alColors.add(color(255, 255, 0));
   alColors.add(color(255, 0, 255));
   //  for (int i = timeline.getTimeIdx(); i >= 0 && i > (timeline.getTimeIdx() - 1 - 100); i--)
   for (int i = 0; i < alObjectsArchive.size(); i++) {
     ArrayList objects = (ArrayList) alObjectsArchive.get(i);
     for (int j = 0; j < objects.size(); j++) {
       CelestialObject obj = (CelestialObject) objects.get(j);
       //      CelestialObject obj = (CelestialObject)objects.get(1);
       PVector pos = obj.getPosition();
       //      stroke(0, 0, 255);
       stroke((Integer) alColors.get(j));
       if (alPrevPos.size() == objects.size()) {
         PVector prevPos = (PVector) alPrevPos.get(j);
         line(prevPos.x, prevPos.y, pos.x, pos.y);
         alPrevPos.set(j, pos);
       } else alPrevPos.add(pos);
     }
   }
 }
  public boolean readDocument() {
    try {
      myStream = new ZLInputStreamWithOffset(myFile.getInputStream());

      PdbHeader header = new PdbHeader(myStream);

      setMainTextModel();
      myFont = FontType.FT_REGULAR;

      for (int index = 0; index < header.Offsets.length; ++index) {
        int currentOffset = myStream.offset();
        int pit = header.Offsets[index];
        if (currentOffset > pit) {
          break;
        }
        // myStream.seek(pit - currentOffset, false);
        myStream.skip(pit - currentOffset);

        if (myStream.offset() != pit) {
          break;
        }
        int recordSize =
            ((index != header.Offsets.length - 1) ? header.Offsets[index + 1] : myFileSize) - pit;
        readRecord(recordSize);
      }
      myStream.close();
    } catch (IOException e) {
      return false;
    }

    for (Iterator it = myReferencedParagraphs.iterator(); it.hasNext(); ) {
      Pair pair = (Pair) it.next();
      int first = (Integer) pair.myFirst;
      int second = (Integer) pair.mySecond;
      ArrayList /*<Integer>*/ list = (ArrayList) myParagraphMap.get(first);
      if (list != null) {
        for (int k = second; k < list.size(); ++k) {
          if (((Integer) ((Pair) list.get(k)).myFirst) != -1) {
            // addHyperlinkLabel(fromNumber(first) + '#' + fromNumber(second),
            // (Integer)list.get(k));
            final Pair p = (Pair) list.get(k);
            // addHyperlinkLabel(fromNumber(first) + '#' + fromNumber(second), (Integer) p.mySecond,
            // (Integer) p.myFirst);
            break;
          }
        }
      }
    }
    myReferencedParagraphs.clear();
    myParagraphMap.clear();
    return true;
  }
Example #8
    public void calculateForces(ArrayList objects) {
      for (int i = 0; i < objects.size(); i++) {
        CelestialObject obj = (CelestialObject) objects.get(i);
        ArrayList forces = obj.getForces();
        float totalForceX = 0;
        float totalForceY = 0;

        for (int j = 0; j < forces.size(); j++) {
          totalForceX += ((PVector) forces.get(j)).x;
          totalForceY += ((PVector) forces.get(j)).y;
        }

        PVector newAccel = new PVector(totalForceX / obj.getMass(), totalForceY / obj.getMass());

        obj.setAcceleration(newAccel);
      }

      for (int i = 0; i < objects.size(); i++) {
        CelestialObject obj1 = (CelestialObject) objects.get(i);
        float forceX = 0;
        float forceY = 0;
        obj1.clearForces();

        if (obj1.getClass() == Star.class) {
          println(obj1.getVelocity());
          continue;
        }

        for (int j = 0; j < objects.size(); j++) {
          CelestialObject obj2 = (CelestialObject) objects.get(j);

          if (i == j) continue;

          PVector pvDistance = PVector.sub(obj2.getPosition(), obj1.getPosition());
          //    println("distance: x:" + pvDistance.x + " y:" + pvDistance.y);
          float distance = sqrt(sq(pvDistance.y) + sq(pvDistance.x));
          float angle = degrees(atan2(pvDistance.y, pvDistance.x));

          float force = (G * obj1.getMass() * obj2.getMass()) / sq(distance);
          forceX = force * cos(radians(angle));
          forceY = force * sin(radians(angle));
          //        println("FORCES on " + obj1.getName() + ":" + forceX + "," + forceY);
          obj1.addForce(new PVector(forceX, forceY));

          println();
        }
      }
    }
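A side note on the force computation above: since cos(atan2(dy, dx)) equals dx/d and sin(atan2(dy, dx)) equals dy/d, the angle round-trip is not strictly needed; the same components can be obtained by scaling the offset vector directly. A small equivalent sketch using the same names as the surrounding code:

      // Equivalent inner-loop body without converting to an angle and back:
      PVector offset = PVector.sub(obj2.getPosition(), obj1.getPosition());
      float d = offset.mag();                                         // distance between the bodies
      float force = (G * obj1.getMass() * obj2.getMass()) / (d * d);  // gravity magnitude
      offset.normalize();                                             // unit vector from obj1 toward obj2
      offset.mult(force);                                             // now holds (forceX, forceY)
      obj1.addForce(offset);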
Example #9
  private static void startCDC() throws IOException, Exception {
    long remainder = 7;
    for (int i = startBoundary; i <= endBoundary; i += increment) {
      long minBoundary = min_multiplier * i; // we will set the mod value as the minimum boundary
      long maxBoundary = max_multiplier * i; // we will set this as the maximum boundary
      long divisor1 = i; // this will be used to mod the results
      long divisor2 = i / 2 + 1; // the backup divisor is half the original divisor
      long divisor3 = i / 4 + 1;
      totalSize =
          fileArray.get(1)
              .length; // note we only care about the size of the second file since that's the file
                       // we are measuring
      System.out.print(divisor1 + " " + divisor2 + " " + divisor3 + " ");
      runBytes(
          window,
          divisor1,
          divisor2,
          divisor3,
          remainder,
          minBoundary,
          maxBoundary); // run the Karp-Rabin algorithm
      // this is the block size per boundary
      double blockSize = (double) totalSize / (double) numOfPieces;
      double ratio = (double) coverage / (double) totalSize;
      System.out.println(blockSize + " " + ratio);

      // clear the hashTable, and counters so we can reset the values for the next round of
      // boundaries
      coverage = 0;
      numOfPieces = 0;
      table.clear();
      HashClass.duplicate_counter = 0;
      HashClass.max_list_length = 0;
    }
  }
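For intuition about the two numbers printed per divisor in startCDC(): blockSize is the average chunk size produced for the measured file, and ratio is the fraction of its bytes covered by chunks already stored. A tiny, self-contained example of the same arithmetic, with made-up numbers rather than values from any real run:

public class CoverageMath {
  public static void main(String[] args) {
    long totalSize = 1_000_000; // bytes in the file being measured (hypothetical)
    long numOfPieces = 980;     // chunks produced for it (hypothetical)
    long coverage = 640_000;    // bytes that matched previously stored chunks (hypothetical)

    double blockSize = (double) totalSize / (double) numOfPieces; // about 1020.4 bytes per chunk
    double ratio = (double) coverage / (double) totalSize;        // 0.64, i.e. 64% deduplicated
    System.out.println(blockSize + " " + ratio);
  }
}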
  public void setup() {
    size(600, 400);
    smooth();
    f = createFont("Georgia", 12, true);

    obstacles = new ArrayList();
    boids = new ArrayList();

    // A little algorithm to pick a bunch of random obstacles that don't overlap
    for (int i = 0; i < 100; i++) {
      float x = random(width);
      float y = random(height);
      float r = random(50 - i / 2, 50);
      boolean ok = true;
      for (int j = 0; j < obstacles.size(); j++) {
        Obstacle o = (Obstacle) obstacles.get(j);
        if (dist(x, y, o.loc.x, o.loc.y) < o.radius + r + 20) {
          ok = false;
        }
      }
      if (ok) obstacles.add(new Obstacle(x, y, r));
    }

    // Starting with three boids
    boids.add(new Boid(new PVector(random(width), random(height)), 3f, 0.2f));
    boids.add(new Boid(new PVector(random(width), random(height)), 3f, 0.1f));
    boids.add(new Boid(new PVector(random(width), random(height)), 2f, 0.05f));
  }
Example #11
    // Does a deepcopy of an array list
    public static ArrayList cloneArrayList(ArrayList al) {
      ArrayList alNew = new ArrayList(al.size());
      for (int i = 0; i < al.size(); i++) {
        alNew.add(((CelestialObject) al.get(i)).clone());
      }

      return alNew;
    }
Example #12
    // Does a deepcopy of an array list
    public ArrayList cloneArrayList(ArrayList al) {
      ArrayList alNew = new ArrayList(al.size());
      for (int i = 0; i < al.size(); i++) {
        PVector pv = (PVector) al.get(i);
        alNew.add(new PVector(pv.x, pv.y));
      }

      return alNew;
    }
    public void avoid(ArrayList obstacles) {

      // Make a vector that will be the position of the object
      // relative to the Boid rotated in the direction of boid's velocity
      PVector closestRotated = new PVector(sight + 1, sight + 1);
      float closestDistance = 99999;
      Obstacle avoid = null;

      // Let's look at each obstacle
      for (int i = 0; i < obstacles.size(); i++) {
        Obstacle o = (Obstacle) obstacles.get(i);

        float d = PVector.dist(loc, o.loc);
        PVector dir = vel.get();
        dir.normalize();
        PVector diff = PVector.sub(o.loc, loc);

        // Now we use the dot product to rotate the vector that points from boid to obstacle
        // Velocity is the new x-axis
        PVector rotated = new PVector(diff.dot(dir), diff.dot(getNormal(dir)));

        // Is the obstacle in our path?
        if (PApplet.abs(rotated.y) < (o.radius + r)) {
          // Is it the closest obstacle?
          if ((rotated.x > 0) && (rotated.x < closestRotated.x)) {
            closestRotated = rotated;
            avoid = o;
          }
        }
      }

      // Can we actually see the closest one?
      if (PApplet.abs(closestRotated.x) < sight) {

        // The desired vector should point away from the obstacle
        // The closer to the obstacle, the more it should steer
        PVector desired =
            new PVector(closestRotated.x, -closestRotated.y * sight / closestRotated.x);
        desired.normalize();
        desired.mult(closestDistance);
        desired.limit(maxspeed);
        // Rotate back to the regular coordinate system
        rotateVector(desired, vel.heading2D());

        // Draw some debugging stuff
        if (debug) {
          stroke(0);
          line(loc.x, loc.y, loc.x + desired.x * 10, loc.y + desired.y * 10);
          avoid.highlight(true);
        }

        // Apply Reynolds steering rules
        desired.sub(vel);
        desired.limit(maxforce);
        acc.add(desired);
      }
    }
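avoid() leans on two helpers that are not shown in this excerpt: getNormal(), which supplies the second axis of the velocity-aligned frame, and rotateVector(), which maps the steering force back into world coordinates. Their real implementations are not included here; a plausible sketch, assuming getNormal returns the 2D perpendicular and rotateVector rotates a vector in place by a heading angle, would be:

    // Assumed helpers (not part of the original excerpt): standard 2D vector math.
    PVector getNormal(PVector v) {
      // The perpendicular of (x, y) is (-y, x); used as the y-axis of the rotated frame.
      return new PVector(-v.y, v.x);
    }

    void rotateVector(PVector v, float theta) {
      // Rotate v in place by theta radians using the 2D rotation matrix.
      float x = v.x * PApplet.cos(theta) - v.y * PApplet.sin(theta);
      float y = v.x * PApplet.sin(theta) + v.y * PApplet.cos(theta);
      v.x = x;
      v.y = y;
    }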
Example #14
 public void mouseClicked() {
   ArrayList objects = timeline.getStatefulObjects();
   for (int i = 0; i < objects.size(); i++) {
     CelestialObject obj = (CelestialObject) objects.get(i);
     if (obj.isMouseOver()) {
       println(obj.getName() + " clicked!");
       break;
     }
   }
 }
  public void draw() {

    background(255);

    for (int i = 0; i < balls.size(); i++) {
      Ball ball = (Ball) balls.get(i);
      ball.calc();
      ball.display();
    }
  }
Example #16
  /** Update all of the Balls and draw them */
  public void update() {

    if (balls.size() != 0) {

      for (int i = 0; i < balls.size(); i++) {
        Ball b = (Ball) balls.get(i);
        b.update();
        b.attract = kelly;
        b.drawBall();
      }
    }
  }
Example #17
  public void draw() {
    background(0);
    fill(255);

    if (!paused) timeline.moveForward();

    if (dragging) drawOrbits(timeline.getFutureObjectStates());
    else drawOrbits(timeline.getPastObjectStates());

    ArrayList objects = timeline.getStatefulObjects();
    for (int i = 0; i < objects.size(); i++) {
      CelestialObject obj = (CelestialObject) objects.get(i);
      obj.display();
    }
  }
  public void draw() {
    // background(255);
    fill(255);
    rect(-4, -4, width + 4, height + 4);

    for (int i = skaters.size() - 1; i >= 0; i--) {

      Skater skater = (Skater) skaters.get(i);

      // if (mousePressed) {
      skater.trace(mouseX, mouseY); // blob[i].x etc.

      println(i);
    }
  }
Example #19
    public int moveBackward() {
      intTimeIdx--;

      if (intTimeIdx < 0) {
        intTimeIdx = 0;
      } else {
        //      println("before:" + ((CelestialObject)alStatefulObjects.get(0)).getPosition().x +
        // "," + ((CelestialObject)alStatefulObjects.get(0)).getPosition().y);
        setCurrentState(cloneArrayList((ArrayList) alObjectStateArchive.get(intTimeIdx)));
        //      println("before:" + ((CelestialObject)alStatefulObjects.get(0)).getPosition().x +
        // "," + ((CelestialObject)alStatefulObjects.get(0)).getPosition().y);
      }

      sliderTimeline.setValue(intTimeIdx);

      return intTimeIdx;
    }
Example #20
  /** If a key is pressed perform the respective actions */
  public void keyPressed() {

    // Add 'stems' to the balls
    if (keyCode == SHIFT) {
      stems = !stems;
      for (int i = 0; i < balls.size(); i++) {
        Ball b = (Ball) balls.get(i);
        b.STEM = stems;
      }
    }
    // toggle repaint background
    else if (key == 'b') REPAINT = !REPAINT;
    // Empty the ArrayList of Balls
    else if (key == 'x') balls.clear();
    // Add a ball
    else if (key == 'f') addBall();
  }
Example #21
    public int moveForward() {
      //    println("forward!");
      intTimeIdx++;

      // if the future values have already been calculated, just fetch them instead of calculating
      // them again
      if (alObjectStateArchive.size() > intTimeIdx)
        setCurrentState(cloneArrayList((ArrayList) alObjectStateArchive.get(intTimeIdx)));
      else {
        //      println("calculating...");
        sim.calculateForces(alStatefulObjects);
        alObjectStateArchive.add(cloneArrayList(alStatefulObjects));
      }

      sliderTimeline.setValue(intTimeIdx);

      return intTimeIdx;
    }
Example #22
  public void mouseDragged() {
    if (paused) {
      ArrayList objects = timeline.getStatefulObjects();
      for (int i = 0; i < objects.size(); i++) {
        CelestialObject obj = (CelestialObject) objects.get(i);
        if (obj.isMouseOver()) {
          dragging = true;
          PVector pos = obj.getPosition();
          pos.x = mouseX;
          pos.y = mouseY;

          timeline.reset();
          timeline.setCurrentState(objects);
          sliderTimeline.setValue(0);
          break;
        }
      }
    }
  }
Example #23
  // this method basically chops the document into blocks and records their frequencies
  private static void getBlockFrequency() throws Exception {
    directory = "../../thesis-datasets/morph_file_100MB/";
    ReadFile.readFile(directory, fileList); // read the two files
    HashMap<Integer, Integer> blockFreq =
        new HashMap<Integer, Integer>(); // this stores the blocks in the map along with their frequencies
    int start = 0; // start of the sliding window
    int end = start + window - 1; // ending boundary
    preliminaryStep(directory);
    // System.out.println("Choping the document TDDD " + fileList.get(0));
    long[] divisorArray = {
      1000
    }; // run the frequency code for these divisor values (i.e., the expected block size)
    for (long i : divisorArray) {

      long divisor1 = i;
      long divisor2 = i / 2;
      long divisor3 = i / 4;
      long remainder = 7;
      long minBoundary = min_multiplier * i;
      long maxBoundary = max_multiplier * i;
      // System.out.println("Running Likelihood for " + i + " " + divisor2 + " " + divisor3);
      int totalBlocks =
          chopDocument(
              fileArray.get(0),
              hashed_File_List.get(0),
              divisor1,
              divisor2,
              divisor3,
              remainder,
              minBoundary,
              maxBoundary,
              blockFreq);
      // now output the block sizes, along with their frequencies and probabilities
      for (Map.Entry<Integer, Integer> tuple : blockFreq.entrySet()) {
        // output the block freq
        double prob = (double) tuple.getValue() / (double) totalBlocks;
        System.out.println(tuple.getKey() + " " + tuple.getValue() + " " + prob);
      }

      blockFreq.clear();
    }
  }
Example #24
  /*
  	- This reads the files and hashes the documents, which are then stored in our ArrayLists
  	- we do this beforehand, so we don't have to hash again later (which is time consuming)
  */
  private static void preliminaryStep(String dir) throws Exception {
    int start = 0; // start of the sliding window
    int end = start + window - 1; // ending boundary
    // preprocessing step to hash the documents, since we don't need to hash them again
    for (int i = 0; i < fileList.size(); ++i) {
      // System.out.println("preliminaryStep " + fileList.get(i));
      Path p = Paths.get(dir + fileList.get(i)); // read this file
      byte[] array = Files.readAllBytes(p); // read the file in bytes
      // System.out.println(array.length);

      ArrayList<Long> md5Hashes = new ArrayList<Long>(); // make a new arrayList for this document
      HashDocument.hashDocument(
          array, md5Hashes, start,
          end); // this hashes the entire document using the window and stores it into the md5Hashes array

      // add the fileArray and hashedFile to our lists so we can use them later to run the
      // algorithms
      // note we read and hash each file up front, so we don't have to do it again
      fileArray.add(array);
      hashed_File_List.add(md5Hashes);
    }
  }
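preliminaryStep() delegates the windowed hashing to HashDocument.hashDocument(), which is not part of this excerpt. Purely as an illustration of what "hash the document over a sliding window" can mean, here is a simple polynomial rolling hash; this is an assumption for explanation, not the project's actual implementation:

import java.util.ArrayList;

class SlidingWindowHashSketch {
  // Produce one hash per window position, sliding one byte at a time.
  static ArrayList<Long> hashWindows(byte[] data, int window) {
    ArrayList<Long> hashes = new ArrayList<Long>();
    final long BASE = 257;
    long power = 1;
    for (int k = 0; k < window - 1; k++) power *= BASE; // BASE^(window-1); overflow wraps, which is fine for a sketch

    long hash = 0;
    for (int i = 0; i < data.length; i++) {
      hash = hash * BASE + (data[i] & 0xff);                             // bring the new byte in
      if (i >= window) hash -= (data[i - window] & 0xff) * power * BASE; // drop the oldest byte
      if (i >= window - 1) hashes.add(hash);                             // hash of data[i-window+1 .. i]
    }
    return hashes;
  }
}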
  public void build_bricks() {

    ImagePlus imp;
    ImagePlus orgimp;
    ImageStack stack;
    FileInfo finfo;

    if (lvImgTitle.isEmpty()) return;
    orgimp = WindowManager.getImage(lvImgTitle.get(0));
    imp = orgimp;

    finfo = imp.getFileInfo();
    if (finfo == null) return;

    int[] dims = imp.getDimensions();
    int imageW = dims[0];
    int imageH = dims[1];
    int nCh = dims[2];
    int imageD = dims[3];
    int nFrame = dims[4];
    int bdepth = imp.getBitDepth();
    double xspc = finfo.pixelWidth;
    double yspc = finfo.pixelHeight;
    double zspc = finfo.pixelDepth;
    double z_aspect = Math.max(xspc, yspc) / zspc;

    int orgW = imageW;
    int orgH = imageH;
    int orgD = imageD;
    double orgxspc = xspc;
    double orgyspc = yspc;
    double orgzspc = zspc;

    lv = lvImgTitle.size();
    if (filetype == "JPEG") {
      for (int l = 0; l < lv; l++) {
        if (WindowManager.getImage(lvImgTitle.get(l)).getBitDepth() != 8) {
          IJ.error("A SOURCE IMAGE MUST BE 8BIT GLAYSCALE");
          return;
        }
      }
    }

    // calculate levels
    /*		int baseXY = 256;
    		int baseZ = 256;

    		if (z_aspect < 0.5) baseZ = 128;
    		if (z_aspect > 2.0) baseXY = 128;
    		if (z_aspect >= 0.5 && z_aspect < 1.0) baseZ = (int)(baseZ*z_aspect);
    		if (z_aspect > 1.0 && z_aspect <= 2.0) baseXY = (int)(baseXY/z_aspect);

    		IJ.log("Z_aspect: " + z_aspect);
    		IJ.log("BaseXY: " + baseXY);
    		IJ.log("BaseZ: " + baseZ);
    */

    int baseXY = 256;
    int baseZ = 128;
    int dbXY = Math.max(orgW, orgH) / baseXY;
    if (Math.max(orgW, orgH) % baseXY > 0) dbXY *= 2;
    int dbZ = orgD / baseZ;
    if (orgD % baseZ > 0) dbZ *= 2;
    lv = Math.max(log2(dbXY), log2(dbZ)) + 1;

    int ww = orgW;
    int hh = orgH;
    int dd = orgD;
    for (int l = 0; l < lv; l++) {
      int bwnum = ww / baseXY;
      if (ww % baseXY > 0) bwnum++;
      int bhnum = hh / baseXY;
      if (hh % baseXY > 0) bhnum++;
      int bdnum = dd / baseZ;
      if (dd % baseZ > 0) bdnum++;

      if (bwnum % 2 == 0) bwnum++;
      if (bhnum % 2 == 0) bhnum++;
      if (bdnum % 2 == 0) bdnum++;

      int bw = (bwnum <= 1) ? ww : ww / bwnum + 1 + (ww % bwnum > 0 ? 1 : 0);
      int bh = (bhnum <= 1) ? hh : hh / bhnum + 1 + (hh % bhnum > 0 ? 1 : 0);
      int bd = (bdnum <= 1) ? dd : dd / bdnum + 1 + (dd % bdnum > 0 ? 1 : 0);

      bwlist.add(bw);
      bhlist.add(bh);
      bdlist.add(bd);

      IJ.log("LEVEL: " + l);
      IJ.log("  width: " + ww);
      IJ.log("  hight: " + hh);
      IJ.log("  depth: " + dd);
      IJ.log("  bw: " + bw);
      IJ.log("  bh: " + bh);
      IJ.log("  bd: " + bd);

      int xyl2 = Math.max(ww, hh) / baseXY;
      if (Math.max(ww, hh) % baseXY > 0) xyl2 *= 2;
      if (lv - 1 - log2(xyl2) <= l) {
        ww /= 2;
        hh /= 2;
      }
      IJ.log("  xyl2: " + (lv - 1 - log2(xyl2)));

      int zl2 = dd / baseZ;
      if (dd % baseZ > 0) zl2 *= 2;
      if (lv - 1 - log2(zl2) <= l) dd /= 2;
      IJ.log("  zl2: " + (lv - 1 - log2(zl2)));

      if (l < lv - 1) {
        lvImgTitle.add(lvImgTitle.get(0) + "_level" + (l + 1));
        IJ.selectWindow(lvImgTitle.get(0));
        IJ.run(
            "Scale...",
            "x=- y=- z=- width="
                + ww
                + " height="
                + hh
                + " depth="
                + dd
                + " interpolation=Bicubic average process create title="
                + lvImgTitle.get(l + 1));
      }
    }

    for (int l = 0; l < lv; l++) {
      IJ.log(lvImgTitle.get(l));
    }

    Document doc = newXMLDocument();
    Element root = doc.createElement("BRK");
    root.setAttribute("version", "1.0");
    root.setAttribute("nLevel", String.valueOf(lv));
    root.setAttribute("nChannel", String.valueOf(nCh));
    root.setAttribute("nFrame", String.valueOf(nFrame));
    doc.appendChild(root);

    for (int l = 0; l < lv; l++) {
      IJ.showProgress(0.0);

      int[] dims2 = imp.getDimensions();
      IJ.log(
          "W: "
              + String.valueOf(dims2[0])
              + " H: "
              + String.valueOf(dims2[1])
              + " C: "
              + String.valueOf(dims2[2])
              + " D: "
              + String.valueOf(dims2[3])
              + " T: "
              + String.valueOf(dims2[4])
              + " b: "
              + String.valueOf(bdepth));

      bw = bwlist.get(l).intValue();
      bh = bhlist.get(l).intValue();
      bd = bdlist.get(l).intValue();

      boolean force_pow2 = false;
      /*			if(IsPowerOf2(bw) && IsPowerOf2(bh) && IsPowerOf2(bd)) force_pow2 = true;

      			if(force_pow2){
      				//force pow2
      				if(Pow2(bw) > bw) bw = Pow2(bw)/2;
      				if(Pow2(bh) > bh) bh = Pow2(bh)/2;
      				if(Pow2(bd) > bd) bd = Pow2(bd)/2;
      			}

      			if(bw > imageW) bw = (Pow2(imageW) == imageW) ? imageW : Pow2(imageW)/2;
      			if(bh > imageH) bh = (Pow2(imageH) == imageH) ? imageH : Pow2(imageH)/2;
      			if(bd > imageD) bd = (Pow2(imageD) == imageD) ? imageD : Pow2(imageD)/2;

      */
      if (bw > imageW) bw = imageW;
      if (bh > imageH) bh = imageH;
      if (bd > imageD) bd = imageD;

      if (bw <= 1 || bh <= 1 || bd <= 1) break;

      if (filetype == "JPEG" && (bw < 8 || bh < 8)) break;

      Element lvnode = doc.createElement("Level");
      lvnode.setAttribute("lv", String.valueOf(l));
      lvnode.setAttribute("imageW", String.valueOf(imageW));
      lvnode.setAttribute("imageH", String.valueOf(imageH));
      lvnode.setAttribute("imageD", String.valueOf(imageD));
      lvnode.setAttribute("xspc", String.valueOf(xspc));
      lvnode.setAttribute("yspc", String.valueOf(yspc));
      lvnode.setAttribute("zspc", String.valueOf(zspc));
      lvnode.setAttribute("bitDepth", String.valueOf(bdepth));
      root.appendChild(lvnode);

      Element brksnode = doc.createElement("Bricks");
      brksnode.setAttribute("brick_baseW", String.valueOf(bw));
      brksnode.setAttribute("brick_baseH", String.valueOf(bh));
      brksnode.setAttribute("brick_baseD", String.valueOf(bd));
      lvnode.appendChild(brksnode);

      ArrayList<Brick> bricks = new ArrayList<Brick>();
      int mw, mh, md, mw2, mh2, md2;
      double tx0, ty0, tz0, tx1, ty1, tz1;
      double bx0, by0, bz0, bx1, by1, bz1;
      for (int k = 0; k < imageD; k += bd) {
        if (k > 0) k--;
        for (int j = 0; j < imageH; j += bh) {
          if (j > 0) j--;
          for (int i = 0; i < imageW; i += bw) {
            if (i > 0) i--;
            mw = Math.min(bw, imageW - i);
            mh = Math.min(bh, imageH - j);
            md = Math.min(bd, imageD - k);

            if (force_pow2) {
              mw2 = Pow2(mw);
              mh2 = Pow2(mh);
              md2 = Pow2(md);
            } else {
              mw2 = mw;
              mh2 = mh;
              md2 = md;
            }

            if (filetype == "JPEG") {
              if (mw2 < 8) mw2 = 8;
              if (mh2 < 8) mh2 = 8;
            }

            tx0 = i == 0 ? 0.0d : ((mw2 - mw + 0.5d) / mw2);
            ty0 = j == 0 ? 0.0d : ((mh2 - mh + 0.5d) / mh2);
            tz0 = k == 0 ? 0.0d : ((md2 - md + 0.5d) / md2);

            tx1 = 1.0d - 0.5d / mw2;
            if (mw < bw) tx1 = 1.0d;
            if (imageW - i == bw) tx1 = 1.0d;

            ty1 = 1.0d - 0.5d / mh2;
            if (mh < bh) ty1 = 1.0d;
            if (imageH - j == bh) ty1 = 1.0d;

            tz1 = 1.0d - 0.5d / md2;
            if (md < bd) tz1 = 1.0d;
            if (imageD - k == bd) tz1 = 1.0d;

            bx0 = i == 0 ? 0.0d : (i + 0.5d) / (double) imageW;
            by0 = j == 0 ? 0.0d : (j + 0.5d) / (double) imageH;
            bz0 = k == 0 ? 0.0d : (k + 0.5d) / (double) imageD;

            bx1 = Math.min((i + bw - 0.5d) / (double) imageW, 1.0d);
            if (imageW - i == bw) bx1 = 1.0d;

            by1 = Math.min((j + bh - 0.5d) / (double) imageH, 1.0d);
            if (imageH - j == bh) by1 = 1.0d;

            bz1 = Math.min((k + bd - 0.5d) / (double) imageD, 1.0d);
            if (imageD - k == bd) bz1 = 1.0d;

            int x, y, z;
            x = i - (mw2 - mw);
            y = j - (mh2 - mh);
            z = k - (md2 - md);
            bricks.add(
                new Brick(
                    x, y, z, mw2, mh2, md2, 0, 0, tx0, ty0, tz0, tx1, ty1, tz1, bx0, by0, bz0, bx1,
                    by1, bz1));
          }
        }
      }

      Element fsnode = doc.createElement("Files");
      lvnode.appendChild(fsnode);

      stack = imp.getStack();

      int totalbricknum = nFrame * nCh * bricks.size();
      int curbricknum = 0;
      for (int f = 0; f < nFrame; f++) {
        for (int ch = 0; ch < nCh; ch++) {
          int sizelimit = bdsizelimit * 1024 * 1024;
          int bytecount = 0;
          int filecount = 0;
          int pd_bufsize = Math.max(sizelimit, bw * bh * bd * bdepth / 8);
          byte[] packed_data = new byte[pd_bufsize];
          String base_dataname =
              basename
                  + "_Lv"
                  + String.valueOf(l)
                  + "_Ch"
                  + String.valueOf(ch)
                  + "_Fr"
                  + String.valueOf(f);
          String current_dataname = base_dataname + "_data" + filecount;

          Brick b_first = bricks.get(0);
          if (b_first.z_ != 0) IJ.log("warning");
          int st_z = b_first.z_;
          int ed_z = b_first.z_ + b_first.d_;
          LinkedList<ImageProcessor> iplist = new LinkedList<ImageProcessor>();
          for (int s = st_z; s < ed_z; s++)
            iplist.add(stack.getProcessor(imp.getStackIndex(ch + 1, s + 1, f + 1)));

          //					ImagePlus test;
          //					ImageStack tsst;
          //					test = NewImage.createByteImage("test", imageW, imageH, imageD,
          // NewImage.FILL_BLACK);
          //					tsst = test.getStack();
          for (int i = 0; i < bricks.size(); i++) {
            Brick b = bricks.get(i);

            if (ed_z > b.z_ || st_z < b.z_ + b.d_) {
              if (b.z_ > st_z) {
                for (int s = 0; s < b.z_ - st_z; s++) iplist.pollFirst();
                st_z = b.z_;
              } else if (b.z_ < st_z) {
                IJ.log("warning");
                for (int s = st_z - 1; s > b.z_; s--)
                  iplist.addFirst(stack.getProcessor(imp.getStackIndex(ch + 1, s + 1, f + 1)));
                st_z = b.z_;
              }

              if (b.z_ + b.d_ > ed_z) {
                for (int s = ed_z; s < b.z_ + b.d_; s++)
                  iplist.add(stack.getProcessor(imp.getStackIndex(ch + 1, s + 1, f + 1)));
                ed_z = b.z_ + b.d_;
              } else if (b.z_ + b.d_ < ed_z) {
                IJ.log("warning");
                for (int s = 0; s < ed_z - (b.z_ + b.d_); s++) iplist.pollLast();
                ed_z = b.z_ + b.d_;
              }
            } else {
              IJ.log("warning");
              iplist.clear();
              st_z = b.z_;
              ed_z = b.z_ + b.d_;
              for (int s = st_z; s < ed_z; s++)
                iplist.add(stack.getProcessor(imp.getStackIndex(ch + 1, s + 1, f + 1)));
            }

            if (iplist.size() != b.d_) {
              IJ.log("Stack Error");
              return;
            }

            //						int zz = st_z;

            int bsize = 0;
            byte[] bdata = new byte[b.w_ * b.h_ * b.d_ * bdepth / 8];
            Iterator<ImageProcessor> ipite = iplist.iterator();
            while (ipite.hasNext()) {

              //							ImageProcessor tsip = tsst.getProcessor(zz+1);

              ImageProcessor ip = ipite.next();
              ip.setRoi(b.x_, b.y_, b.w_, b.h_);
              if (bdepth == 8) {
                byte[] data = (byte[]) ip.crop().getPixels();
                System.arraycopy(data, 0, bdata, bsize, data.length);
                bsize += data.length;
              } else if (bdepth == 16) {
                ByteBuffer buffer = ByteBuffer.allocate(b.w_ * b.h_ * bdepth / 8);
                buffer.order(ByteOrder.LITTLE_ENDIAN);
                short[] data = (short[]) ip.crop().getPixels();
                for (short e : data) buffer.putShort(e);
                System.arraycopy(buffer.array(), 0, bdata, bsize, buffer.array().length);
                bsize += buffer.array().length;
              } else if (bdepth == 32) {
                ByteBuffer buffer = ByteBuffer.allocate(b.w_ * b.h_ * bdepth / 8);
                buffer.order(ByteOrder.LITTLE_ENDIAN);
                float[] data = (float[]) ip.crop().getPixels();
                for (float e : data) buffer.putFloat(e);
                System.arraycopy(buffer.array(), 0, bdata, bsize, buffer.array().length);
                bsize += buffer.array().length;
              }
            }

            String filename =
                basename
                    + "_Lv"
                    + String.valueOf(l)
                    + "_Ch"
                    + String.valueOf(ch)
                    + "_Fr"
                    + String.valueOf(f)
                    + "_ID"
                    + String.valueOf(i);

            int offset = bytecount;
            int datasize = bdata.length;

            if (filetype == "RAW") {
              int dummy = -1;
              // do nothing
            }
            if (filetype == "JPEG" && bdepth == 8) {
              try {
                DataBufferByte db = new DataBufferByte(bdata, datasize);
                Raster raster = Raster.createPackedRaster(db, b.w_, b.h_ * b.d_, 8, null);
                BufferedImage img =
                    new BufferedImage(b.w_, b.h_ * b.d_, BufferedImage.TYPE_BYTE_GRAY);
                img.setData(raster);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                ImageOutputStream ios = ImageIO.createImageOutputStream(baos);
                String format = "jpg";
                Iterator<javax.imageio.ImageWriter> iter =
                    ImageIO.getImageWritersByFormatName("jpeg");
                javax.imageio.ImageWriter writer = iter.next();
                ImageWriteParam iwp = writer.getDefaultWriteParam();
                iwp.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
                iwp.setCompressionQuality((float) jpeg_quality * 0.01f);
                writer.setOutput(ios);
                writer.write(null, new IIOImage(img, null, null), iwp);
                // ImageIO.write(img, format, baos);
                bdata = baos.toByteArray();
                datasize = bdata.length;
              } catch (IOException e) {
                e.printStackTrace();
                return;
              }
            }
            if (filetype == "ZLIB") {
              byte[] tmpdata = new byte[b.w_ * b.h_ * b.d_ * bdepth / 8];
              Deflater compresser = new Deflater();
              compresser.setInput(bdata);
              compresser.setLevel(Deflater.DEFAULT_COMPRESSION);
              compresser.setStrategy(Deflater.DEFAULT_STRATEGY);
              compresser.finish();
              datasize = compresser.deflate(tmpdata);
              bdata = tmpdata;
              compresser.end();
            }

            if (bytecount + datasize > sizelimit && bytecount > 0) {
              BufferedOutputStream fis = null;
              try {
                File file = new File(directory + current_dataname);
                fis = new BufferedOutputStream(new FileOutputStream(file));
                fis.write(packed_data, 0, bytecount);
              } catch (IOException e) {
                e.printStackTrace();
                return;
              } finally {
                try {
                  if (fis != null) fis.close();
                } catch (IOException e) {
                  e.printStackTrace();
                  return;
                }
              }
              filecount++;
              current_dataname = base_dataname + "_data" + filecount;
              bytecount = 0;
              offset = 0;
              System.arraycopy(bdata, 0, packed_data, bytecount, datasize);
              bytecount += datasize;
            } else {
              System.arraycopy(bdata, 0, packed_data, bytecount, datasize);
              bytecount += datasize;
            }

            Element filenode = doc.createElement("File");
            filenode.setAttribute("filename", current_dataname);
            filenode.setAttribute("channel", String.valueOf(ch));
            filenode.setAttribute("frame", String.valueOf(f));
            filenode.setAttribute("brickID", String.valueOf(i));
            filenode.setAttribute("offset", String.valueOf(offset));
            filenode.setAttribute("datasize", String.valueOf(datasize));
            filenode.setAttribute("filetype", String.valueOf(filetype));

            fsnode.appendChild(filenode);

            curbricknum++;
            IJ.showProgress((double) (curbricknum) / (double) (totalbricknum));
          }
          if (bytecount > 0) {
            BufferedOutputStream fis = null;
            try {
              File file = new File(directory + current_dataname);
              fis = new BufferedOutputStream(new FileOutputStream(file));
              fis.write(packed_data, 0, bytecount);
            } catch (IOException e) {
              e.printStackTrace();
              return;
            } finally {
              try {
                if (fis != null) fis.close();
              } catch (IOException e) {
                e.printStackTrace();
                return;
              }
            }
          }
        }
      }

      for (int i = 0; i < bricks.size(); i++) {
        Brick b = bricks.get(i);
        Element bricknode = doc.createElement("Brick");
        bricknode.setAttribute("id", String.valueOf(i));
        bricknode.setAttribute("st_x", String.valueOf(b.x_));
        bricknode.setAttribute("st_y", String.valueOf(b.y_));
        bricknode.setAttribute("st_z", String.valueOf(b.z_));
        bricknode.setAttribute("width", String.valueOf(b.w_));
        bricknode.setAttribute("height", String.valueOf(b.h_));
        bricknode.setAttribute("depth", String.valueOf(b.d_));
        brksnode.appendChild(bricknode);

        Element tboxnode = doc.createElement("tbox");
        tboxnode.setAttribute("x0", String.valueOf(b.tx0_));
        tboxnode.setAttribute("y0", String.valueOf(b.ty0_));
        tboxnode.setAttribute("z0", String.valueOf(b.tz0_));
        tboxnode.setAttribute("x1", String.valueOf(b.tx1_));
        tboxnode.setAttribute("y1", String.valueOf(b.ty1_));
        tboxnode.setAttribute("z1", String.valueOf(b.tz1_));
        bricknode.appendChild(tboxnode);

        Element bboxnode = doc.createElement("bbox");
        bboxnode.setAttribute("x0", String.valueOf(b.bx0_));
        bboxnode.setAttribute("y0", String.valueOf(b.by0_));
        bboxnode.setAttribute("z0", String.valueOf(b.bz0_));
        bboxnode.setAttribute("x1", String.valueOf(b.bx1_));
        bboxnode.setAttribute("y1", String.valueOf(b.by1_));
        bboxnode.setAttribute("z1", String.valueOf(b.bz1_));
        bricknode.appendChild(bboxnode);
      }

      if (l < lv - 1) {
        imp = WindowManager.getImage(lvImgTitle.get(l + 1));
        int[] newdims = imp.getDimensions();
        imageW = newdims[0];
        imageH = newdims[1];
        imageD = newdims[3];
        xspc = orgxspc * ((double) orgW / (double) imageW);
        yspc = orgyspc * ((double) orgH / (double) imageH);
        zspc = orgzspc * ((double) orgD / (double) imageD);
        bdepth = imp.getBitDepth();
      }
    }

    File newXMLfile = new File(directory + basename + ".vvd");
    writeXML(newXMLfile, doc);

    for (int l = 1; l < lv; l++) {
      imp = WindowManager.getImage(lvImgTitle.get(l));
      imp.changes = false;
      imp.close();
    }
  }
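build_bricks() uses two small integer helpers, log2() and Pow2(), that are defined elsewhere in the plugin. Judging from how they are used here (counting pyramid levels and padding brick sizes), plausible definitions, given only as an assumption, would be:

  // Assumed helpers (not in this excerpt): integer log2 and next power of two.
  private int log2(int n) {
    int r = 0;
    while (n > 1) { // floor(log2(n)) for n >= 1
      n >>= 1;
      r++;
    }
    return r;
  }

  private int Pow2(int n) {
    int p = 1;
    while (p < n) p <<= 1; // smallest power of two >= n
    return p;
  }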
Example #26
  /*
  	-- This is a helper method to run the periodic dataset
  */
  private static void runPeriodic() throws Exception {
    System.out.println("Running TDDD Periodic");
    // these are all the directories we will be running
    int arr[] = {10, 15, 20, 25, 30}; // these are the input numbers we will be running on
    // this is the base of the two files
    // these two are directory prefixes; we concatenate them with the numbers to get the full dir name
    String base_old_file = "../../thesis-datasets/input_";
    String base_new_file = "../../thesis-datasets/periodic_";

    int total_iter_count =
        0; // this is used to check how many times we will iterate through the data so we can make
           // an array of that size
    for (int i = startBoundary; i <= endBoundary; i += increment) total_iter_count++;

    for (int dir_num : arr) {
      // set up our directories

      String old_file_dir = base_old_file + dir_num + "/";
      String new_file_dir = base_new_file + dir_num + "/";
      System.out.println(old_file_dir);

      // read all the files in these two directories in sorted order
      ArrayList<String> old_file_list = new ArrayList<String>();
      ArrayList<String> new_file_list = new ArrayList<String>();

      ReadFile.readFile(old_file_dir, old_file_list);
      ReadFile.readFile(new_file_dir, new_file_list);

      // used to store all the runs for the periodic data
      double[] block_size_list = new double[total_iter_count];
      double[] ratio_size_list = new double[total_iter_count];
      int totalRuns = 0;

      for (int i = 0; i < old_file_list.size(); ++i) {
        // System.out.println(old_file_list.get(i) + " " + new_file_list.get(i));
        String[] s1 = old_file_list.get(i).split("_");
        String[] s2 = new_file_list.get(i).split("_");
        // input file should correspond to the output file
        if (!s1[1].equals(s2[1]) || !s1[2].equals(s2[2]))
          System.out.println("We got a huge problem");

        // basically the same code as in the preliminaryStep method, but we need to modify it for
        // periodic files
        int start = 0; // start of the sliding window
        int end = start + window - 1; // ending boundary
        // we can't call the preliminary function, so hash the two files individually
        // System.out.println("preliminaryStep " + fileList.get(i));
        Path p =
            Paths.get(
                old_file_dir
                    + old_file_list.get(
                        i)); // read the old file (the one which we will be using as the base
                             // comparison)
        Path p2 =
            Paths.get(
                new_file_dir
                    + new_file_list.get(
                        i)); // read the new file (the one we will compare against the base)

        byte[] old_file = Files.readAllBytes(p); // read the file in bytes
        byte[] new_file = Files.readAllBytes(p2);
        // System.out.println(array.length);
        ArrayList<Long> old_file_hashes =
            new ArrayList<Long>(); // make a new arrayList for this document
        ArrayList<Long> new_file_hashes =
            new ArrayList<Long>(); // make a new arrayList for this document

        HashDocument.hashDocument(
            old_file,
            old_file_hashes,
            start,
            end); // this hashes the entire document using the window and stores it into the
                  // hashes array
        HashDocument.hashDocument(
            new_file,
            new_file_hashes,
            start,
            end); // this hashes the entire document using the window and stores it into the
                  // hashes array

        // now call the startCdc method
        totalSize = new_file.length; // this is the length of the file
        startCDC(
            block_size_list, ratio_size_list, new_file, old_file, new_file_hashes, old_file_hashes);

        if (totalRuns % 10 == 0) System.out.println(totalRuns);
        totalRuns++;
      }

      // now output the results
      System.out.println("File dir = " + dir_num + " totalRuns = " + totalRuns);
      int index = 0;
      for (int i = startBoundary; i <= endBoundary; i += increment) {
        // avg out the outputs
        double blockSize = block_size_list[index] / (double) totalRuns;
        double ratio = ratio_size_list[index] / (double) totalRuns;
        System.out.println(i + " " + blockSize + " " + ratio);
        index++;
      }

      // now each index matches the corresponding file
    }
  } // end of method
Example #27
  /* -------------------------------------------------------------------------------------------------------
  	This method:
  	--	Takes in the following parameters:
  		1. array - this is the byte array that actually holds the document contents
  		2. md5Hashes - holds the entire hash values of the document
  		3. Divisor1/Divisor2/Divisor3... - main and backup divisors
  		5. The remainder we are looking for
  		6/7. min/max boundaries

  	-- We will start running the Karp-Rabin algorithm
  	-- We will find the boundaries using mod values: once a window's mod equals the remainder we have stored, it is a boundary
  	-- we also have divisor2/3 .. which are backup divisors. If we don't find a boundary with divisor1 once we hit the maxBoundary
  	-- we will see if we have one with divisor2; if not, then we will see if we have one with divisor3 and so on
  	-- We will hash everything in that boundary and store it
  -------------------------------------------------------------------------------------------------------- */
  private static void runTddd(
      byte[] array,
      ArrayList<Long> md5Hashes,
      long divisor1,
      long divisor2,
      long divisor3,
      long remainder,
      long minBoundary,
      long maxBoundary) {
    int documentStart = 0; // used to keep track of where the boundaries are
    boolean match = false; // used to check if we encountered a match
    int backUpBreakPoint = -1; // used to store the backup breakpoint
    int secondBackUpBreakPoint = -1; // used with the divisor3
    StringBuilder builder = new StringBuilder();
    int i = documentStart + (int) minBoundary - 1; // so we start at the minimum
    // loop through all the values in the document
    for (; i < md5Hashes.size() - 1; ++i) {
      // if ((i - documentStart + 1) < minBoundary) //  if the size of this boundary is less than
      // the min, continue looping
      // 	continue;
      /*-----------------------------------------------------------------
      	- If the mod of this equals the modvalue we defined, then
      	- this is a boundary
      ------------------------------------------------------------------*/

      if ((md5Hashes.get(i - 1) + md5Hashes.get(i) + md5Hashes.get(i + 1)) % divisor1
          == remainder) // check if this equals the mod value
      {
        // Hash all the values in the range (documentStart,current(i))
        // Remember we only want to hash the original VALUES from the array that contains the
        // original content of the file, not the hash values in the md5Hashes array
        for (int j = documentStart; j <= i; ++j) {
          builder.append(array[j]); // store everything upto the current value
        }
        String original = builder.toString();
        // if the string is a perfect match ( hash and original string)
        if (HashClass.is_string_match(original, table)) // insert the hash in the table
        coverage += i - documentStart + 1; // this is the amount of bytes we saved
        documentStart = i + 1; // set this as the beginning of the new boundary
        backUpBreakPoint = -1; // reset this
        secondBackUpBreakPoint = -1;
        numOfPieces++; // increment the num of pieces
        i = i + (int) minBoundary - 1; // skip all the way here
        builder.setLength(0); // reset the stringBuilder for the next round
      } else if ((md5Hashes.get(i - 1) + md5Hashes.get(i) + md5Hashes.get(i + 1)) % divisor2
          == remainder) { //  check if this is the backup point
        backUpBreakPoint = i; // this is the backup breakpoint
      } else if ((md5Hashes.get(i - 1) + md5Hashes.get(i) + md5Hashes.get(i + 1)) % divisor3
          == remainder) {
        secondBackUpBreakPoint = i; // set the second backup point
      }
      if ((i - documentStart + 1) >= maxBoundary) { // we have reached the maximum
        // check if we have a backup breakpoint
        int point;
        if (backUpBreakPoint != -1) // if we do, set this as the boundary
        point = backUpBreakPoint;
        else if (secondBackUpBreakPoint != -1) {
          point = secondBackUpBreakPoint; // if we don't have a first break point, find the second
        } else point = i; // else this current value of i is the breakpoint

        // Hash all the values in the range (documentStart,current(i))
        // Remember we only want to hash the original VALUES from the array that contains the
        // original content of the file, not the hash values in the md5Hashes array
        for (int j = documentStart; j <= point; ++j) {
          builder.append(array[j]); // store everything upto the current value
        }
        String original = builder.toString();
        if (HashClass.is_string_match(original, table))
          coverage += point - documentStart + 1; // this is the amount of bytes we saved
        numOfPieces++; // increment the num of pieces
        documentStart = point + 1; // set this as the beginning of the new boundary
        backUpBreakPoint = -1; // reset this
        secondBackUpBreakPoint = -1; // reset the secondBackUp point
        i = point + (int) minBoundary - 1; // skip all the way here ;
        builder.setLength(0); // reset the stringBuilder for the next round
      }
    } // end of the for loop

    // -------------------------------------------------------------------------------------------
    //  we are missing the last boundary, so hash that last piece
    //  We will also check against the strings we already have, and if we have encountered this
    //  already, then we will simply increment the counter, otherwise we will insert it in the
    //  hashtable and increase our miss counter
    // ----------------------------------------------------------------------------------------------
    for (int j = documentStart; j < array.length; ++j) {
      builder.append(array[j]);
    }
    // only compute the hash and insert it into our hashtable if the string buffer isn't empty
    String original = builder.toString();
    if (HashClass.is_string_match(original, table))
      coverage += array.length - documentStart; // this is the amount of bytes we saved
    numOfPieces++; // increment the num of pieces
  } // end of the method
Example #28
  /* -------------------------------------------------------------------------------------------------------
  This method:
  	--	Takes in the following parameters:
  		1. array - this is the byte array that actually holds the document contents
  		2. md5Hashes - holds the entire hash values of the document
  		3. Divisor1/Divisor2 - main and backup divisors
  		5. The remainder we are looking for
  		6/7. min/max boundaries
  		8. blockFreq - stores the block sizes and how many times they occur

  	-- We are simply chopping up the first file
  -------------------------------------------------------------------------------------------------------- */
  private static int chopDocument(
      byte[] array,
      ArrayList<Long> md5Hashes,
      long divisor1,
      long divisor2,
      long divisor3,
      long remainder,
      long minBoundary,
      long maxBoundary,
      HashMap<Integer, Integer> blockFreq) {

    int documentStart = 0; // used to keep track of where the boundaries are
    boolean match = false; // used to check if we encountered a match
    int backUpBreakPoint = -1; // used to store the backup breakpoint
    int secondBackUpBreakPoint = -1; // this is the second backup point with the divisor3
    StringBuilder builder = new StringBuilder();
    int counter = 0;
    int i = documentStart + (int) minBoundary - 1; // so we start at the minimum
    // loop through all the values in the document
    for (; i < md5Hashes.size(); ++i) {

      if (md5Hashes.get(i) % divisor1 == remainder) // check if this equals the mod value
      {
        int size = i - documentStart + 1; // we only care about the size
        if (blockFreq.get(size) == null) // if not in there, then simply store it
        blockFreq.put(size, 1);
        else // increment its integer count
        blockFreq.put(size, blockFreq.get(size) + 1); // increment the count
        counter++; // increment the block count
        documentStart = i + 1; // set this as the beginning of the new boundary
        backUpBreakPoint = -1; // reset this
        secondBackUpBreakPoint = -1; // second backup point reset it!
        i = i + (int) minBoundary - 1; // skip all the way here
      } else if (md5Hashes.get(i) % divisor2 == remainder) { //  check if this is the backup point
        backUpBreakPoint = i; // this is the backup breakpoint
      } else if (md5Hashes.get(i) % divisor3 == remainder) {
        secondBackUpBreakPoint = i; // we found a second backup point with divisor3
      }
      if ((i - documentStart + 1) >= maxBoundary) { // we have reached the maximum
        // check if we have a backup breakpoint
        int point;
        if (backUpBreakPoint != -1) // if we do, set this as the boundary
        point = backUpBreakPoint;
        else if (secondBackUpBreakPoint != -1)
          point = secondBackUpBreakPoint; // if we don't have a first backup, check if we have a second
        else point = i; // else this current value of i is the breakpoint

        // Hash all the values in the range (documentStart, point)
        // Remember we only want to hash the original VALUES from the array that contains the
        // original content of the file, not the hash values in the md5Hashes array
        int size = point - documentStart + 1; // we only care about the size
        if (blockFreq.get(size) == null) // if not in there, then simply store it
        blockFreq.put(size, 1);
        else // increment its integer count
        blockFreq.put(size, blockFreq.get(size) + 1); // increment the count
        counter++; // increment the block count
        documentStart = point + 1; // set this as the beginning of the new boundary
        backUpBreakPoint = -1; // reset this
        secondBackUpBreakPoint = -1; // reset second backup break point
        i = point + (int) minBoundary - 1; // so we start at the minimum
      }
    } // end of the for loop

    // -------------------------------------------------------------------------------------------
    //  we are missing the last boundary, so hash that last piece
    //  We will also check against the strings we already have, and if we have encountered this
    //  already, then we will simply increment the counter, otherwise we will insert it in the
    //  hashtable and increase our miss counter
    // ----------------------------------------------------------------------------------------------
    int size = array.length - documentStart; // we start from the point
    if (blockFreq.get(size) == null) // if not in there, then simply store it
    blockFreq.put(size, 1);
    else // increment its integer count
    blockFreq.put(size, blockFreq.get(size) + 1); // increment the count
    return ++counter; // increment the block count
  } // end of the method
Example #29
  /*
  	- This method is used as a helper method to run the algo for the archive dataset
  	- Note the archive set has multiple directories (one for each url)
  	- So read all of the directories in first and, for each directory, run the code
  */
  private static void runArchiveSet() throws Exception {

    System.out.println("Running TDDD archive");
    directory = "../../thesis-datasets/datasets2/";
    File file = new File(directory);
    String[] directory_list =
        file.list(
            new FilenameFilter() {
              @Override
              public boolean accept(File current, String name) {
                return new File(current, name).isDirectory(); // make sure it's a directory
              }
            });

    int totalRuns = 0; // used to avg the runs in the end
    int total_iter_count =
        0; // this is used to check how many times we will iterate through the data so we can make
           // an array of that size
    for (int i = startBoundary; i <= endBoundary; i += increment) total_iter_count++;

    // System.out.println(Arrays.toString(directory_list));
    int sets = 0;
    // make the arrays to hold the respective info for the different versions
    // run them simultaneously to speed up the program
    double[] block_size_list_last_year = new double[total_iter_count];
    double[] ratio_size_list_last_year = new double[total_iter_count];

    double[] block_size_list_six_month = new double[total_iter_count];
    double[] ratio_size_list__six_month = new double[total_iter_count];

    double[] block_size_list_two_year = new double[total_iter_count];
    double[] ratio_size_list_two_year = new double[total_iter_count];

    int current = 0;
    int six_month = 2;
    int last_year = 1;
    int two_year = 3;
    // loop through and run the cdc for each directory
    for (String dir : directory_list) {

      ReadFile.readFile(directory + dir, fileList); // read all the files in this directory
      preliminaryStep(directory + dir + "/"); // call the preliminaryStep on all the files

      totalRuns++;

      totalSize =
          fileArray.get(current)
              .length; // get the length of the file we will be running it against!

      // run it against six month
      startCDC(
          block_size_list_six_month,
          ratio_size_list__six_month,
          fileArray.get(current),
          fileArray.get(six_month),
          hashed_File_List.get(current),
          hashed_File_List.get(six_month));

      // run it against last year
      startCDC(
          block_size_list_last_year,
          ratio_size_list_last_year,
          fileArray.get(current),
          fileArray.get(last_year),
          hashed_File_List.get(current),
          hashed_File_List.get(last_year));

      // run it against the two year version
      startCDC(
          block_size_list_two_year,
          ratio_size_list_two_year,
          fileArray.get(current),
          fileArray.get(two_year),
          hashed_File_List.get(current),
          hashed_File_List.get(two_year));

      // clear the fileArray, hashed_File_List, and fileList arrays
      fileArray.clear();
      hashed_File_List.clear();
      fileList.clear();

      // if (Double.isNaN(ratio_size_list[0])){
      // 	System.out.println(sets+" "+Arrays.toString(ratio_size_list));
      // 	test = true;
      // 	break;
      // }
      if (sets % 200 == 0) System.out.println(sets);
      ++sets;
    } // end of directory list for loop

    // now output the avged value for all the runs
    // System.out.println(Arrays.toString(ratio_size_list));
    System.out.println("Printing six_month");
    int index = 0;
    for (int i = startBoundary; i <= endBoundary; i += increment) {
      // avg out the outputs
      double blockSize = block_size_list_six_month[index] / (double) totalRuns;
      double ratio = ratio_size_list__six_month[index] / (double) totalRuns;
      System.out.println(i + " " + i / 2 + 1 + " " + i / 4 + 1 + " " + blockSize + " " + ratio);
      index++;
    }
    System.out.println("Printing last year");
    index = 0;
    for (int i = startBoundary; i <= endBoundary; i += increment) {
      double blockSize = block_size_list_last_year[index] / (double) totalRuns;
      double ratio = ratio_size_list_last_year[index] / (double) totalRuns;
      System.out.println(i + " " + blockSize + " " + ratio);
      index++;
    }

    System.out.println("Printing two year");
    index = 0;
    for (int i = startBoundary; i <= endBoundary; i += increment) {
      double blockSize = block_size_list_two_year[index] / (double) totalRuns;
      double ratio = ratio_size_list_two_year[index] / (double) totalRuns;
      System.out.println(i + " " + blockSize + " " + ratio);
      index++;
    }
  }
Example #30
  public byte[] ZipIt(ArrayList<ResultType> items, String[] algorithm, String[] filetype)
      throws IOException {

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    BufferedInputStream bis;

    String[] files = new String[10000];

    File file;

    int bytesRead;

    byte[] buffer = new byte[1024];

    int j = 0;
    boolean flag;

    for (int i = 0; i < items.size(); i++) {

      ResultType g = (ResultType) items.get(i);

      String folder = g.get("genomeNames");

      file = new File("/storage/brcdownloads/patric2/genomes/" + folder);

      if (!file.exists()) {

        System.err.println("Skipping Folder: " + "/storage/brcdownloads/patric2/genomes/" + folder);
        continue;

      } else {
        for (int k = 0; k < algorithm.length; k++) {
          for (int m = 0; m < filetype.length; m++) {

            flag = false;

            file =
                new File(
                    "/storage/brcdownloads/patric2/genomes/"
                        + folder
                        + "/"
                        + folder
                        + (filetype[m].equals(".fna") ? "" : algorithm[k])
                        + filetype[m]);

            if (!file.exists()) {
              System.err.println("Skipping File: " + file.getAbsolutePath());
            } else {
              // System.out.println("File: " + file.getAbsolutePath());
              for (int z = 0; z < files.length; z++) {
                if (files[z] != null && files[z].equals(file.getAbsolutePath())) {
                  flag = true;
                  break;
                }
              }
              // System.out.println(flag);
              if (!flag) {
                files[j] = file.getAbsolutePath();
                j++;
              }
            }
          }
        }
      }
    }
    // System.out.println(j);
    if (j > 0) {
      for (int i = 0; i < j; i++) {
        file = new File(files[i]);
        bis = new BufferedInputStream(new FileInputStream(file));
        ZipEntry entry = new ZipEntry(file.getName());
        zos.putNextEntry(entry);
        while ((bytesRead = bis.read(buffer)) != -1) {
          zos.write(buffer, 0, bytesRead);
        }
        bis.close();
        zos.closeEntry();
      }
      zos.close();
    }

    return baos.toByteArray();
  }