Example #1
 public StringBuilder toString(StringBuilder sb, String[] fs, long idx) {
   Vec vecs[] = vecs();
   for (int c = 0; c < fs.length; c++) {
     Vec vec = vecs[c];
     if (vec.isEnum()) {
       String s = "----------";
       if (!vec.isNA(idx)) {
         int x = (int) vec.at8(idx);
         if (x >= 0 && x < vec._domain.length) s = vec._domain[x];
       }
       sb.append(String.format(fs[c], s));
     } else if (vec.isInt()) {
       if (vec.isNA(idx)) {
         Chunk C = vec.elem2BV(0); // 1st Chunk
         int len = C.pformat_len0(); // Printable width
         for (int i = 0; i < len; i++) sb.append('-');
       } else {
         try {
           sb.append(String.format(fs[c], vec.at8(idx)));
         } catch (IllegalFormatException ife) {
           System.out.println("Format: " + fs[c] + " col=" + c + " not for ints");
           ife.printStackTrace();
         }
       }
     } else {
       sb.append(String.format(fs[c], vec.at(idx)));
       if (vec.isNA(idx)) sb.append(' ');
     }
     sb.append(' '); // Column separator
   }
   sb.append('\n');
   return sb;
 }
Example #2
  Vec vec_by_mouse(int x, int y) {
    Vec ans = new Vec();
    ans.x = (x - dim.width / 2.) / size;
    ans.y = -(y - dim.height / 2.) / size;

    return ans;
  }
Example #3
 @Override
 public void map(Chunk[] ix, NewChunk[] ncs) {
   final Vec[] vecs = new Vec[_cols.length];
   final Vec anyv = _base.anyVec();
   final long nrow = anyv.length();
   long r = ix[0].at80(0);
   int last_ci = anyv.elem2ChunkIdx(r < nrow ? r : 0); // memoize the last chunk index
   long last_c0 = anyv._espc[last_ci]; // ...         last chunk start
   long last_c1 = anyv._espc[last_ci + 1]; // ...         last chunk end
   Chunk[] last_cs = new Chunk[vecs.length]; // ...         last chunks
   for (int c = 0; c < _cols.length; c++) {
     vecs[c] = _base.vecs()[_cols[c]];
     last_cs[c] = vecs[c].elem2BV(last_ci);
   }
   for (int i = 0; i < ix[0]._len; i++) {
     // select one row
     r = ix[0].at80(i) - 1; // next row to select
     if (r < 0) continue;
     if (r >= nrow) {
       for (int c = 0; c < vecs.length; c++) ncs[c].addNum(Double.NaN);
     } else {
       if (r < last_c0 || r >= last_c1) {
         last_ci = anyv.elem2ChunkIdx(r);
         last_c0 = anyv._espc[last_ci];
         last_c1 = anyv._espc[last_ci + 1];
         for (int c = 0; c < vecs.length; c++) last_cs[c] = vecs[c].elem2BV(last_ci);
       }
       for (int c = 0; c < vecs.length; c++) ncs[c].addNum(last_cs[c].at(r));
     }
   }
 }
Example #4
 @Override
 void computeTangent(KeyFrame prev, KeyFrame next) {
   tgPVec = Vec.multiply(Vec.subtract(next.position(), prev.position()), 0.5f);
   tgQuat =
       Quat.squadTangent(
           (Quat) prev.orientation(), (Quat) orientation(), (Quat) next.orientation());
 }
Example #5
 Vec wall_power(Ball p) {
   Vec ans = new Vec();
   is_colide = false;
   ans.x = wall_power2(p.pos.x);
   ans.y = wall_power2(p.pos.y);
   if (is_colide) ans.sub_to(p.speed.mult(10));
   return ans;
 }
Example #6
 public Vec replace(int col, Vec nv) {
   assert col < _names.length;
   Vec rv = vecs()[col];
   assert rv.group().equals(nv.group());
   _vecs[col] = nv;
   _keys[col] = nv._key;
   if (DKV.get(nv._key) == null) // If not already in KV, put it there
   DKV.put(nv._key, nv);
   return rv;
 }
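A minimal usage sketch for replace() above. The Frame `fr`, the column index, and the replacement Vec `nv` are assumptions for illustration, not taken from the source; the asserts only require that `nv` belong to the same VectorGroup as the existing columns. The return value is the displaced column, which the caller can drop with remove(), as Example #12 does with its temporary vec.

// Hypothetical usage; `fr`, the column index 2, and `nv` are assumed to exist.
Vec old = fr.replace(2, nv); // swap column 2 for nv; the displaced Vec is returned
old.remove();                // free the old column's storage, as in Example #12's cleanup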
Example #7
 /**
   * Check that the vectors are all compatible. All Vecs have their content sharded using the same
   * number of rows per chunk.
  */
 public void checkCompatible() {
   try {
     Vec v0 = anyVec();
     int nchunks = v0.nChunks();
     for (Vec vec : vecs()) {
       if (vec instanceof AppendableVec) continue; // New Vectors are endlessly compatible
       if (vec.nChunks() != nchunks)
         throw new IllegalArgumentException(
             "Vectors different numbers of chunks, " + nchunks + " and " + vec.nChunks());
     }
     // Also check each chunk has same rows
     for (int i = 0; i < nchunks; i++) {
       long es = v0.chunk2StartElem(i);
       for (Vec vec : vecs())
         if (!(vec instanceof AppendableVec) && vec.chunk2StartElem(i) != es)
           throw new IllegalArgumentException(
               "Vector chunks different numbers of rows, "
                   + es
                   + " and "
                   + vec.chunk2StartElem(i));
     }
   } catch (Throwable ex) {
     Throwables.propagate(ex);
   }
 }
Example #8
   Vec calc_collide_power(Ball p1, Ball p2, double dist) {
    if (dist > radius * 2) return new Vec(); // Friday, August 29, 2008 15:26:33: stickiness bug fix
    Vec speed_diff = p2.speed.sub(p1.speed);
    double force = 1000 * (dist - radius * 2);
    Vec npos1 = p1.pos.div(dist); // normalized
    Vec npos2 = p2.pos.div(dist);
    force += 10 * speed_diff.dot_mult(npos2.sub(npos1));
    Vec ans = npos2.sub(npos1).mult(force);
    return ans;
    /*    colide_power_x=force*(x2-x1)/dist;
    colide_power_y=force*(y2-y1)/dist;*/

  }
Example #9
   void animate() {
    dim = getSize();
    size = (int) (Math.min(dim.height, dim.width) / 2.2);
    timer.tell_time();
     if (timer.time_diff == 0) return; // not enough time has passed, don't animate (crash fix)
    dragged_speed = dragged_vec.sub(last_dragged_vec).div(timer.time_diff);
    last_dragged_vec = dragged_vec;
    if (dragged_ball != -1) {
      balls.get2(dragged_ball).pos = dragged_vec.add(find_offset).trim(-1, 1);
      balls.get2(dragged_ball).speed = dragged_speed;
    }

    balls = new WorldAnimate().calc_new_frame(balls, springs, RADIUS, timer);
  }
Example #10
 public Frame(String[] names, Vec[] vecs) {
   // assert names==null || names.length == vecs.length : "Number of columns does not match to
   // number of cols' names.";
   _names = names;
   _vecs = vecs;
   _keys = new Key[vecs.length];
   for (int i = 0; i < vecs.length; i++) {
     Key k = _keys[i] = vecs[i]._key;
     if (DKV.get(k) == null) // If not already in KV, put it there
     DKV.put(k, vecs[i]);
   }
   Vec v0 = anyVec();
   if (v0 == null) return;
   VectorGroup grp = v0.group();
   for (int i = 0; i < vecs.length; i++) assert grp.equals(vecs[i].group());
 }
Example #11
   Vec calc_spring_power(Ball p1, Ball p2) {
    double dist = p1.pos.calc_dist(p2.pos);
    // if (abs(dist-STRING_LEN)<.1)
    //	return;
    Vec speed_diff = p2.speed.sub(p1.speed);
    double force = 1000 * (dist - STRING_LEN);
    Vec npos1 = p1.pos.div(dist); // normalized
    Vec npos2 = p2.pos.div(dist);
    force += 100 * speed_diff.dot_mult(npos2.sub(npos1));
    Vec ans = npos2.sub(npos1).mult(force);
    return ans;
    // force*=force;
    //    colide_power_x=(x2-x1)/dist*force;
    //   colide_power_y=(y2-y1)/dist*force;

  }
Example #12
  @Override
  Val apply(Env env, Env.StackHelp stk, AST asts[]) {

    // Compute the variable args.  Find the common row count
    Val vals[] = new Val[asts.length];
    Vec vec = null;
    for (int i = 1; i < asts.length; i++) {
      vals[i] = stk.track(asts[i].exec(env));
      if (vals[i].isFrame()) {
        Vec anyvec = vals[i].getFrame().anyVec();
        if (anyvec == null) continue; // Ignore the empty frame
        if (vec == null) vec = anyvec;
        else if (vec.length() != anyvec.length())
          throw new IllegalArgumentException(
              "cbind frames must have all the same rows, found "
                  + vec.length()
                  + " and "
                  + anyvec.length()
                  + " rows.");
      }
    }
    boolean clean = false;
    if (vec == null) {
      vec = Vec.makeZero(1);
      clean = true;
    } // Default to length 1

    // Populate the new Frame
    Frame fr = new Frame();
    for (int i = 1; i < asts.length; i++) {
      switch (vals[i].type()) {
        case Val.FRM:
          fr.add(fr.makeCompatible(vals[i].getFrame()));
          break;
        case Val.FUN:
          throw H2O.unimpl();
        case Val.STR:
          throw H2O.unimpl();
        case Val.NUM:
          // Auto-expand scalars to fill every row
          double d = vals[i].getNum();
          fr.add(Double.toString(d), vec.makeCon(d));
          break;
        default:
          throw H2O.unimpl();
      }
    }
    if (clean) vec.remove();

    return new ValFrame(fr);
  }
Example #13
 /** Appends a named column, keeping the last Vec as the response */
 public void add(String name, Vec vec) {
   assert _vecs.length == 0 || anyVec().group().equals(vec.group());
   final int len = _names.length;
   _names = Arrays.copyOf(_names, len + 1);
   _vecs = Arrays.copyOf(_vecs, len + 1);
   _keys = Arrays.copyOf(_keys, len + 1);
   _names[len] = name;
   _vecs[len] = vec;
   _keys[len] = vec._key;
 }
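A short usage sketch for add(). The Frame `fr` and the Vec `v` are assumptions for illustration; the assert above requires that `v` share the frame's VectorGroup. Example #25's find() can then locate the appended column.

// Hypothetical usage; `fr` and `v` are assumed to exist and to share a VectorGroup.
fr.add("age", v);     // append a column named "age"
int col = fr.find(v); // Example #25: index of the newly appended column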
Example #14
  private void floodFillSimilarClusters(
      int unitpos, int[] clusterassoc, double[][] lkvalues, int[][] neighbors, int startunitpos) {
    // check if already associated to other cluster (end of recursion)
    if (clusterassoc[unitpos] != unitpos && clusterassoc[unitpos] != startunitpos) return;

    Vector<Integer> joinedunits = new Vector<Integer>();
    // check neighbors (starts with index 1) for join candidates
    for (int i = 1; i < neighbors[unitpos].length; i++) {
      if (neighbors[unitpos][i] != -1
          && // on the som
          clusterassoc[neighbors[unitpos][i]] != startunitpos
          && // not already in cluster
          clusterassoc[neighbors[unitpos][i]]
              == neighbors[unitpos][i]) { // not part of any other cluster

        // join two units iff the summed-up vector differs from both original vectors by less than the threshold
        double[] cosNormA = Vec.cosineNormalize(Vec.cloneVector(lkvalues[unitpos]));
        double[] cosNormB = Vec.cosineNormalize(Vec.cloneVector(lkvalues[neighbors[unitpos][i]]));
        double[] sumAB = Vec.add(lkvalues[unitpos], lkvalues[neighbors[unitpos][i]]);
        double[] cosNormAB = Vec.cosineNormalize(Vec.cloneVector(sumAB));

        if (Vec.euclDist(cosNormA, cosNormAB) < 0.6 && Vec.euclDist(cosNormB, cosNormAB) < 0.6) {
          // similar units -> join
          clusterassoc[neighbors[unitpos][i]] = startunitpos;

          // write new vector to all associated
          for (int k = 0; k < clusterassoc.length; k++) {
            if (clusterassoc[k] == startunitpos) {
              lkvalues[k] = sumAB;
            }
          }
          //					Vec.copyTo(lkvalues[unitpos], sumAB);
          //					Vec.copyTo(lkvalues[neighbors[unitpos][i]], sumAB);

          // add to list for recursion
          joinedunits.addElement(new Integer(neighbors[unitpos][i]));
        }
      }
    }
    // recursion over joined units
    Iterator<Integer> iit = joinedunits.iterator();
    while (iit.hasNext()) {
      floodFillSimilarClusters(
          iit.next().intValue(), clusterassoc, lkvalues, neighbors, startunitpos);
    }
  }
Example #15
 @Override
 ValFrame apply(Env env, Env.StackHelp stk, AST asts[]) {
   Frame fr = stk.track(asts[1].exec(env)).getFrame();
   double frac = asts[2].exec(env).getNum();
   double nrow = fr.numRows() * frac;
   Vec vecs[] = fr.vecs();
   long[] idxs = new long[fr.numCols()];
   int j = 0;
   for (int i = 0; i < idxs.length; i++) if (vecs[i].naCnt() < nrow) idxs[j++] = i;
   Vec vec = Vec.makeVec(Arrays.copyOf(idxs, j), null, Vec.VectorGroup.VG_LEN1.addVec());
   return new ValFrame(new Frame(vec));
 }
Example #16
 @Override
 public String toString() {
   // Across
   Vec vecs[] = vecs();
   if (vecs.length == 0) return "{}";
   String s = "{" + _names[0];
   long bs = vecs[0].byteSize();
   for (int i = 1; i < vecs.length; i++) {
     s += "," + _names[i];
     bs += vecs[i].byteSize();
   }
   s += "}, " + PrettyPrint.bytes(bs) + "\n";
   // Down
   Vec v0 = anyVec();
   if (v0 == null) return s;
   int nc = v0.nChunks();
   s += "Chunk starts: {";
   for (int i = 0; i < nc; i++) s += v0.elem2BV(i)._start + ",";
   s += "}";
   return s;
 }
Example #17
 protected void updateSplineCache() {
   Vec deltaP =
       Vec.subtract(
           keyFrameList.get(currentFrame2.nextIndex()).position(),
           keyFrameList.get(currentFrame1.nextIndex()).position());
   pv1 =
       Vec.add(
           Vec.multiply(deltaP, 3.0f),
           Vec.multiply(keyFrameList.get(currentFrame1.nextIndex()).tgP(), (-2.0f)));
   pv1 = Vec.subtract(pv1, keyFrameList.get(currentFrame2.nextIndex()).tgP());
   pv2 = Vec.add(Vec.multiply(deltaP, (-2.0f)), keyFrameList.get(currentFrame1.nextIndex()).tgP());
   pv2 = Vec.add(pv2, keyFrameList.get(currentFrame2.nextIndex()).tgP());
   splineCacheIsValid = true;
 }
Example #18
 int find_ball(Vec v) {
   int num_balls = balls.size();
   for (int i = 0; i < num_balls; i++) {
     Ball p = balls.get2(i);
     double dist = v.calc_dist(p.pos);
     if (dist < RADIUS) {
       //		    printf("found ball %d\n",i);
       find_offset = p.pos.sub(dragged_vec);
       // last_dragged_vec=v;
       return i;
     }
   }
   return -1;
 }
Example #19
  /**
   * Interpolate {@link #frame()} at time {@code time} (expressed in seconds). {@link
   * #interpolationTime()} is set to {@code time} and {@link #frame()} is set accordingly.
   *
   * <p>If you simply want to change {@link #interpolationTime()} but not the {@link #frame()}
   * state, use {@link #setInterpolationTime(float)} instead.
   */
  public void interpolateAtTime(float time) {
    this.checkValidity();
    setInterpolationTime(time);

    if ((keyFrameList.isEmpty()) || (frame() == null)) return;

    if (!valuesAreValid) updateModifiedFrameValues();

    updateCurrentKeyFrameForTime(time);

    if (!splineCacheIsValid) updateSplineCache();

    float alpha;
    float dt =
        keyFrameList.get(currentFrame2.nextIndex()).time()
            - keyFrameList.get(currentFrame1.nextIndex()).time();
    if (Util.zero(dt)) alpha = 0.0f;
    else alpha = (time - keyFrameList.get(currentFrame1.nextIndex()).time()) / dt;

    Vec pos =
        Vec.add(
            keyFrameList.get(currentFrame1.nextIndex()).position(),
            Vec.multiply(
                Vec.add(
                    keyFrameList.get(currentFrame1.nextIndex()).tgP(),
                    Vec.multiply(Vec.add(pv1, Vec.multiply(pv2, alpha)), alpha)),
                alpha));

    float mag =
        Util.lerp(
            keyFrameList.get(currentFrame1.nextIndex()).magnitude(),
            keyFrameList.get(currentFrame2.nextIndex()).magnitude(),
            alpha);

    Rotation q;
    if (gScene.is3D()) {
      q =
          Quat.squad(
              (Quat) keyFrameList.get(currentFrame1.nextIndex()).orientation(),
              ((KeyFrame3D) keyFrameList.get(currentFrame1.nextIndex())).tgQ(),
              ((KeyFrame3D) keyFrameList.get(currentFrame2.nextIndex())).tgQ(),
              (Quat) keyFrameList.get(currentFrame2.nextIndex()).orientation(),
              alpha);
    } else {
      q =
          new Rot(
              Util.lerp(
                  keyFrameList.get(currentFrame1.nextIndex()).orientation().angle(),
                  keyFrameList.get(currentFrame2.nextIndex()).orientation().angle(),
                  (alpha)));
    }

    frame().setPositionWithConstraint(pos);
    frame().setRotationWithConstraint(q);
    frame().setMagnitude(mag);
  }
Example #20
 @Override
 public void map(Chunk cs) {
   int idx = _chunkOffset + cs.cidx();
   Key ckey = Vec.chunkKey(_v._key, idx);
   if (_cmap != null) {
     assert !cs.hasFloat()
         : "Input chunk (" + cs.getClass() + ") has float, but is expected to be categorical";
     NewChunk nc = new NewChunk(_v, idx);
     // loop over rows and update ints for new domain mapping according to vecs[c].domain()
     for (int r = 0; r < cs._len; ++r) {
       if (cs.isNA(r)) nc.addNA();
       else nc.addNum(_cmap[(int) cs.at8(r)], 0);
     }
     nc.close(_fs);
   } else {
     DKV.put(ckey, cs.deepCopy(), _fs, true);
   }
 }
Example #21
 Vec mult(double scalar) {
   Vec ans = new Vec();
   ans.x = x * scalar;
   ans.y = y * scalar;
   return ans;
 }
Example #22
 Vec sub(Vec right) {
   Vec ans = new Vec();
   ans.x = x - right.x;
   ans.y = y - right.y;
   return ans;
 }
Example #23
 Vec add(Vec right) {
   Vec ans = new Vec();
   ans.x = x + right.x;
   ans.y = y + right.y;
   return ans;
 }
Example #24
 Vec div(double a) {
   Vec ans = new Vec();
   ans.x = x / a;
   ans.y = y / a;
   return ans;
 }
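Each of the four helpers above returns a fresh Vec, so calls chain without mutating their operands. A minimal sketch, assuming only the Vec class shown here (no-arg constructor, public x/y fields):

// Hypothetical usage of the 2D helpers mult, sub, add, div.
Vec a = new Vec(); a.x = 3; a.y = 4;
Vec b = new Vec(); b.x = 1; b.y = 2;
Vec midpoint = a.add(b).div(2);  // (2.0, 3.0)
Vec scaled = a.sub(b).mult(10);  // (20.0, 20.0)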
Example #25
 public int find(Vec vec) {
   for (int i = 0; i < _vecs.length; i++) if (vec.equals(_vecs[i])) return i;
   return -1;
 }
Example #26
 /** Returns the first readable vector. */
 public Vec anyVec() {
   if (_col0 != null) return _col0;
   for (Vec v : vecs()) if (v.readable()) return (_col0 = v);
   return null;
 }
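anyVec() is the usual way to grab one representative column when only frame-wide properties are needed, e.g. the row count, as Example #3 does with _base.anyVec().length(). A minimal sketch, assuming a Frame `fr`:

// Hypothetical usage; `fr` is assumed to exist.
Vec v0 = fr.anyVec();                        // first readable column, or null if the frame has none
long nrows = (v0 == null) ? 0 : v0.length(); // row count shared by every column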
Example #27
  @Override
  Val apply(Env env, Env.StackHelp stk, AST asts[]) {

    // Execute all args.  Find a canonical frame; all Frames must look like this one.
    // Each argument turns into either a Frame (whose rows are entirely
    // inlined) or a scalar (which is replicated across as a single row).
    Frame fr = null; // Canonical Frame; all frames have the same column count, types and names
    int nchks = 0; // Total chunks
    Val vals[] = new Val[asts.length]; // Computed AST results
    for (int i = 1; i < asts.length; i++) {
      vals[i] = stk.track(asts[i].exec(env));
      if (vals[i].isFrame()) {
        fr = vals[i].getFrame();
        nchks += fr.anyVec().nChunks(); // Total chunks
      } else nchks++; // One chunk per scalar
    }
    // No Frame, just a pile-o-scalars?
    Vec zz = null; // The zero-length vec for the zero-frame frame
    if (fr == null) { // Zero-length, 1-column, default name
      fr = new Frame(new String[] {Frame.defaultColName(0)}, new Vec[] {zz = Vec.makeZero(0)});
      if (asts.length == 1) return new ValFrame(fr);
    }

    // Verify all Frames are the same columns, names, and types.  Domains can vary, and will be the
    // union
    final Frame frs[] = new Frame[asts.length]; // Input frame
    final byte[] types = fr.types(); // Column types
    final int ncols = fr.numCols();
    final long[] espc = new long[nchks + 1]; // Compute a new layout!
    int coffset = 0;

    for (int i = 1; i < asts.length; i++) {
      Val val = vals[i]; // Save values computed for pass 2
      Frame fr0 =
          val.isFrame()
              ? val.getFrame()
              // Scalar: auto-expand into a 1-row frame
              : stk.track(new Frame(fr._names, Vec.makeCons(val.getNum(), 1L, fr.numCols())));

      // Check that all frames are compatible
      if (fr.numCols() != fr0.numCols())
        throw new IllegalArgumentException(
            "rbind frames must have all the same columns, found "
                + fr.numCols()
                + " and "
                + fr0.numCols()
                + " columns.");
      if (!Arrays.deepEquals(fr._names, fr0._names))
        throw new IllegalArgumentException(
            "rbind frames must have all the same column names, found "
                + Arrays.toString(fr._names)
                + " and "
                + Arrays.toString(fr0._names));
      if (!Arrays.equals(types, fr0.types()))
        throw new IllegalArgumentException(
            "rbind frames must have all the same column types, found "
                + Arrays.toString(types)
                + " and "
                + Arrays.toString(fr0.types()));

      frs[i] = fr0; // Save frame

      // Roll up the ESPC row counts
      long roffset = espc[coffset];
      long[] espc2 = fr0.anyVec().espc();
      for (int j = 1; j < espc2.length; j++) // Roll up the row counts
      espc[coffset + j] = (roffset + espc2[j]);
      coffset += espc2.length - 1; // Chunk offset
    }
    if (zz != null) zz.remove();

    // build up the new domains for each vec
    HashMap<String, Integer>[] dmap = new HashMap[types.length];
    String[][] domains = new String[types.length][];
    int[][][] cmaps = new int[types.length][][];
    for (int k = 0; k < types.length; ++k) {
      dmap[k] = new HashMap<>();
      int c = 0;
      byte t = types[k];
      if (t == Vec.T_CAT) {
        int[][] maps = new int[frs.length][];
        for (int i = 1; i < frs.length; i++) {
          maps[i] = new int[frs[i].vec(k).domain().length];
          for (int j = 0; j < maps[i].length; j++) {
            String s = frs[i].vec(k).domain()[j];
            if (!dmap[k].containsKey(s)) dmap[k].put(s, maps[i][j] = c++);
            else maps[i][j] = dmap[k].get(s);
          }
        }
        cmaps[k] = maps;
      } else {
        cmaps[k] = new int[frs.length][];
      }
      domains[k] = c == 0 ? null : new String[c];
      for (Map.Entry<String, Integer> e : dmap[k].entrySet()) domains[k][e.getValue()] = e.getKey();
    }

    // Now make Keys for the new Vecs
    Key<Vec>[] keys = fr.anyVec().group().addVecs(fr.numCols());
    Vec[] vecs = new Vec[fr.numCols()];
    int rowLayout = Vec.ESPC.rowLayout(keys[0], espc);
    for (int i = 0; i < vecs.length; i++)
      vecs[i] = new Vec(keys[i], rowLayout, domains[i], types[i]);

    // Do the row-binds column-by-column.
    // Switch to F/J thread for continuations
    ParallelRbinds t;
    H2O.submitTask(t = new ParallelRbinds(frs, espc, vecs, cmaps)).join();
    return new ValFrame(new Frame(fr.names(), t._vecs));
  }
Example #28
  public Frame deepSlice(Object orows, Object ocols) {
    // ocols is either a long[] or a Frame-of-1-Vec
    long[] cols;
    if (ocols == null) {
      cols = (long[]) ocols;
      assert cols == null;
    } else {
      if (ocols instanceof long[]) {
        cols = (long[]) ocols;
      } else if (ocols instanceof Frame) {
        Frame fr = (Frame) ocols;
        if (fr.numCols() != 1) {
          throw new IllegalArgumentException(
              "Columns Frame must have only one column (actually has "
                  + fr.numCols()
                  + " columns)");
        }

        long n = fr.anyVec().length();
        if (n > MAX_EQ2_COLS) {
          throw new IllegalArgumentException(
              "Too many requested columns (requested " + n + ", max " + MAX_EQ2_COLS + ")");
        }

        cols = new long[(int) n];
        Vec v = fr._vecs[0];
        for (long i = 0; i < v.length(); i++) {
          cols[(int) i] = v.at8(i);
        }
      } else {
        throw new IllegalArgumentException(
            "Columns is specified by an unsupported data type ("
                + ocols.getClass().getName()
                + ")");
      }
    }

    // Since cols is probably short convert to a positive list.
    int c2[] = null;
    if (cols == null) {
      c2 = new int[numCols()];
      for (int i = 0; i < c2.length; i++) c2[i] = i;
    } else if (cols.length == 0) {
      c2 = new int[0];
    } else if (cols[0] > 0) {
      c2 = new int[cols.length];
      for (int i = 0; i < cols.length; i++)
        c2[i] = (int) cols[i] - 1; // Convert 1-based cols to zero-based
    } else {
      c2 = new int[numCols() - cols.length];
      int j = 0;
      for (int i = 0; i < numCols(); i++) {
        if (j >= cols.length || i < (-cols[j] - 1)) c2[i - j] = i;
        else j++;
      }
    }
    for (int i = 0; i < c2.length; i++)
      if (c2[i] >= numCols())
        throw new IllegalArgumentException(
            "Trying to select column " + c2[i] + " but only " + numCols() + " present.");
    if (c2.length == 0)
      throw new IllegalArgumentException(
          "No columns selected (did you try to select column 0 instead of column 1?)");

    // Do Da Slice
    // orows is either a long[] or a Vec
    if (orows == null)
      return new DeepSlice((long[]) orows, c2)
          .doAll(c2.length, this)
          .outputFrame(names(c2), domains(c2));
    else if (orows instanceof long[]) {
      final long CHK_ROWS = 1000000;
      long[] rows = (long[]) orows;
      if (rows.length == 0)
        return new DeepSlice(rows, c2).doAll(c2.length, this).outputFrame(names(c2), domains(c2));
      if (rows[0] < 0)
        return new DeepSlice(rows, c2).doAll(c2.length, this).outputFrame(names(c2), domains(c2));
      // Vec'ize the index array
      AppendableVec av = new AppendableVec("rownames");
      int r = 0;
      int c = 0;
      while (r < rows.length) {
        NewChunk nc = new NewChunk(av, c);
        long end = Math.min(r + CHK_ROWS, rows.length);
        for (; r < end; r++) {
          nc.addNum(rows[r]);
        }
        nc.close(c++, null);
      }
      Vec c0 = av.close(null); // c0 is the row index vec
      Frame fr2 =
          new Slice(c2, this)
              .doAll(c2.length, new Frame(new String[] {"rownames"}, new Vec[] {c0}))
              .outputFrame(names(c2), domains(c2));
      UKV.remove(c0._key); // Remove hidden vector
      return fr2;
    }
    Frame frows = (Frame) orows;
    Vec vrows = frows.anyVec();
    // It's a compatible Vec; use it as boolean selector.
    // Build column names for the result.
    Vec[] vecs = new Vec[c2.length + 1];
    String[] names = new String[c2.length + 1];
    for (int i = 0; i < c2.length; ++i) {
      vecs[i] = _vecs[c2[i]];
      names[i] = _names[c2[i]];
    }
    vecs[c2.length] = vrows;
    names[c2.length] = "predicate";
    return new DeepSelect()
        .doAll(c2.length, new Frame(names, vecs))
        .outputFrame(names(c2), domains(c2));
  }
Example #29
  /** Internal use. Calls {@link #checkValidity()} and recomputes the path if it is not valid. */
  protected void updatePath() {
    checkValidity();
    if (!pathIsValid) {
      path.clear();
      int nbSteps = 30;

      if (keyFrameList.isEmpty()) return;

      if (!valuesAreValid) updateModifiedFrameValues();

      if (keyFrameList.get(0) == keyFrameList.get(keyFrameList.size() - 1))
        // TODO experimenting really
        path.add(
            new Frame(
                keyFrameList.get(0).position(),
                keyFrameList.get(0).orientation(),
                keyFrameList.get(0).magnitude()));
      else {
        KeyFrame[] kf = new KeyFrame[4];
        kf[0] = keyFrameList.get(0);
        kf[1] = kf[0];

        int index = 1;
        kf[2] = (index < keyFrameList.size()) ? keyFrameList.get(index) : null;
        index++;
        kf[3] = (index < keyFrameList.size()) ? keyFrameList.get(index) : null;

        while (kf[2] != null) {
          Vec pdiff = Vec.subtract(kf[2].position(), kf[1].position());
          Vec pvec1 = Vec.add(Vec.multiply(pdiff, 3.0f), Vec.multiply(kf[1].tgP(), (-2.0f)));
          pvec1 = Vec.subtract(pvec1, kf[2].tgP());
          Vec pvec2 = Vec.add(Vec.multiply(pdiff, (-2.0f)), kf[1].tgP());
          pvec2 = Vec.add(pvec2, kf[2].tgP());

          for (int step = 0; step < nbSteps; ++step) {
            Frame frame = new Frame();
            float alpha = step / (float) nbSteps;
            frame.setPosition(
                Vec.add(
                    kf[1].position(),
                    Vec.multiply(
                        Vec.add(
                            kf[1].tgP(),
                            Vec.multiply(Vec.add(pvec1, Vec.multiply(pvec2, alpha)), alpha)),
                        alpha)));
            if (gScene.is3D()) {
              frame.setOrientation(
                  Quat.squad(
                      (Quat) kf[1].orientation(),
                      ((KeyFrame3D) kf[1]).tgQ(),
                      ((KeyFrame3D) kf[2]).tgQ(),
                      (Quat) kf[2].orientation(),
                      alpha));
            } else {
              // linear interpolation
              float start = kf[1].orientation().angle();
              float stop = kf[2].orientation().angle();
              frame.setOrientation(new Rot(start + (stop - start) * alpha));
            }
            frame.setMagnitude(Util.lerp(kf[1].magnitude(), kf[2].magnitude(), alpha));
            path.add(frame.get());
          }

          // Shift
          kf[0] = kf[1];
          kf[1] = kf[2];
          kf[2] = kf[3];

          index++;
          kf[3] = (index < keyFrameList.size()) ? keyFrameList.get(index) : null;
        }
        // Add last KeyFrame
        path.add(new Frame(kf[1].position(), kf[1].orientation(), kf[1].magnitude()));
      }
      pathIsValid = true;
    }
  }
Example #30
  public void threadEnded() {
    if (wtet != null && artterms == null) artterms = wtet.getArtistsAndTermVectors();

    int cols = som.getNumberOfColumns(), rows = som.getNumberOfRows();

    // use lagus-kaski labelling technique
    // generate summed tf vector per cluster
    int[][] clustervecs = new int[cols * rows][MusicDictionary.getDictionary().size()];
    int[] clustersize = new int[cols * rows];
    int[] vecsum = new int[MusicDictionary.getDictionary().size()];

    // for lagus kaski G2 determine r0 and r1 zone indices
    int[][] r0elements = new int[cols * rows][5];
    int[][] r1elements = new int[cols * rows][8];

    for (int i = 0; i < cols; i++) { // for each column in codebook
      for (int j = 0; j < rows; j++) { // for each row in codebook
        int mappos = i * som.getNumberOfRows() + j;
        // get Voronoi-Set for current map unit
        Vector temp = (Vector) som.voronoiSet.elementAt(mappos);
        clustersize[mappos] = temp.size();
        // get this set ordered
        temp = som.getPrototypesForMU(mappos, clustersize[mappos]);
        int[] clustterms = new int[MusicDictionary.getDictionary().size()];
        for (int k = 0; k < clustersize[mappos]; k++) {
          String artist = (String) temp.elementAt(k);
          int[] termvec = artterms.get(artist);
          if (termvec == null) {
            System.err.println("no term vector for artist " + artist);
            continue;
          }
          Vec.addTo(clustterms, termvec);
        }
        // remove all terms with tf < 3 in cluster
        for (int k = 0; k < clustterms.length; k++) {
          if (clustterms[k] < Math.min(3, clustersize[mappos])) clustterms[k] = 0;
        }
        Vec.addTo(vecsum, clustterms);

        //				System.out.println("total "+Stat.sum(clustterms)+" terms in cluster "+mappos);
        clustervecs[mappos] = clustterms; // Vec.divide(clustterms, (count>0)?count:1);

        // finally, determine r0 and r1 zone elements
        // for r0
        r0elements[mappos][0] = mappos;
        r0elements[mappos][1] = i > 0 ? (i - 1) * rows + j : -1;
        r0elements[mappos][2] = i + 1 < cols ? (i + 1) * rows + j : -1;
        r0elements[mappos][3] = j > 0 ? i * rows + j - 1 : -1;
        r0elements[mappos][4] = j + 1 < rows ? i * rows + j + 1 : -1;
        // for r1
        r1elements[mappos][0] = i > 1 ? (i - 2) * rows + j : -1;
        r1elements[mappos][1] = i + 2 < cols ? (i + 2) * rows + j : -1;
        r1elements[mappos][2] = j > 1 ? i * rows + j - 2 : -1;
        r1elements[mappos][3] = j + 2 < rows ? i * rows + j + 2 : -1;
        r1elements[mappos][4] = i > 0 && j > 0 ? (i - 1) * rows + j - 1 : -1;
        r1elements[mappos][5] = i + 1 < cols && j > 0 ? (i + 1) * rows + j - 1 : -1;
        r1elements[mappos][6] = i > 0 && j + 1 < rows ? (i - 1) * rows + j + 1 : -1;
        r1elements[mappos][7] = i + 1 < cols && j + 1 < rows ? (i + 1) * rows + j + 1 : -1;
      }
    }

    double[] summedterms = new double[MusicDictionary.getDictionary().size()];
    int[] clustertermsums = new int[cols * rows];
    for (int i = 0; i < clustervecs.length; i++) {
      clustertermsums[i] = Stat.sum(clustervecs[i]);
      //			for (int j=0; j<clustervecs[i].length; j++) {
      //				summedterms[j] += clustertermsums[i]>0?clustervecs[i][j]/clustertermsums[i]:0;
      //			}
      if (clustertermsums[i] == 0) continue;
      Vec.addTo(summedterms, Vec.divide(clustervecs[i], clustertermsums[i]));
    }
    double[][] lkvalues = new double[cols * rows][MusicDictionary.getDictionary().size()];
    // determine min and max laguskaski values for value normalization between 0 and 1
    double minlk = 0.01d;
    double maxlk = 0.;
    // for each term in each cluster
    for (int i = 0; i < clustervecs.length; i++) {

      // create r0 zone sum vector
      // modification to reflect number of entries
      double[] r0sum = new double[MusicDictionary.getDictionary().size()];
      for (int j = 0; j < r0elements[i].length; j++) {
        if (r0elements[i][j] == -1) continue;
        if (clustertermsums[r0elements[i][j]] == 0) continue;
        Vec.addTo(
            r0sum, Vec.divide(clustervecs[r0elements[i][j]], clustertermsums[r0elements[i][j]]));
      }
      // create non-r1 zone sum vector
      double[] nonr1sum = Vec.cloneVector(summedterms);
      for (int j = 0; j < r1elements[i].length; j++) {
        if (r1elements[i][j] == -1) continue;
        if (clustertermsums[r1elements[i][j]] == 0) continue;
        Vec.subtractFrom(
            nonr1sum, Vec.divide(clustervecs[r1elements[i][j]], clustertermsums[r1elements[i][j]]));
      }

      for (int j = 0; j < clustervecs[i].length; j++) {
        //				// lagus kaski G0
        //				double fclust = clustertermsums[i]>0?clustervecs[i][j]/clustertermsums[i]:0;
        //				double fpen = summedterms[j];

        // lagus kaski G2
        double fclust = r0sum[j];
        double fpen = nonr1sum[j];

        if (clustervecs[i][j] == 0 // only accept words that were on the island before G2
            || fclust == 0.
            || fpen == 0.) continue;
        Double ftc = new Double(fclust * fclust / fpen);

        // find max for normalization
        if (ftc > maxlk) maxlk = ftc;

        // smooth -> only values with score >= minlk
        if (ftc >= minlk) lkvalues[i][j] = ftc;
      }
    }

    //		// calculate all pairwise distances matrix of cos norm vectors
    //		Vector<Double> dists = new Vector<Double>();
    //		for (int i=0; i<lkvalues.length-1; i++) {
    //			for (int j=i+1; j<lkvalues.length; j++) {
    //				double[] cosNormA = Vec.cosineNormalize(Vec.cloneVector(lkvalues[i]));
    //				double[] cosNormB = Vec.cosineNormalize(Vec.cloneVector(lkvalues[j]));
    //				dists.addElement(new Double(Vec.euclDist(cosNormA, cosNormB)));
    //			}
    //		}
    //		Collections.sort(dists);
    //		System.out.println("sorted pairwise distances of all clusters");
    //		Iterator<Double> dit = dists.iterator();
    //		while (dit.hasNext()) {
    //			System.out.println(dit.next());
    //		}

    // init cluster formation map -> every unit is its own cluster
    int[] clusterassociations = new int[cols * rows];
    for (int i = 0; i < clusterassociations.length; i++) {
      clusterassociations[i] = i;
    }

    // copy original lkvalues (for coloring later)
    double[][] origlkvalues = new double[cols * rows][MusicDictionary.getDictionary().size()];
    for (int i = 0; i < cols * rows; i++) {
      for (int j = 0; j < MusicDictionary.getDictionary().size(); j++) {
        origlkvalues[i][j] = lkvalues[i][j];
      }
    }

    // find coherent regions on SOM
    findUnitClusters(clusterassociations, lkvalues, r0elements);

    //		// print clusterassoc map
    //		System.out.println("clusterassociations");
    //		for (int j=0; j<rows; j++) {		// for each row in codebook
    //			for (int i=0; i<cols; i++) {		// for each column in codebook
    //				int mappos = i*som.getNumberOfRows()+j;
    //				System.out.print(clusterassociations[mappos]+" ");
    //			}
    //			System.out.println("");
    //		}

    Vector[] clusterterms = new Vector[cols * rows];
    for (int i = 0; i < lkvalues.length; i++) {
      clusterterms[i] = new Vector();
      for (int j = 0; j < lkvalues[i].length; j++) {
        if (lkvalues[i][j] > minlk) {
          ObjectComparablePair ocp =
              new ObjectComparablePair(
                  MusicDictionary.getDictionary().elementAt(j),
                  new Double(
                      Math.min(maxlk, lkvalues[i][j]))); // use old max as upper bound for all values
          clusterterms[i].addElement(ocp);
        }
        // if (lkvalues[i][j] > maxlk) maxlk = lkvalues[i][j];
      }
      Collections.sort(clusterterms[i]);
      Collections.reverse(clusterterms[i]);
    }

    Vector<Vector<String>> mdmlabels = new Vector<Vector<String>>();
    // calc normalized lagus kaski
    for (int i = 0; i < cols; i++) { // for each column in codebook
      for (int j = 0; j < rows; j++) { // for each row in codebook
        int mappos = i * som.getNumberOfRows() + j;

        Vector<String> unitterms = new Vector<String>();
        // get terms for current map unit
        for (int k = 0; k < clusterterms[mappos].size() && k < maxTermsPerUnit; k++) {
          ObjectComparablePair ocp = (ObjectComparablePair) (clusterterms[mappos].elementAt(k));
          double laguskaski = ((Double) (ocp.getComparable())).doubleValue();
          String word = (String) (ocp.getObject());
          if (k < minTermsPerUnit || laguskaski > minlk) {
            double normlk = (laguskaski - minlk) / (maxlk - minlk);
            String wordandval = word + "_" + normlk; // TextTool.doubleToString(laguskaski, 3)+")";
            unitterms.addElement(wordandval);
          }
        }
        mdmlabels.addElement(unitterms);
      }
    }

    som.setMDM(this);
    this.setLabels(mdmlabels);
    this.setClusterAssociations(clusterassociations);
    this.setNeighborhood(r0elements);

    if (colorByPCA) {
      Vector[] colorclusterterms = new Vector[cols * rows];
      for (int i = 0; i < origlkvalues.length; i++) {
        colorclusterterms[i] = new Vector();
        for (int j = 0; j < origlkvalues[i].length; j++) {
          if (origlkvalues[i][j] > minlk) {
            ObjectComparablePair ocp =
                new ObjectComparablePair(
                    MusicDictionary.getDictionary().elementAt(j),
                    new Double(
                        Math.min(
                            maxlk,
                            origlkvalues[i][j]))); // use old max as upper bound for all values
            colorclusterterms[i].addElement(ocp);
          }
        }
        Collections.sort(colorclusterterms[i]);
        Collections.reverse(colorclusterterms[i]);
      }

      HashSet<String> remainingwords = new HashSet<String>();
      Hashtable<String, Double>[] mdmvalues = new Hashtable[cols * rows];
      // calc normalized lagus kaski
      for (int i = 0; i < cols; i++) { // for each column in codebook
        for (int j = 0; j < rows; j++) { // for each row in codebook
          int mappos = i * som.getNumberOfRows() + j;

          mdmvalues[mappos] = new Hashtable<String, Double>();
          Vector<String> unitterms = new Vector<String>();
          // get terms for current map unit
          for (int k = 0; k < colorclusterterms[mappos].size() && k < maxTermsPerUnit; k++) {
            ObjectComparablePair ocp =
                (ObjectComparablePair) (colorclusterterms[mappos].elementAt(k));
            double laguskaski = ((Double) (ocp.getComparable())).doubleValue();
            String word = (String) (ocp.getObject());
            if (k < minTermsPerUnit || laguskaski > minlk) {
              double normlk = (laguskaski - minlk) / (maxlk - minlk);

              //							remainingwords.add(word);
              //							mdmvalues[mappos].put(word, new Double(laguskaski));
            }
            if (k < 4) {
              remainingwords.add(word);
              mdmvalues[mappos].put(word, new Double(laguskaski));
            }
          }
          mdmlabels.addElement(unitterms);
        }
      }

      //			// remove all words that never occur
      //			Vector<double[]> relevantDimensions = new Vector<double[]>();
      //			double[][] featuredims = new Matrix(lkvalues).transpose().getArray();
      //			for (int i=0; i<featuredims.length; i++) {
      //				if (Stat.sum(featuredims[i]) > 0.1) {
      //					relevantDimensions.addElement(featuredims[i]);
      //				}
      //			}
      //			double[][] reduceddims = new double[relevantDimensions.size()][lkvalues.length];
      //			for (int i=0; i<relevantDimensions.size(); i++) {
      //				reduceddims[i] = relevantDimensions.elementAt(i);
      //			}
      //			cellColors = PCAProjectionToColor.getColorsForFeatures(new
      // Matrix(reduceddims).transpose().getArray());
      //			---------------------------

      // construct new vocabulary vector from remaining words
      // ignore all empty vectors (discard cells w/o entries)
      String[] remainingvocab = remainingwords.toArray(new String[0]);
      Hashtable<Integer, double[]> reducedVectorMapping = new Hashtable<Integer, double[]>();
      int[] colorarraymapping = new int[cols * rows];
      int usefulfeats = 0;
      for (int i = 0; i < cols * rows; i++) {
        double[] nufeat = HashtableTool.getDoubleVectorRepresentation(mdmvalues[i], remainingvocab);
        if (Stat.max(nufeat) == 0.) {
          colorarraymapping[i] = -1;
        } else {
          reducedVectorMapping.put(new Integer(usefulfeats), nufeat);
          colorarraymapping[i] = usefulfeats;
          usefulfeats++;
        }
      }
      // recreate nufeature set
      double[][] nufeatures = new double[usefulfeats][remainingvocab.length];
      for (int i = 0; i < nufeatures.length; i++) {
        nufeatures[i] = Vec.cosineNormalize(reducedVectorMapping.get(new Integer(i)));
      }
      //			System.out.println(TextTool.toMatlabFormat(nufeatures));

      PCA pca = new PCA(nufeatures, 20);
      Color[] reducedSetCellColors =
          SammonsMappingToColor.getColorsForFeatures(pca.getPCATransformedDataAsDoubleArray());

      // recreate Color assignment
      cellColors = new Color[colorarraymapping.length];
      for (int i = 0; i < cellColors.length; i++) {
        if (colorarraymapping[i] == -1) cellColors[i] = Color.white;
        else cellColors[i] = reducedSetCellColors[colorarraymapping[i]];
      }

      //			cellColors = SammonsMappingToColor.getColorsForFeatures(new PCA(lkvalues,
      // 20).getPCATransformedDataAsDoubleArray());
    } else {
      cellColors = new Color[cols * rows];
    }

    for (Enumeration e = threadlisteners.elements(); e.hasMoreElements(); ) {
      ThreadListener tl = (ThreadListener) (e.nextElement());
      tl.threadEnded();
    }
  }