Example #1
  /**
   * Extract (dense) rows from the given chunks, one Vec at a time - should be slightly faster
   * than per-row extraction.
   *
   * @param chunks - chunks of the dataset
   * @return array of dense rows
   */
  public final Row[] extractDenseRowsVertical(Chunk[] chunks) {
    Row[] rows = new Row[chunks[0]._len];

    for (int i = 0; i < rows.length; ++i) {
      rows[i] = new Row(false, _nums, _cats, _responses, 0);
      rows[i].rid = chunks[0].start() + i;
      if (_offset) {
        rows[i].offset = chunks[offsetChunkId()].atd(i);
        if (Double.isNaN(rows[i].offset)) rows[i].bad = true;
      }
      if (_weights) {
        rows[i].weight = chunks[weightChunkId()].atd(i);
        if (Double.isNaN(rows[i].weight)) rows[i].bad = true;
      }
    }
    for (int i = 0; i < _cats; ++i) {
      for (int r = 0; r < chunks[0]._len; ++r) {
        Row row = rows[r];
        if (row.bad) continue;
        if (chunks[i].isNA(r)) {
          if (_skipMissing) {
            row.bad = true;
          } else {
            // missing value turns into extra (last) factor
            row.binIds[row.nBins++] = _catOffsets[i + 1] - 1;
          }
        } else {
          int c = getCategoricalId(i, (int) chunks[i].at8(r));
          if (c >= 0) row.binIds[row.nBins++] = c;
        }
      }
    }
    int numStart = numStart();
    // generic numbers
    for (int cid = 0; cid < _nums; ++cid) {
      Chunk c = chunks[_cats + cid];
      for (int r = 0; r < c._len; ++r) {
        Row row = rows[r];
        if (row.bad) continue;
        if (c.isNA(r)) row.bad = _skipMissing;
        double d = c.atd(r);
        if (_normMul != null && _normSub != null) // either none or both
          d = (d - _normSub[cid]) * _normMul[cid];
        row.numVals[numStart + cid] = d;
      }
    }
    // response(s)
    for (int i = 1; i <= _responses; ++i) {
      Chunk rChunk = chunks[responseChunkId()];
      for (int r = 0; r < chunks[0]._len; ++r) {
        Row row = rows[r];
        if (row.bad) continue;
        // keep one response index for the store, the normalization, and the NaN check
        int ri = row.response.length - i;
        row.response[ri] = rChunk.atd(r);
        if (_normRespMul != null) {
          row.response[ri] = (row.response[ri] - _normRespSub[ri]) * _normRespMul[ri];
        }
        if (Double.isNaN(row.response[ri])) row.bad = true;
      }
    }
    return rows;
  }
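
Usage sketch for Example #1: the vertical extractor is typically driven from a chunk-local task. The snippet below is a minimal, illustrative sketch only - it assumes the H2O-3 classes water.MRTask, water.fvec.Chunk and hex.DataInfo (with its public _adaptedFrame field) as used above; the DenseVerticalScorer class name and the consumption loop are hypothetical, not part of the original code.

import hex.DataInfo;
import hex.DataInfo.Row;
import water.MRTask;
import water.fvec.Chunk;

// Hypothetical task that consumes the dense rows produced by extractDenseRowsVertical.
class DenseVerticalScorer extends MRTask<DenseVerticalScorer> {
  private final DataInfo _dinfo; // built elsewhere over the adapted Frame

  DenseVerticalScorer(DataInfo dinfo) { _dinfo = dinfo; }

  @Override
  public void map(Chunk[] cs) {
    Row[] rows = _dinfo.extractDenseRowsVertical(cs); // one Row per row of the local chunks
    for (Row row : rows) {
      if (row.bad) continue; // NaN weight/offset/response marked the row as bad
      // consume row.binIds[0 .. row.nBins) (expanded categoricals) and row.numVals here
    }
  }
}
// usage (sketch): new DenseVerticalScorer(dinfo).doAll(dinfo._adaptedFrame);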
Example #2
 public final Row extractDenseRow(Chunk[] chunks, int rid, Row row) {
   row.bad = false;
   row.rid = rid + chunks[0].start();
   row.cid = rid;
   if (_weights) row.weight = chunks[weightChunkId()].atd(rid);
   if (row.weight == 0) return row;
   if (_skipMissing) {
     int N = _cats + _nums;
     for (int i = 0; i < N; ++i)
       if (chunks[i].isNA(rid)) {
         row.bad = true;
         return row;
       }
   }
   int nbins = 0;
   for (int i = 0; i < _cats; ++i) {
     int cid = getCategoricalId(i, chunks[i].isNA(rid) ? _catModes[i] : (int) chunks[i].at8(rid));
     if (cid >= 0) row.binIds[nbins++] = cid;
   }
   row.nBins = nbins;
   final int n = _nums;
   int numValsIdx = 0; // since we're dense, need a second index to track interaction nums
   for (int i = 0; i < n; i++) {
     if (isInteractionVec(_cats + i)) {
       // categorical-categorical interaction is handled as plain categorical (above)... so if we
       // have interactions, either v1 is categorical, v2 is categorical, or neither is categorical
       int offset = getInteractionOffset(chunks, _cats + i, rid);
       // essentially: chunks[v1].atd(rid) * chunks[v2].atd(rid) (see InteractionWrappedVec)
       row.numVals[numValsIdx + offset] = chunks[_cats + i].atd(rid);
       numValsIdx += nextNumericIdx(i);
     } else {
       double d = chunks[_cats + i].atd(rid); // can be NA if skipMissing() == false
       if (Double.isNaN(d)) d = _numMeans[i];
       if (_normMul != null && _normSub != null)
         d = (d - _normSub[numValsIdx]) * _normMul[numValsIdx];
       row.numVals[numValsIdx++] = d;
     }
   }
   for (int i = 0; i < _responses; ++i) {
     try {
       row.response[i] = chunks[responseChunkId(i)].atd(rid);
     } catch (Throwable t) {
       throw new RuntimeException(t);
     }
     if (_normRespMul != null)
       row.response[i] = (row.response[i] - _normRespSub[i]) * _normRespMul[i];
     if (Double.isNaN(row.response[i])) {
       row.bad = true;
       return row;
     }
   }
   if (_offset) row.offset = chunks[offsetChunkId()].atd(rid);
   return row;
 }
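
Usage sketch for Example #2: per-row extraction is meant to reuse a single Row across the chunk. This is a drop-in map() for a task shaped like the sketch after Example #1; it assumes a DataInfo.newDenseRow() helper is available (present in recent H2O-3 versions) - otherwise construct the Row directly as in Example #1.

  // Reuses one Row object for the whole chunk to avoid per-row allocation.
  @Override
  public void map(Chunk[] cs) {
    DataInfo.Row row = _dinfo.newDenseRow();
    for (int r = 0; r < cs[0]._len; ++r) {
      _dinfo.extractDenseRow(cs, r, row); // fills binIds/numVals/response in place
      if (row.bad || row.weight == 0) continue; // skipped (NaN) or zero-weight observation
      // accumulate per-row statistics here (e.g., gradient contributions)
    }
  }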
Example #3
  public final Row extractDenseRow(double[] vals, Row row) {
    row.bad = false;
    row.rid = 0;
    row.cid = 0;
    if (row.weight == 0) return row;

    if (_skipMissing)
      for (double d : vals)
        if (Double.isNaN(d)) {
          row.bad = true;
          return row;
        }
    int nbins = 0;
    for (int i = 0; i < _cats; ++i) {
      int c = getCategoricalId(i, Double.isNaN(vals[i]) ? _catModes[i] : (int) vals[i]);
      if (c >= 0) row.binIds[nbins++] = c;
    }
    row.nBins = nbins;
    final int n = _nums;
    int numValsIdx = 0;
    for (int i = 0; i < n; ++i) {
      if (isInteractionVec(_cats + i)) {
        int offset;
        InteractionWrappedVec iwv = ((InteractionWrappedVec) _adaptedFrame.vec(_cats + i));
        int v1 = _adaptedFrame.find(iwv.v1());
        int v2 = _adaptedFrame.find(iwv.v2());
        if (v1 < _cats)
          offset = getCategoricalId(v1, Double.isNaN(vals[v1]) ? _catModes[v1] : (int) vals[v1]);
        else if (v2 < _cats)
          offset = getCategoricalId(v2, Double.isNaN(vals[v2]) ? _catModes[v2] : (int) vals[v2]);
        else offset = 0;
        row.numVals[numValsIdx + offset] = vals[_cats + i]; // essentially: vals[v1] * vals[v2]
        numValsIdx += nextNumericIdx(i);
      } else {
        double d = vals[_cats + i]; // can be NA if skipMissing() == false
        if (Double.isNaN(d)) d = _numMeans[numValsIdx];
        if (_normMul != null && _normSub != null)
          d = (d - _normSub[numValsIdx]) * _normMul[numValsIdx];
        row.numVals[numValsIdx++] = d;
      }
    }
    int off = responseChunkId(0);
    // vals is laid out like the adapted frame, so the k-th response lives at vals[off + k]
    for (int i = off; i < Math.min(vals.length, off + _responses); ++i) {
      try {
        row.response[i - off] = vals[i];
      } catch (Throwable t) {
        throw new RuntimeException(t);
      }
      if (_normRespMul != null)
        row.response[i - off] =
            (row.response[i - off] - _normRespSub[i - off]) * _normRespMul[i - off];
      if (Double.isNaN(row.response[i - off])) {
        row.bad = true;
        return row;
      }
    }
    return row;
  }
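
Usage sketch for Example #3: this variant scores a single observation held in a plain double[] laid out like the adapted frame (categorical levels first, then numeric columns, then the response/weight/offset slots). A minimal sketch, again assuming a DataInfo.newDenseRow() helper; `dinfo` and the fill-in step are illustrative.

    // `dinfo` is an existing DataInfo; entries left as NaN are either imputed or mark the row bad,
    // depending on _skipMissing / _imputeMissing.
    double[] vals = new double[dinfo._adaptedFrame.numCols()];
    java.util.Arrays.fill(vals, Double.NaN);
    // ... fill vals[0 .. _cats) with raw level indices and vals[_cats .. _cats + _nums) with numerics
    DataInfo.Row row = dinfo.newDenseRow();
    dinfo.extractDenseRow(vals, row);
    if (!row.bad) {
      // row.binIds[0 .. row.nBins) and row.numVals now hold the expanded, standardized representation
    }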
Example #4
  public final Row extractDenseRow(Chunk[] chunks, int rid, Row row) {
    row.bad = false;
    row.rid = rid + chunks[0].start();
    if (_weights) row.weight = chunks[weightChunkId()].atd(rid);
    if (row.weight == 0) return row;
    if (_skipMissing)
      for (Chunk c : chunks)
        if (c.isNA(rid)) {
          row.bad = true;
          return row;
        }
    int nbins = 0;
    for (int i = 0; i < _cats; ++i) {
      if (chunks[i].isNA(rid)) {
        if (_imputeMissing) {
          int c = getCategoricalId(i, _catModes[i]);
          if (c >= 0) row.binIds[nbins++] = c;
        } else { // TODO: What if missingBucket = false?
          // missing value turns into extra (last) factor
          row.binIds[nbins++] = _catOffsets[i + 1] - 1;
        }
      } else {
        int c = getCategoricalId(i, (int) chunks[i].at8(rid));
        if (c >= 0) row.binIds[nbins++] = c;
      }
    }
    row.nBins = nbins;
    final int n = _nums;
    for (int i = 0; i < n; ++i) {
      double d = chunks[_cats + i].atd(rid); // can be NA if skipMissing() == false
      if (_imputeMissing && Double.isNaN(d)) d = _numMeans[i];
      if (_normMul != null && _normSub != null) d = (d - _normSub[i]) * _normMul[i];
      row.numVals[i] = d;
    }
    for (int i = 0; i < _responses; ++i) {
      row.response[i] = chunks[responseChunkId()].atd(rid);
      if (_normRespMul != null)
        row.response[i] = (row.response[i] - _normRespSub[i]) * _normRespMul[i];
      if (Double.isNaN(row.response[i])) {
        row.bad = true;
        return row;
      }
    }
    if (_offset) row.offset = chunks[offsetChunkId()].atd(rid);

    return row;
  }
Example #5
  /**
   * Extract (sparse) rows from the given chunks. Note: 0 remains 0, i.e. _normSub of DataInfo is
   * not applied (the mean shift from standardization is not reverted) - UNLESS an offset is
   * specified (for GLM only). Essentially turns the dataset 90 degrees.
   *
   * @param chunks - chunks of the dataset
   * @param offset - adjustment for 0s if running with on-the-fly standardization (i.e. zeros are
   *     not really zeros because of centering)
   * @return array of sparse rows
   */
  public final Row[] extractSparseRows(Chunk[] chunks, double offset) {
    Row[] rows = new Row[chunks[0]._len];

    for (int i = 0; i < rows.length; ++i) {
      rows[i] = new Row(true, Math.min(_nums, 16), _cats, _responses, offset);
      rows[i].rid = chunks[0].start() + i;
      if (_offset) {
        rows[i].offset = chunks[offsetChunkId()].atd(i);
        if (Double.isNaN(rows[i].offset)) rows[i].bad = true;
      }
      if (_weights) {
        rows[i].weight = chunks[weightChunkId()].atd(i);
        if (Double.isNaN(rows[i].weight)) rows[i].bad = true;
      }
    }
    // categoricals
    for (int i = 0; i < _cats; ++i) {
      for (int r = 0; r < chunks[0]._len; ++r) {
        Row row = rows[r];
        if (row.bad) continue;
        if (chunks[i].isNA(r)) {
          if (_skipMissing) {
            row.bad = true;
          } else {
            // missing value turns into extra (last) factor
            row.binIds[row.nBins++] = _catOffsets[i + 1] - 1;
          }
        } else {
          int c = getCategoricalId(i, (int) chunks[i].at8(r));
          if (c >= 0) row.binIds[row.nBins++] = c;
        }
      }
    }
    int numStart = numStart();
    // generic numbers
    for (int cid = 0; cid < _nums; ++cid) {
      Chunk c = chunks[_cats + cid];
      int oldRow = -1;
      for (int r = c.nextNZ(-1); r < c._len; r = c.nextNZ(r)) {
        if (c.atd(r) == 0) continue;
        assert r > oldRow;
        oldRow = r;
        Row row = rows[r];
        if (row.bad) continue;
        if (c.isNA(r)) row.bad = _skipMissing;
        double d = c.atd(r);
        if (_normMul != null) d *= _normMul[cid];
        row.addNum(cid + numStart, d);
      }
    }
    // response(s)
    for (int i = 1; i <= _responses; ++i) {
      Chunk rChunk = chunks[responseChunkId()];
      for (int r = 0; r < chunks[0]._len; ++r) {
        Row row = rows[r];
        if (row.bad) continue;
        // keep one response index for the store, the normalization, and the NaN check
        int ri = row.response.length - i;
        row.response[ri] = rChunk.atd(r);
        if (_normRespMul != null) {
          row.response[ri] = (row.response[ri] - _normRespSub[ri]) * _normRespMul[ri];
        }
        if (Double.isNaN(row.response[ri])) row.bad = true;
      }
    }
    return rows;
  }
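
Usage sketch for Example #5: because only nonzeros are visited, the mean shift from standardization has to be folded into the offset argument (or the intercept) by the caller. The sketch below assumes Row exposes nBins/binIds for the categorical part and nNums/numIds/numVals for its sparse numeric part (as H2O-3's GLM tasks use them); `beta` and `sparseOffset` are hypothetical - a coefficient array indexed like the expanded columns and a precomputed 0-adjustment.

    // Inside some task's map(Chunk[] cs):
    DataInfo.Row[] rows = _dinfo.extractSparseRows(cs, sparseOffset);
    for (DataInfo.Row row : rows) {
      if (row.bad) continue;
      double eta = sparseOffset; // start from the precomputed adjustment for the untouched zeros
      for (int b = 0; b < row.nBins; ++b) eta += beta[row.binIds[b]]; // one-hot categoricals
      for (int k = 0; k < row.nNums; ++k) eta += beta[row.numIds[k]] * row.numVals[k]; // nonzeros only
      // zeros are never visited, so _normSub never applies here - it must live in sparseOffset
    }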
Example #6
 /**
  * Extract (sparse) rows from the given chunks. Note: 0 remains 0, i.e. _normSub of DataInfo is
  * not applied (the mean shift from standardization is not reverted) - UNLESS an offset is
  * specified (for GLM only). Essentially turns the dataset 90 degrees.
  *
  * @param chunks - chunks of the dataset
  * @return array of sparse rows
  */
 public final Row[] extractSparseRows(Chunk[] chunks) {
   Row[] rows = new Row[chunks[0]._len];
   long startOff = chunks[0].start();
   for (int i = 0; i < rows.length; ++i) {
     // if sparse, _nums is the correct number of nonzero values! i.e., do not use numNums()
     rows[i] = new Row(true, Math.min(_nums, 16), _cats, _responses, i, startOff);
     rows[i].rid = chunks[0].start() + i;
     if (_offset) {
       rows[i].offset = chunks[offsetChunkId()].atd(i);
       if (Double.isNaN(rows[i].offset)) rows[i].bad = true;
     }
     if (_weights) {
       rows[i].weight = chunks[weightChunkId()].atd(i);
       if (Double.isNaN(rows[i].weight)) rows[i].bad = true;
     }
     if (_skipMissing) {
       int N = _cats + _nums;
       for (int c = 0; c < N; ++c) if (chunks[c].isNA(i)) rows[i].bad = true;
     }
   }
   // categoricals
   for (int i = 0; i < _cats; ++i) {
     for (int r = 0; r < chunks[0]._len; ++r) {
       Row row = rows[r];
       if (row.bad) continue;
       int cid = getCategoricalId(i, chunks[i].isNA(r) ? _catModes[i] : (int) chunks[i].at8(r));
       if (cid >= 0) row.binIds[row.nBins++] = cid;
     }
   }
   // generic numbers + interactions
   int interactionOffset = 0;
   for (int cid = 0; cid < _nums; ++cid) {
     Chunk c = chunks[_cats + cid];
     int oldRow = -1;
     if (c instanceof InteractionWrappedVec.InteractionWrappedChunk) {
       // For each row, only 1 value in an interaction is 'hot'; all other values are off (i.e. 0).
       // The vec is "vertically" dense and "horizontally" sparse: every row has one, and only
       // one, value.
       for (int r = 0; r < c._len; ++r) {
         Row row = rows[r];
         if (row.bad) continue;
         if (c.isNA(r)) row.bad = _skipMissing;
         // the "virtual" offset into the hot-expanded interaction
         int cidVirtualOffset = getInteractionOffset(chunks, _cats + cid, r);
         // FIXME: if this produces a "true" NA then should we substitute the mean?
         row.addNum(_numOffsets[cid] + cidVirtualOffset, c.atd(r));
       }
       interactionOffset += nextNumericIdx(cid);
     } else {
       for (int r = c.nextNZ(-1); r < c._len; r = c.nextNZ(r)) {
         if (c.atd(r) == 0) continue;
         assert r > oldRow;
         oldRow = r;
         Row row = rows[r];
         if (row.bad) continue;
         if (c.isNA(r)) row.bad = _skipMissing;
         double d = c.atd(r);
         if (Double.isNaN(d)) d = _numMeans[cid];
         if (_normMul != null) d *= _normMul[interactionOffset];
         row.addNum(_numOffsets[cid], d);
       }
       interactionOffset++;
     }
   }
   // response(s)
   for (int i = 1; i <= _responses; ++i) {
     int rid = responseChunkId(i - 1);
     Chunk rChunk = chunks[rid];
     for (int r = 0; r < chunks[0]._len; ++r) {
       Row row = rows[r];
       if (row.bad) continue;
       row.response[i - 1] = rChunk.atd(r);
       if (_normRespMul != null) {
         row.response[i - 1] = (row.response[i - 1] - _normRespSub[i - 1]) * _normRespMul[i - 1];
       }
       if (Double.isNaN(row.response[i - 1])) row.bad = true;
     }
   }
   return rows;
 }