Example #1
0
    @Override
    public void map(Chunk cs[]) {
      _xsum = new double[_ncolx];
      _ysum = new double[_ncoly];

      // Scratch buffers reused across rows (zeroed each iteration) to cut down on garbage.
      final double[] rowY = new double[_ncoly];
      final double[] rowX = new double[_ncolx];

      final int rows = cs[0]._len;
      rowLoop:
      for (int r = 0; r < rows; r++) {
        Arrays.fill(rowY, 0);
        Arrays.fill(rowX, 0);

        // y columns occupy cs[0.._ncoly); a single NA anywhere in the row discards it.
        for (int j = 0; j < _ncoly; j++) {
          final double v = cs[j].atd(r);
          if (Double.isNaN(v)) {
            _NACount++;
            continue rowLoop; // count the discarded row exactly once
          }
          rowY[j] = v;
        }
        // x columns follow the y columns in cs.
        for (int j = 0; j < _ncolx; j++) {
          final double v = cs[_ncoly + j].atd(r);
          if (Double.isNaN(v)) {
            _NACount++;
            continue rowLoop;
          }
          rowX[j] = v;
        }
        // Row is fully observed: fold it into the running column sums.
        ArrayUtils.add(_xsum, rowX);
        ArrayUtils.add(_ysum, rowY);
      }
    }
Example #2
0
    @Override
    public void map(Chunk cs[]) {
      final int ncoly = cs.length;
      _ysum = new double[ncoly];

      // Scratch buffer reused across rows (zeroed each iteration) to avoid per-row allocation.
      final double[] rowY = new double[ncoly];

      final int rows = cs[0]._len;
      rowLoop:
      for (int r = 0; r < rows; r++) {
        Arrays.fill(rowY, 0);
        // A single NA anywhere in the row discards the whole row.
        for (int j = 0; j < ncoly; j++) {
          final double v = cs[j].atd(r);
          if (Double.isNaN(v)) {
            _NACount++;
            continue rowLoop; // count the discarded row exactly once
          }
          rowY[j] = v;
        }
        // Row is fully observed: fold it into the running column sums.
        ArrayUtils.add(_ysum, rowY);
      }
    }
Example #3
0
 @Override
 public void reduce(Histo h) {
   // Merge per-bin extrema from the other histogram, then accumulate bin counts.
   // Ternaries mirror the original strict comparisons (NaN on the incoming side
   // leaves the local value untouched — Math.min/max would propagate NaN instead).
   for (int i = 0; i < _nbins; i++) {
     _mins[i] = (h._mins[i] < _mins[i]) ? h._mins[i] : _mins[i];
     _maxs[i] = (h._maxs[i] > _maxs[i]) ? h._maxs[i] : _maxs[i];
   }
   ArrayUtils.add(_bins, h._bins);
 }
Example #4
0
 @Override
 public void reduce(Lloyds mr) {
   // Merge per-cluster centroids as a weighted (recursive) mean:
   //   new_mean = (mean_a * n_a + mean_b * n_b) / (n_a + n_b)
   for (int clu = 0; clu < _k; clu++) {
     long ra = _size[clu];
     long rb = mr._size[clu];
     // Hoisted out of the coordinate loop: the guard does not depend on c, so
     // testing it once per cluster (instead of once per coordinate) is equivalent
     // and avoids the repeated check. Skips empty merged clusters entirely.
     if (ra + rb > 0) {
       double[] ma = _cMeans[clu];
       double[] mb = mr._cMeans[clu];
       for (int c = 0; c < ma.length; c++) {
         ma[c] = (ma[c] * ra + mb[c] * rb) / (ra + rb);
       }
     }
   }
   // Element-wise accumulate category counts, squared distances, and cluster sizes.
   ArrayUtils.add(_cats, mr._cats);
   ArrayUtils.add(_cSqr, mr._cSqr);
   ArrayUtils.add(_size, mr._size);
   // Track the globally worst-fitting row across all merged tasks.
   if (_worst_err < mr._worst_err) {
     _worst_err = mr._worst_err;
     _worst_row = mr._worst_row;
   }
 }
 /**
  * Add another model info into this. This will add the weights/biases/learning rate helpers, and
  * the number of processed training samples. Note: It will NOT add the elastic averaging helpers,
  * which are always kept constant (they already are the result of a reduction).
  *
  * @param other the model info to fold into this one
  */
 public void add(DeepLearningModelInfo other) {
   for (int i = 0; i < dense_row_weights.length; ++i)
     ArrayUtils.add(get_weights(i).raw(), other.get_weights(i).raw());
   for (int i = 0; i < biases.length; ++i) ArrayUtils.add(biases[i].raw(), other.biases[i].raw());
   if (avg_activations != null)
     for (int i = 0; i < avg_activations.length; ++i)
       // BUG FIX: previously accumulated other.biases[i].raw() here (copy-paste from the
       // loop above); average activations must be summed with the OTHER model's
       // average activations, not its biases.
       ArrayUtils.add(avg_activations[i].raw(), other.avg_activations[i].raw());
   if (has_momenta()) {
     assert (other.has_momenta());
     for (int i = 0; i < dense_row_weights_momenta.length; ++i)
       ArrayUtils.add(get_weights_momenta(i).raw(), other.get_weights_momenta(i).raw());
     for (int i = 0; i < biases_momenta.length; ++i)
       ArrayUtils.add(biases_momenta[i].raw(), other.biases_momenta[i].raw());
   }
   if (adaDelta()) {
     assert (other.adaDelta());
     for (int i = 0; i < dense_row_ada_dx_g.length; ++i) {
       ArrayUtils.add(get_ada_dx_g(i).raw(), other.get_ada_dx_g(i).raw());
     }
   }
   add_processed_local(other.get_processed_local());
 }
Example #6
0
 // Consistency fix: every sibling reduce() in this file carries @Override; this one
 // overrides the same MRTask reduce contract and should be annotated too so the
 // compiler catches signature drift.
 @Override
 public void reduce(MRTaskAtx other) {
   // Element-wise accumulate the partial products from the other task.
   ArrayUtils.add(atx, other.atx);
 }
Example #7
0
 @Override
 public void reduce(AstCumu.CumuTask t) {
   // Reference-identity guard: if both tasks share the very same array instance,
   // adding it to itself would double-count; only merge distinct buffers.
   if (_chkCumu == t._chkCumu) return;
   ArrayUtils.add(_chkCumu, t._chkCumu);
 }
Example #8
0
 @Override
 public void reduce(CoVarTaskCompleteObsSym cvt) {
   // Element-wise sum of the partial covariance accumulators from the other task.
   ArrayUtils.add(_covs, cvt._covs);
 }
Example #9
0
 @Override
 public void reduce(CoVarTaskCompleteObsMeanSym cvt) {
   // Fold in the other task's NA tally and its per-column partial sums;
   // the two updates touch independent state, so order is immaterial.
   _NACount += cvt._NACount;
   ArrayUtils.add(_ysum, cvt._ysum);
 }
Example #10
0
 @Override
 public void reduce(CoVarTaskEverything cvt) {
   // Element-wise sum of the partial covariance accumulators from the other task.
   ArrayUtils.add(_covs, cvt._covs);
 }
 @Override
 public void reduce(CoxPHTask that) {
   // Fold the other task's partial accumulators into this one; every field is a
   // simple sum, so the merge is just scalar addition plus element-wise array adds.
   n += that.n;
   sumWeights += that.sumWeights;
   // Weighted sums of the categorical and numeric predictors.
   ArrayUtils.add(sumWeightedCatX, that.sumWeightedCatX);
   ArrayUtils.add(sumWeightedNumX, that.sumWeightedNumX);
   // Per-time risk-set bookkeeping: set sizes, censored counts, and event counts.
   ArrayUtils.add(sizeRiskSet, that.sizeRiskSet);
   ArrayUtils.add(sizeCensored, that.sizeCensored);
   ArrayUtils.add(sizeEvents, that.sizeEvents);
   ArrayUtils.add(countEvents, that.countEvents);
   // Event-weighted accumulators over predictors and risk scores.
   ArrayUtils.add(sumXEvents, that.sumXEvents);
   ArrayUtils.add(sumRiskEvents, that.sumRiskEvents);
   ArrayUtils.add(sumXRiskEvents, that.sumXRiskEvents);
   ArrayUtils.add(sumXXRiskEvents, that.sumXXRiskEvents);
   ArrayUtils.add(sumLogRiskEvents, that.sumLogRiskEvents);
   // Reverse-cumulative-sum partials of risk and its first/second X moments.
   ArrayUtils.add(rcumsumRisk, that.rcumsumRisk);
   ArrayUtils.add(rcumsumXRisk, that.rcumsumXRisk);
   ArrayUtils.add(rcumsumXXRisk, that.rcumsumXXRisk);
 }