Example #1
    /** Computes the value of the regularization penalty for vector u under the given regularizer. */
    public final double regularize(double[] u, Regularizer regularization) {
      if (u == null) return 0;
      double ureg = 0;

      switch (regularization) {
        case None:
          return 0;
        case Quadratic:
          for (int i = 0; i < u.length; i++) ureg += u[i] * u[i];
          return ureg;
        case L2:
          for (int i = 0; i < u.length; i++) ureg += u[i] * u[i];
          return Math.sqrt(ureg);
        case L1:
          for (int i = 0; i < u.length; i++) ureg += Math.abs(u[i]);
          return ureg;
        case NonNegative:
          for (int i = 0; i < u.length; i++) {
            if (u[i] < 0) return Double.POSITIVE_INFINITY;
          }
          return 0;
        case OneSparse:
          int card = 0;
          for (int i = 0; i < u.length; i++) {
            if (u[i] < 0) return Double.POSITIVE_INFINITY;
            else if (u[i] > 0) card++;
          }
          return card == 1 ? 0 : Double.POSITIVE_INFINITY;
        case UnitOneSparse:
          int ones = 0, zeros = 0;
          for (int i = 0; i < u.length; i++) {
            if (u[i] == 1) ones++;
            else if (u[i] == 0) zeros++;
            else return Double.POSITIVE_INFINITY;
          }
          return ones == 1 && zeros == u.length - 1 ? 0 : Double.POSITIVE_INFINITY;
        case Simplex:
          double sum = 0, absum = 0;
          for (int i = 0; i < u.length; i++) {
            if (u[i] < 0) return Double.POSITIVE_INFINITY;
            else {
              sum += u[i];
              absum += Math.abs(u[i]);
            }
          }
          return MathUtils.equalsWithinRecSumErr(sum, 1.0, u.length, absum)
              ? 0
              : Double.POSITIVE_INFINITY;
        default:
          throw new RuntimeException("Unknown regularization function " + regularization);
      }
    }
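The three smooth penalties above have the usual closed forms: Quadratic is the sum of squares, L2 is its square root (the Euclidean norm), and L1 is the sum of absolute values; the remaining cases act as 0-or-infinity constraint indicators. Below is a minimal standalone sketch of the smooth penalties, assuming only java.lang.Math; the class and method names are illustrative and not part of the H2O API.

    /** Standalone sketch of the smooth regularization penalties (illustrative, not H2O code). */
    final class PenaltySketch {
      /** Quadratic penalty: sum of squares, i.e. the squared Euclidean norm. */
      static double quadratic(double[] u) {
        double s = 0;
        for (double v : u) s += v * v;
        return s;
      }

      /** L2 penalty: Euclidean norm, the square root of the quadratic penalty. */
      static double l2(double[] u) {
        return Math.sqrt(quadratic(u));
      }

      /** L1 penalty: sum of absolute values. */
      static double l1(double[] u) {
        double s = 0;
        for (double v : u) s += Math.abs(v);
        return s;
      }

      public static void main(String[] args) {
        double[] u = {3, -4};
        System.out.println(quadratic(u)); // 25.0
        System.out.println(l2(u));        // 5.0
        System.out.println(l1(u));        // 7.0
      }
    }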
Example #2
 /**
  * Initialization of neural net weights cf.
  * http://machinelearning.wustl.edu/mlpapers/paper_files/AISTATS2010_GlorotB10.pdf
  */
 private void randomizeWeights() {
   for (int w = 0; w < dense_row_weights.length; ++w) {
     final Random rng =
         water.util.RandomUtils.getRNG(
             get_params()._seed + 0xBAD5EED + w + 1); // to match NeuralNet behavior
     final double range = Math.sqrt(6. / (units[w] + units[w + 1]));
     for (int i = 0; i < get_weights(w).rows(); i++) {
       for (int j = 0; j < get_weights(w).cols(); j++) {
          if (get_params()._initial_weight_distribution
              == DeepLearningParameters.InitialWeightDistribution.UniformAdaptive) {
            // cf. http://machinelearning.wustl.edu/mlpapers/paper_files/AISTATS2010_GlorotB10.pdf
            if (w == dense_row_weights.length - 1 && _classification) {
              // Softmax might need an extra factor 4, since it's like a sigmoid
              get_weights(w).set(i, j, (float) (4. * uniformDist(rng, -range, range)));
            } else {
              get_weights(w).set(i, j, (float) uniformDist(rng, -range, range));
            }
          } else if (get_params()._initial_weight_distribution
              == DeepLearningParameters.InitialWeightDistribution.Uniform) {
            get_weights(w).set(i, j, (float) uniformDist(
                rng, -get_params()._initial_weight_scale, get_params()._initial_weight_scale));
          } else if (get_params()._initial_weight_distribution
              == DeepLearningParameters.InitialWeightDistribution.Normal) {
            get_weights(w).set(i, j,
                (float) (rng.nextGaussian() * get_params()._initial_weight_scale));
          }
       }
     }
   }
 }
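The UniformAdaptive branch above is the Glorot/Xavier scheme: each weight of a layer with fan_in inputs and fan_out outputs is drawn uniformly from [-sqrt(6/(fan_in+fan_out)), +sqrt(6/(fan_in+fan_out))]. Below is a self-contained sketch of that rule for a plain double[][] matrix, using java.util.Random instead of H2O's RandomUtils and uniformDist; the names are illustrative only.

    import java.util.Random;

    /** Minimal sketch of Glorot/Xavier uniform initialization for one dense layer (illustrative). */
    final class GlorotInitSketch {
      /** Returns a fanOut x fanIn matrix with entries drawn uniformly from [-range, range). */
      static double[][] glorotUniform(int fanIn, int fanOut, Random rng) {
        final double range = Math.sqrt(6.0 / (fanIn + fanOut)); // same range as in the snippet above
        double[][] w = new double[fanOut][fanIn];
        for (int i = 0; i < fanOut; i++)
          for (int j = 0; j < fanIn; j++)
            w[i][j] = -range + 2.0 * range * rng.nextDouble();  // uniform on [-range, range)
        return w;
      }

      public static void main(String[] args) {
        double[][] w = glorotUniform(784, 200, new Random(42));
        System.out.println("range = " + Math.sqrt(6.0 / (784 + 200)) + ", w[0][0] = " + w[0][0]);
      }
    }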
Example #3
    protected void calcModelStats(
        CoxPHModel model, final double[] newCoef, final double newLoglik) {
      CoxPHModel.CoxPHParameters p = model._parms;
      CoxPHModel.CoxPHOutput o = model._output;

      final int n_coef = o.coef.length;
      // Variance-covariance matrix of the coefficients: var(beta) = (-H)^-1, the negative inverse Hessian
      final Matrix inv_hessian = new Matrix(o.hessian).inverse();
      for (int j = 0; j < n_coef; ++j) {
        for (int k = 0; k <= j; ++k) {
          final double elem = -inv_hessian.get(j, k);
          o.var_coef[j][k] = elem;
          o.var_coef[k][j] = elem;
        }
      }
      for (int j = 0; j < n_coef; ++j) {
        o.coef[j] = newCoef[j];
        o.exp_coef[j] = Math.exp(o.coef[j]);
        o.exp_neg_coef[j] = Math.exp(-o.coef[j]);
        o.se_coef[j] = Math.sqrt(o.var_coef[j][j]);
        o.z_coef[j] = o.coef[j] / o.se_coef[j];
      }
      // On the first iteration, record the null-model log-likelihood, the maximum attainable
      // R-squared, and the score test g^T var(beta) g at the initial coefficients
      if (o.iter == 0) {
        o.null_loglik = newLoglik;
        o.maxrsq = 1 - Math.exp(2 * o.null_loglik / o.n);
        o.score_test = 0;
        for (int j = 0; j < n_coef; ++j) {
          double sum = 0;
          for (int k = 0; k < n_coef; ++k) sum += o.var_coef[j][k] * o.gradient[k];
          o.score_test += o.gradient[j] * sum;
        }
      }
      o.loglik = newLoglik;
      // Likelihood-ratio test statistic 2 * (loglik - null_loglik) and the corresponding
      // pseudo R-squared, 1 - exp(-LRT / n)
      o.loglik_test = -2 * (o.null_loglik - o.loglik);
      o.rsq = 1 - Math.exp(-o.loglik_test / o.n);
      // Wald test: quadratic form (beta - beta0)^T (-H) (beta - beta0), with beta0 = p.init
      o.wald_test = 0;
      for (int j = 0; j < n_coef; ++j) {
        double sum = 0;
        for (int k = 0; k < n_coef; ++k) sum -= o.hessian[j][k] * (o.coef[k] - p.init);
        o.wald_test += (o.coef[j] - p.init) * sum;
      }
    }
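The final loop above accumulates the Wald statistic as the quadratic form (beta - beta0)^T (-H) (beta - beta0), where H is the Hessian of the log partial likelihood and beta0 is the scalar p.init applied to every coefficient. Here is a standalone sketch of just that quadratic form, with illustrative names rather than the CoxPHOutput fields.

    /** Standalone sketch of the Wald quadratic form used above: (b - b0)^T (-H) (b - b0). */
    final class WaldSketch {
      static double waldStatistic(double[] coef, double init, double[][] hessian) {
        double wald = 0;
        for (int j = 0; j < coef.length; j++) {
          double sum = 0;
          for (int k = 0; k < coef.length; k++)
            sum -= hessian[j][k] * (coef[k] - init);   // row j of -H applied to (b - b0)
          wald += (coef[j] - init) * sum;              // accumulate the quadratic form
        }
        return wald;
      }

      public static void main(String[] args) {
        double[] coef = {0.5, -0.2};
        double[][] hessian = {{-4, 1}, {1, -3}};       // negative-definite Hessian of the log partial likelihood
        System.out.println(waldStatistic(coef, 0.0, hessian)); // approx 1.32
      }
    }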
Example #4
 @SuppressWarnings("unused")
 @Override
 protected void init() {
   super.init();
   // Initialize local variables
   _mtry =
       (mtries == -1)
           ? // classification: mtry=sqrt(_ncols), regression: mtry=_ncols/3
           (classification ? Math.max((int) Math.sqrt(_ncols), 1) : Math.max(_ncols / 3, 1))
           : mtries;
   if (!(1 <= _mtry && _mtry <= _ncols))
     throw new IllegalArgumentException(
         "Computed mtry should be in interval <1,#cols> but it is " + _mtry);
   if (!(0.0 < sample_rate && sample_rate <= 1.0))
     throw new IllegalArgumentException(
         "Sample rate should be interval (0,1> but it is " + sample_rate);
   if (DEBUG_DETERMINISTIC && seed == -1) _seed = 0x1321e74a0192470cL; // fixed version of seed
   else if (seed == -1) _seed = _seedGenerator.nextLong();
   else _seed = seed;
   if (sample_rate == 1f && validation != null)
     Log.warn(
         Sys.DRF__,
         "Sample rate is 100% and no validation dataset is required. There are no OOB data to perform validation!");
 }
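The default mtry heuristic above is the standard random-forest rule of thumb: roughly sqrt(#cols) candidate columns per split for classification and #cols/3 for regression, never less than one. A tiny standalone sketch of that default, with illustrative names rather than the H2O DRF class:

    /** Sketch of the default mtry heuristic used above (illustrative, not H2O code). */
    final class MtrySketch {
      static int defaultMtry(int ncols, boolean classification) {
        // classification: floor(sqrt(ncols)), regression: ncols / 3, both at least 1
        return classification ? Math.max((int) Math.sqrt(ncols), 1) : Math.max(ncols / 3, 1);
      }

      public static void main(String[] args) {
        System.out.println(defaultMtry(100, true));  // 10
        System.out.println(defaultMtry(100, false)); // 33
      }
    }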