public Vec optimizeColumn(final FuncC1 func, final Vec codingColumn) {
    // Initialize mu from the coding column: entries coded +1/-1 are pinned to 1/0,
    // undefined entries start at 0.5.
    final Vec mu = new ArrayVec(codingColumn.dim());
    for (int i = 0; i < mu.dim(); i++) {
      final double code = codingColumn.get(i);
      if (code == 1.0) mu.set(i, 1.0);
      else if (code == -1.0) mu.set(i, 0.0);
      else mu.set(i, 0.5);
    }

    // Projected gradient descent: step against the gradient, then project back onto [0, 1]
    // while re-pinning the coded entries; stop once the iterate barely moves.
    double error = Double.POSITIVE_INFINITY;
    while (error > 1e-3) {
      final Vec muPrev = VecTools.copy(mu);
      final Vec gradient = func.gradient(mu);
      VecTools.incscale(mu, gradient, -step);

      for (int i = 0; i < mu.dim(); i++) {
        final double code = codingColumn.get(i);
        final double val = mu.get(i);
        if (code == 1.0 || val > 1.0) {
          mu.set(i, 1.0);
        } else if (code == -1.0 || val < 0) {
          mu.set(i, 0);
        }
      }
      System.out.println(mu);
      error = VecTools.norm(VecTools.subtract(muPrev, mu));
    }

    return mu; // the optimized column, projected onto [0, 1]
  }
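The method above runs a projected gradient descent over [0, 1], with the entries fixed by the coding column pinned to 0 or 1. A minimal standalone sketch of that projection step on plain arrays (the projectColumn helper and the double[] representation are illustrative, not part of the library):

// Illustrative sketch: project an iterate onto [0, 1] while pinning coded entries.
static double[] projectColumn(double[] mu, double[] codingColumn) {
  final double[] projected = mu.clone();
  for (int i = 0; i < projected.length; i++) {
    if (codingColumn[i] == 1.0) projected[i] = 1.0;                  // pinned to 1 by the coding matrix
    else if (codingColumn[i] == -1.0) projected[i] = 0.0;            // pinned to 0 by the coding matrix
    else projected[i] = Math.min(1.0, Math.max(0.0, projected[i]));  // clamp free entries to [0, 1]
  }
  return projected;
}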
Example #2
  @Override
  public Ensemble fit(final VecDataSet learn, final GlobalLoss globalLoss) {
    final Vec cursor = new ArrayVec(globalLoss.xdim());
    final List<Trans> weakModels = new ArrayList<>(iterationsCount);
    final Trans gradient = globalLoss.gradient();

    // Gradient boosting: at each step fit a weak model to the gradient of the global loss
    // at the current cursor, then move the cursor against it.
    for (int t = 0; t < iterationsCount; t++) {
      final Vec gradientValueAtCursor = gradient.trans(cursor);
      final L2 localLoss = DataTools.newTarget(factory, gradientValueAtCursor, learn);
      final Trans weakModel = weak.fit(learn, localLoss);
      weakModels.add(weakModel);
      invoke(new Ensemble(weakModels, -step));
      VecTools.append(cursor, VecTools.scale(weakModel.transAll(learn.data()), -step));
    }
    return new Ensemble(weakModels, -step);
  }
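This is a plain functional gradient boosting loop: each weak model is fitted to the gradient of the global loss at the current cursor, and the cursor then moves against that model's predictions. A minimal sketch of the cursor update on plain arrays, with a hypothetical WeakModel interface standing in for Trans:

// Illustrative sketch of the boosting update: cursor[i] -= step * weakModel(data[i]).
interface WeakModel {
  double predict(double[] x);
}

static void boostingStep(double[] cursor, double[][] data, WeakModel weakModel, double step) {
  for (int i = 0; i < cursor.length; i++) {
    cursor[i] -= step * weakModel.predict(data[i]); // move against the fitted gradient direction
  }
}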
 public CMLMetricOptimization(
     final VecDataSet ds,
     final BlockwiseMLLLogit target,
     final Mx S,
     final double c,
     final double step) {
   this.ds = ds;
   this.target = target;
   this.step = step;
   this.classesIdxs = MCTools.splitClassesIdxs(target.labels());
   // Build the unnormalized graph Laplacian L = D - S, where D is the diagonal degree matrix of S.
   this.laplacian = VecTools.copy(S);
   VecTools.scale(laplacian, -1.0);
   for (int i = 0; i < laplacian.rows(); i++) {
     final double diagElem = VecTools.sum(S.row(i));
     laplacian.adjust(i, i, diagElem);
   }
   this.c = c;
 }
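The constructor builds the unnormalized graph Laplacian L = D - S from the similarity matrix S, where D is the diagonal matrix of row sums of S. The same construction on a plain 2-D array, as an illustrative standalone sketch:

// Illustrative sketch: unnormalized graph Laplacian L = D - S of a similarity matrix S.
static double[][] graphLaplacian(double[][] s) {
  final int n = s.length;
  final double[][] l = new double[n][n];
  for (int i = 0; i < n; i++) {
    double degree = 0.0;
    for (int j = 0; j < n; j++) {
      degree += s[i][j];   // degree of node i = sum of row i
      l[i][j] = -s[i][j];  // off-diagonal part: -S
    }
    l[i][i] += degree;     // diagonal part: +D
  }
  return l;
}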
 public Mx trainProbs(final Mx codingMatrix, final Func[] binClassifiers) {
   final Mx result = new VecBasedMx(codingMatrix.rows(), codingMatrix.columns());
   for (int l = 0; l < result.columns(); l++) {
     System.out.println("Optimize column " + l);
     final FuncC1 columnTargetFunction = new ColumnTargetFunction(binClassifiers[l]);
     final Vec muColumn = optimizeColumn(columnTargetFunction, codingMatrix.col(l));
     VecTools.assign(result.col(l), muColumn);
   }
   return result;
 }
 @Override
 public double value(final Vec x) {
   final Mx predictMx = (Mx) x;
   int count = 0;
   for (int i = 0; i < predictMx.rows(); i++) {
     if (VecTools.distance(predictMx.row(i), targets.row(i)) < MathTools.EPSILON) {
       count++;
     }
   }
   return (double) count / targets.rows();
 }
 @Override
 public double value(final Vec mu) {
   // Negative log-likelihood of the binary classifier under mu, plus a Laplacian smoothness penalty.
   double result = 0.0;
   for (int i = 0; i < ds.length(); i++) {
     final double trans = binClassifier.value(ds.data().row(i));
     final double sigmoid = MathTools.sigmoid(trans);
     final double underLog =
         mu.get(target.label(i)) * sigmoid + (1 - mu.get(target.label(i))) * (1 - sigmoid);
     result -= Math.log(underLog);
   }
   result += c * VecTools.multiply(MxTools.multiply(laplacian, mu), mu);
   return result;
 }
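Each example contributes -log(mu_y * sigma + (1 - mu_y) * (1 - sigma)), where sigma is the sigmoid of the binary classifier's raw output and mu_y is the current estimate for the example's class. A self-contained sketch of that per-example term (the sigmoid is written out instead of calling MathTools.sigmoid):

// Illustrative sketch of the per-example negative log-likelihood term used above.
static double exampleNll(double rawScore, double muForLabel) {
  final double sigma = 1.0 / (1.0 + Math.exp(-rawScore)); // sigmoid of the classifier output
  final double likelihood = muForLabel * sigma + (1.0 - muForLabel) * (1.0 - sigma);
  return -Math.log(likelihood);
}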
    @Override
    public Vec gradient(final Vec mu) {
      final Vec grad = new ArrayVec(mu.dim());
      for (int k = 0; k < grad.dim(); k++) {
        final TIntList idxs = classesIdxs.get(k);
        double val = 0.0;
        // Data term: accumulate the likelihood gradient over the examples of class k.
        for (final TIntIterator listIter = idxs.iterator(); listIter.hasNext(); ) {
          final Vec x = ds.data().row(listIter.next());
          final double trans = binClassifier.value(x);
          final double sigmoid = MathTools.sigmoid(trans);
          val -= (2 * sigmoid - 1) / (mu.get(k) * sigmoid + (1 - mu.get(k)) * (1 - sigmoid));
        }
        grad.set(k, val);
      }

      // Normalize the accumulated data term.
      final double norm = VecTools.norm(grad);
      VecTools.scale(grad, 1 / norm);

      // Add the contribution of the Laplacian regularizer.
      for (int k = 0; k < grad.dim(); k++) {
        final double val = VecTools.multiply(laplacian.row(k), mu);
        grad.adjust(k, val);
      }
      return grad;
    }
Example #8
  public void backward() {
    Mx cnc;
    if (bias_b != 0) {
      cnc = leftContract(output);
    } else {
      cnc = VecTools.copy(output);
    }

    // difference = (cnc^T * activations) / batchSize
    difference = MxTools.multiply(MxTools.transpose(cnc), activations);
    for (int i = 0; i < difference.dim(); i++) {
      difference.set(i, difference.get(i) / activations.rows());
    }

    // Backpropagate through the weights, then gate by the rectifier gradient
    // (and by the dropout mask if dropout was applied on the forward pass).
    input = MxTools.multiply(cnc, weights);

    rectifier.grad(activations, activations);
    for (int i = 0; i < input.dim(); i++) {
      input.set(i, input.get(i) * activations.get(i));
      if (dropoutFraction > 0) {
        input.set(i, input.get(i) * dropoutMask.get(i));
      }
    }
  }
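The first half of backward() computes a batch-averaged product of the transposed backpropagated signal with the layer activations, the usual dense-layer weight gradient. A plain-array sketch of that computation, assuming deltas of shape batch x out and activations of shape batch x in:

// Illustrative sketch: batch-averaged weight gradient dW = deltas^T * activations / batchSize.
static double[][] weightGradient(double[][] deltas, double[][] activations) {
  final int batchSize = deltas.length;
  final int out = deltas[0].length;
  final int in = activations[0].length;
  final double[][] grad = new double[out][in];
  for (int b = 0; b < batchSize; b++) {
    for (int o = 0; o < out; o++) {
      for (int i = 0; i < in; i++) {
        grad[o][i] += deltas[b][o] * activations[b][i] / batchSize;
      }
    }
  }
  return grad;
}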
Example #9
  public void forward() {
    if (bias != 0) {
      activations = leftExtend(input);
    } else {
      activations = VecTools.copy(input);
    }

    output = MxTools.multiply(activations, MxTools.transpose(weights));
    rectifier.value(output, output);

    if (dropoutFraction > 0) {
      if (isTrain) {
        // Training: zero out a random subset of outputs with the sampled dropout mask.
        dropoutMask = getDropoutMask();

        for (int i = 0; i < output.dim(); i++) {
          output.set(i, output.get(i) * dropoutMask.get(i));
        }
      } else {
        // Inference: scale outputs by the keep probability to match the expected training activation.
        for (int i = 0; i < output.dim(); i++) {
          output.set(i, output.get(i) * (1 - dropoutFraction));
        }
      }
    }
  }
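The dropout handling above follows the standard (non-inverted) scheme: during training the outputs are multiplied by a random mask, and at inference they are scaled by the keep probability (1 - dropoutFraction) so that their expected magnitude matches training. A standalone sketch of that behavior, using java.util.Random in place of getDropoutMask():

// Illustrative sketch of standard dropout: random mask at train time, scaling at test time.
static double[] applyDropout(double[] output, double dropoutFraction, boolean isTrain, java.util.Random rng) {
  final double[] result = output.clone();
  for (int i = 0; i < result.length; i++) {
    if (isTrain) {
      result[i] *= (rng.nextDouble() < dropoutFraction) ? 0.0 : 1.0; // drop each unit with probability dropoutFraction
    } else {
      result[i] *= (1.0 - dropoutFraction);                          // scale to the expected training-time activation
    }
  }
  return result;
}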
Example #10
 public Vec gradientTo(Vec x, Vec to) {
   final Vec trans = gradient(x);
   VecTools.assign(to, trans);
   return to;
 }