Example #1
0
 /**
  * Applies L1/L2 regularization to the gradient for the given parameter, then scales it by the
  * mini-batch size and, if configured, constrains the gradient to unit L2 norm. The gradient is
  * modified in place.
  *
  * @param layer the layer whose configuration and parameter values drive the update
  * @param gradient the gradient for {@code param}; updated in place
  * @param param the parameter key the gradient belongs to (bias parameters are not regularized)
  */
 public void postApply(Layer layer, INDArray gradient, String param) {
   NeuralNetConfiguration conf = layer.conf();
   INDArray params = layer.getParam(param);
   // Bias parameters are conventionally excluded from both L1 and L2 regularization.
   boolean regularize =
       conf.isUseRegularization() && !param.equals(DefaultParamInitializer.BIAS_KEY);
   if (regularize && conf.getLayer().getL2() > 0) {
     // dC/dw = dC0/dw + lambda/n * w where C0 is the pre-L2 cost function
     gradient.addi(params.mul(conf.getLayer().getL2()));
   }
   if (regularize && conf.getLayer().getL1() > 0) {
     // L1 term: lambda * sign(w)
     gradient.addi(Transforms.sign(params).muli(conf.getLayer().getL1()));
   }
   if (conf.isMiniBatch()) {
     gradient.divi(layer.getInputMiniBatchSize());
   }
   if (conf.isConstrainGradientToUnitNorm()) {
     // NOTE(review): divides by the full L2 norm; an all-zero gradient would produce NaNs —
     // confirm upstream guarantees a non-zero gradient here.
     gradient.divi(gradient.norm2(Integer.MAX_VALUE));
   }
 }