@Test
  public void testDbnFaces() {
    // Pull a single batch of 28x28 LFW face images and standardize it in place
    // (zero mean / unit variance), as required by the Gaussian visible units below.
    DataSetIterator faces = new LFWDataSetIterator(28, 28);
    DataSet batch = faces.next();
    batch.normalizeZeroMeanZeroUnitVariance();

    // Four-layer stack of Gaussian-visible / rectified-hidden RBMs with hidden
    // sizes 600/250/100; layer 3 is overridden to act as the classifier output.
    MultiLayerConfiguration configuration =
        new NeuralNetConfiguration.Builder()
            .nIn(batch.numInputs())
            .nOut(batch.numOutcomes())
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .iterations(10)
            .learningRate(1e-3)
            .lossFunction(LossFunctions.LossFunction.RMSE_XENT)
            .weightInit(WeightInit.DISTRIBUTION)
            .dist(new NormalDistribution(0, 1e-5))
            .visibleUnit(RBM.VisibleUnit.GAUSSIAN)
            .hiddenUnit(RBM.HiddenUnit.RECTIFIED)
            .constrainGradientToUnitNorm(true)
            .layer(new RBM())
            .list(4)
            .hiddenLayerSizes(600, 250, 100)
            .override(3, new ClassifierOverride())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(configuration);
    net.init();
    // Log the score every 10 iterations and plot the network state every iteration.
    net.setListeners(
        Arrays.<IterationListener>asList(
            new ScoreIterationListener(10), new NeuralNetPlotterIterationListener(1)));
    net.fit(batch);
  }
@Test
  public void testBackProp() {
    // Fix the ND4J RNG seed so weight initialization (and hence the run) is reproducible.
    Nd4j.getRandom().setSeed(123);
    // Three-layer feed-forward network (hidden sizes 3 and 2, tanh activations,
    // 4 inputs / 3 outputs for Iris) trained with backprop only (pretrain=false).
    // Layer 2 is overridden to be a softmax output layer with MCXENT loss.
    // NOTE(review): dist(UniformDistribution) is set while weightInit is XAVIER;
    // the explicit distribution looks like it would be ignored under XAVIER — confirm intent.
    MultiLayerConfiguration conf =
        new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .iterations(10)
            .weightInit(WeightInit.XAVIER)
            .dist(new UniformDistribution(0, 1))
            .activationFunction("tanh")
            .nIn(4)
            .nOut(3)
            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer())
            .list(3)
            .backward(true)
            .pretrain(false)
            .hiddenLayerSizes(new int[] {3, 2})
            .override(
                2,
                new ConfOverride() {
                  @Override
                  public void overrideLayer(int i, NeuralNetConfiguration.Builder builder) {
                    // Replace the last layer's config: softmax output with
                    // multi-class cross-entropy loss for classification.
                    builder.activationFunction("softmax");
                    builder.layer(new org.deeplearning4j.nn.conf.layers.OutputLayer());
                    builder.lossFunction(LossFunctions.LossFunction.MCXENT);
                  }
                })
            .build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.setListeners(Lists.<IterationListener>newArrayList(new ScoreIterationListener(1)));

    // Load the full Iris dataset (150 examples) as a single batch.
    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    DataSet next = iter.next();
    next.normalizeZeroMeanZeroUnitVariance();
    // 110 examples for training, remaining 40 held out for evaluation.
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);
    network.setInput(trainTest.getTrain().getFeatureMatrix());
    network.setLabels(trainTest.getTrain().getLabels());
    // NOTE(review): init() was already called above; this second call after
    // setInput/setLabels re-initializes parameters. Possibly required by this old
    // API to bind shapes from the input — confirm, otherwise it is redundant.
    network.init();
    network.fit(trainTest.getTrain());

    // Evaluate classification accuracy on the held-out split and log the stats;
    // no assertion is made on the score — this test only checks the pipeline runs.
    DataSet test = trainTest.getTest();
    Evaluation eval = new Evaluation();
    INDArray output = network.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
  }