@Test
  public void testBackProp() {
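    // Fix the RNG seed so weight initialization is reproducible across runs.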
    Nd4j.getRandom().setSeed(123);
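    // 4-in/3-out MLP for Iris: two tanh hidden layers (3 and 2 units) from
    // hiddenLayerSizes, plus an output layer that the override below switches
    // to softmax with multi-class cross-entropy. Backprop only (no pretraining),
    // optimized by conjugate gradient for 10 iterations.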
    MultiLayerConfiguration conf =
        new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .iterations(10)
            .weightInit(WeightInit.DISTRIBUTION)
            .dist(new UniformDistribution(0, 1))
            .activationFunction("tanh")
            .nIn(4)
            .nOut(3)
            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer())
            .list(3)
            .backward(true)
            .pretrain(false)
            .hiddenLayerSizes(new int[] {3, 2})
            .override(
                2,
                new ConfOverride() {
                  @Override
                  public void overrideLayer(int i, NeuralNetConfiguration.Builder builder) {
                    builder.activationFunction("softmax");
                    builder.layer(new org.deeplearning4j.nn.conf.layers.OutputLayer());
                    builder.lossFunction(LossFunctions.LossFunction.MCXENT);
                  }
                })
            .build();

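    // Build the network and log the training score after every iteration.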
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.setListeners(Lists.<IterationListener>newArrayList(new ScoreIterationListener(1)));

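    // Load all 150 Iris examples as a single batch.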
    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    DataSet next = iter.next();
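    // Standardize each feature to zero mean and unit variance, then hold out
    // 40 of the 150 examples for evaluation.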
    next.normalizeZeroMeanZeroUnitVariance();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);
    // fit(DataSet) binds the features and labels itself before training.
    network.fit(trainTest.getTrain());

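    // Evaluate the trained network on the held-out test split.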
    DataSet test = trainTest.getTest();
    Evaluation eval = new Evaluation();
    INDArray output = network.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
  }

  @Test
  public void testDbn() throws Exception {
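    // Lift ND4J's print truncation limits so full matrices appear in the logs.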
    Nd4j.MAX_SLICES_TO_PRINT = -1;
    Nd4j.MAX_ELEMENTS_PER_SLICE = -1;
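    // Two-layer DBN on Iris: a Gaussian-visible/rectified-hidden RBM trained
    // with contrastive divergence (k = 1), followed by a classifier output
    // layer installed by the ClassifierOverride. Optimized with L-BFGS, L2
    // regularization, and gradients constrained to unit norm.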
    MultiLayerConfiguration conf =
        new NeuralNetConfiguration.Builder()
            .iterations(100)
            .layer(new org.deeplearning4j.nn.conf.layers.RBM())
            .weightInit(WeightInit.DISTRIBUTION)
            .dist(new UniformDistribution(0, 1))
            .activationFunction("tanh")
            .momentum(0.9)
            .optimizationAlgo(OptimizationAlgorithm.LBFGS)
            .constrainGradientToUnitNorm(true)
            .k(1)
            .regularization(true)
            .l2(2e-4)
            .visibleUnit(org.deeplearning4j.nn.conf.layers.RBM.VisibleUnit.GAUSSIAN)
            .hiddenUnit(org.deeplearning4j.nn.conf.layers.RBM.HiddenUnit.RECTIFIED)
            .lossFunction(LossFunctions.LossFunction.RMSE_XENT)
            .nIn(4)
            .nOut(3)
            .list(2)
            .hiddenLayerSizes(3)
            .override(1, new ClassifierOverride(1))
            .build();

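    // A second, standalone RBM configuration (784 visible units, 600 hidden
    // units, 10% sparsity target) used below to exercise the layer factory.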
    NeuralNetConfiguration conf2 =
        new NeuralNetConfiguration.Builder()
            .layer(new org.deeplearning4j.nn.conf.layers.RBM())
            .nIn(784)
            .nOut(600)
            .applySparsity(true)
            .sparsity(0.1)
            .build();

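    // Build the standalone RBM through the layer factory, logging its score
    // every 2 iterations.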
    Layer rbmLayer =
        LayerFactories.getFactory(conf2)
            .create(conf2, Arrays.<IterationListener>asList(new ScoreIterationListener(2)), 0);

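    // Assemble and initialize the DBN itself.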
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    DataSet next = iter.next();

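    // Dump the raw feature matrix to a tab-separated text file.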
    Nd4j.writeTxt(next.getFeatureMatrix(), "iris.txt", "\t");

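    // Standardize the features, then split 110 examples for training and 40 for testing.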
    next.normalizeZeroMeanZeroUnitVariance();

    SplitTestAndTrain testAndTrain = next.splitTestAndTrain(110);
    DataSet train = testAndTrain.getTrain();

    network.fit(train);

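    // Evaluate the trained DBN on the held-out test split.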
    DataSet test = testAndTrain.getTest();

    Evaluation eval = new Evaluation();
    INDArray output = network.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
  }