  /**
   * Builds a LeNet-5-style network: two 5x5 convolution + 2x2 max-pooling blocks,
   * two dense layers (120 and 84 units), and a softmax output over outputNum classes.
   */
  private static MultiLayerNetwork deepConvNetwork(
      int seed, int iterations, int numRows, int numColumns, int outputNum) {
    MultiLayerConfiguration.Builder conf =
        new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .activation("sigmoid")
            .weightInit(WeightInit.DISTRIBUTION)
            .dist(new NormalDistribution(0.0, 0.01))
            // .learningRate(7*10e-5)
            .learningRate(1e-3)
            .learningRateScoreBasedDecayRate(1e-1)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .list(7)
            .layer(
                0,
                new ConvolutionLayer.Builder(new int[] {5, 5}, new int[] {1, 1})
                    .name("cnn1")
                    .nIn(1) // input channels, not pixels; spatial size comes from cnnInputSize
                    .nOut(6)
                    .build())
            .layer(
                1,
                new SubsamplingLayer.Builder(
                        SubsamplingLayer.PoolingType.MAX, new int[] {2, 2}, new int[] {2, 2})
                    .name("maxpool1")
                    .build())
            .layer(
                2,
                new ConvolutionLayer.Builder(new int[] {5, 5}, new int[] {1, 1})
                    .name("cnn2")
                    .nOut(16)
                    .biasInit(1)
                    .build())
            .layer(
                3,
                new SubsamplingLayer.Builder(
                        SubsamplingLayer.PoolingType.MAX, new int[] {2, 2}, new int[] {2, 2})
                    .name("maxpool2")
                    .build())
            .layer(4, new DenseLayer.Builder().name("ffn1").nOut(120).build())
            .layer(5, new DenseLayer.Builder().name("ffn2").nOut(84).build())
            .layer(
                6,
                new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .name("output")
                    .nOut(outputNum)
                    .activation("softmax") // radial basis function
                    // required
                    .build())
            .backprop(true)
            .pretrain(false)
            .cnnInputSize(numRows, numColumns, 1); // single-channel numRows x numColumns inputs

    // Note: callers are expected to call init() on the returned model before use.
    return new MultiLayerNetwork(conf.build());
  }
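
  /**
   * Sanity check: runs a single convolution layer over a fixed 3x3 input and prints
   * its parameters and pre-activation output for manual verification.
   */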
  public static void testAccuracy() {
    double[][][][] data = {
      {
        {
          {1.0, 2.0, 3.0},
          {4.0, 5.0, 6.0},
          {7.0, 8.0, 9.0}
        }
      }
    };
    // Flatten to row-major ('c') order and reshape to [minibatch, channels, height, width].
    double[] flat = ArrayUtil.flattenDoubleArray(data);
    int[] shape = {1, 1, 3, 3};
    INDArray input = Nd4j.create(flat, shape, 'c');

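    // Assumed TestCase field order, inferred from how the fields are used below:
    // (nInputPlane, nOutputPlane, kW, kH, dW, dH, padW, padH, inputWidth, inputHeight)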
    TestCase testCase = new TestCase(1, 1, 2, 2, 1, 1, 0, 0, 3, 3);

    ConvolutionLayer convolutionLayerConf =
        new ConvolutionLayer.Builder(testCase.kW, testCase.kH)
            .nIn(testCase.nInputPlane)
            .stride(testCase.dW, testCase.dH)
            .padding(testCase.padW, testCase.padH)
            .nOut(testCase.nOutputPlane)
            .build();

    MultiLayerConfiguration.Builder builder =
        new NeuralNetConfiguration.Builder().list().layer(0, convolutionLayerConf);

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setInput(input);
    // Set the input on the layer itself as well, since preOutput is called on the
    // layer directly rather than through the network.
    model.getLayer(0).setInput(input);
    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer convolutionLayer =
        (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) model.getLayer(0);

    System.out.println(convolutionLayer.params()); // flattened weights and biases
    System.out.println(convolutionLayer.preOutput(false)); // pre-activation output (training = false)
  }
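
  /**
   * Benchmarks the forward pass of a single convolution layer for each test case and
   * appends the mean time per call (in ms) to dl4jPerformance.csv.
   */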
  public static void testForward() {
    for (TestCase testCase : allTestCases) {
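      // A new writer per test case, opened in append mode so all results land in one CSV.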
      try (BufferedWriter writer =
          new BufferedWriter(new FileWriter(new File("dl4jPerformance.csv"), true))) {
        ConvolutionLayer convolutionLayerConf =
            new ConvolutionLayer.Builder(testCase.kW, testCase.kH)
                .nIn(testCase.nInputPlane)
                .stride(testCase.dW, testCase.dH)
                .padding(testCase.padW, testCase.padH)
                .nOut(testCase.nOutputPlane)
                .build();

        MultiLayerConfiguration.Builder builder =
            new NeuralNetConfiguration.Builder().list().layer(0, convolutionLayerConf);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        // DL4J convolutions expect NCHW input: [minibatch, channels, height, width].
        INDArray input =
            Nd4j.rand(
                seed, batchSize, testCase.nInputPlane, testCase.inputHeight, testCase.inputWidth);
        model.setInput(input);
        model.getLayer(0).setInput(input);
        org.deeplearning4j.nn.layers.convolution.ConvolutionLayer convolutionLayer =
            (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) model.getLayer(0);

        long start = System.nanoTime();
        for (int i = 0; i < forwardIterations; i++) {
          convolutionLayer.preOutput(false);
        }
        long end = System.nanoTime();
        // Mean wall-clock time per forward pass, in milliseconds.
        double timeMillis = (end - start) / 1e6 / forwardIterations;

        writer.write(
            "Convolution("
                + testCase.nInputPlane + " " + testCase.nOutputPlane + " "
                + testCase.kW + " " + testCase.kH + " "
                + testCase.dW + " " + testCase.dH + " "
                + testCase.padW + " " + testCase.padH + " "
                + testCase.inputWidth + " " + testCase.inputHeight
                + ") forward, " + timeMillis + "\n");
      } catch (Exception ex) {
        ex.printStackTrace();
      }
    }
  }
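
  /**
   * Benchmarks the backward pass of a single convolution layer for each test case and
   * appends the mean time per call (in ms) to dl4jPerformance.csv.
   */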
  public static void testBackward() {
    for (TestCase testCase : allTestCases) {
      try (BufferedWriter writer =
          new BufferedWriter(new FileWriter(new File("dl4jPerformance.csv"), true))) {
        ConvolutionLayer convolutionLayerConf =
            new ConvolutionLayer.Builder(testCase.kW, testCase.kH)
                .nIn(testCase.nInputPlane)
                .stride(testCase.dW, testCase.dH)
                .padding(testCase.padW, testCase.padH)
                .nOut(testCase.nOutputPlane)
                .build();

        MultiLayerConfiguration.Builder builder =
            new NeuralNetConfiguration.Builder().list().layer(0, convolutionLayerConf);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        // DL4J convolutions expect NCHW input: [minibatch, channels, height, width].
        INDArray input =
            Nd4j.rand(
                seed, batchSize, testCase.nInputPlane, testCase.inputHeight, testCase.inputWidth);
        model.setInput(input);
        model.getLayer(0).setInput(input);
        model.feedForward();
        org.deeplearning4j.nn.api.Layer convolutionLayer = model.getLayer(0);

        INDArray output = convolutionLayer.activate();
        // Random upstream gradient ("epsilon") with the same shape as the layer's output.
        INDArray epsilon =
            Nd4j.rand(seed, output.size(0), output.size(1), output.size(2), output.size(3));
        // initGradientsView() is private on MultiLayerNetwork, but the gradient view
        // arrays must exist before backpropGradient can be called on a layer directly,
        // so invoke it via reflection.
        Method initGradientsView = model.getClass().getDeclaredMethod("initGradientsView");
        initGradientsView.setAccessible(true);
        initGradientsView.invoke(model);

        long start = System.nanoTime();
        for (int i = 0; i < backwardIterations; i++) {
          convolutionLayer.backpropGradient(epsilon);
        }
        long end = System.nanoTime();
        // Mean wall-clock time per backward pass, in milliseconds.
        double timeMillis = (end - start) / 1e6 / backwardIterations;

        writer.write(
            "Convolution("
                + testCase.nInputPlane + " " + testCase.nOutputPlane + " "
                + testCase.kW + " " + testCase.kH + " "
                + testCase.dW + " " + testCase.dH + " "
                + testCase.padW + " " + testCase.padH + " "
                + testCase.inputWidth + " " + testCase.inputHeight
                + ") backward, " + timeMillis + "\n");
      } catch (Exception ex) {
        ex.printStackTrace();
      }
    }
  }
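
  /**
   * Minimal example driver (an assumed entry point, not part of the original class):
   * runs both benchmarks in sequence. Assumes allTestCases, seed, batchSize,
   * forwardIterations, and backwardIterations are initialized elsewhere in this class.
   */
  public static void main(String[] args) {
    testForward();
    testBackward();
  }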