@Test
  public void testCNNInputSetupMNIST() throws Exception {
    INDArray input = getMnistData();
    Layer layer = getMNISTConfig();
    layer.activate(input);

    assertEquals(input, layer.input());
    assertArrayEquals(input.shape(), layer.input().shape());
  }
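  // --- Hedged sketch (not part of the original listing) of the getMnistData()
  // helper used by these tests. Assumes DL4J's MnistDataSetIterator and reshapes
  // the flat 784-pixel rows into CNN input of shape [miniBatch, 1, 28, 28].
  // On older DL4J versions, getFeatures() may be named getFeatureMatrix().
  private static INDArray getMnistData() throws Exception {
    int nExamples = 5; // assumed mini-batch size
    DataSetIterator data = new MnistDataSetIterator(nExamples, nExamples);
    INDArray features = data.next().getFeatures(); // shape [nExamples, 784]
    return features.reshape(nExamples, 1, 28, 28);
  }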
  @Test
  public void testFeatureMapShape() throws Exception {
    INDArray input = getMnistData();

    Layer layer = getCNNConfig(nChannelsIn, depth, kernelSize, stride, padding);
    INDArray convActivations = layer.activate(input);

    assertEquals(featureMapWidth, convActivations.size(2));
    assertEquals(depth, convActivations.size(1));
  }
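  // --- Hedged sketch (not part of the original listing) of the getCNNConfig()
  // helper: instantiates a standalone ConvolutionLayer from a single-layer
  // NeuralNetConfiguration. The instantiate(...) signature varies across DL4J
  // versions; this follows the older 0.x-style API these tests appear to target.
  private static Layer getCNNConfig(int nIn, int nOut, int[] kernelSize, int[] stride, int[] padding) {
    ConvolutionLayer layer = new ConvolutionLayer.Builder(kernelSize, stride, padding)
        .nIn(nIn)
        .nOut(nOut)
        .activation("sigmoid") // assumed; matches the sigmoid outputs asserted below
        .build();
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(layer).build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    return conf.getLayer().instantiate(conf, null, 0, params, true);
  }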
  @Test
  public void testCNNInputSetup() throws Exception {
    INDArray input = getMnistData();
    int[] stride = new int[] {3, 3};
    int[] padding = new int[] {1, 1};

    Layer layer = getCNNConfig(nChannelsIn, depth, kernelSize, stride, padding);
    layer.activate(input);

    assertEquals(input, layer.input());
    assertArrayEquals(input.shape(), layer.input().shape());
  }
  @Test
  public void testFeatureMapShapeMNIST() throws Exception {
    int inputWidth = 28;
    int[] stride = new int[] {2, 2};
    int[] padding = new int[] {0, 0};
    int[] kernelSize = new int[] {9, 9};
    int nChannelsIn = 1;
    int depth = 20;
    int featureMapWidth = (inputWidth + padding[1] * 2 - kernelSize[1]) / stride[1] + 1;
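    // With the values above: (28 + 0 * 2 - 9) / 2 + 1 = 19 / 2 + 1 = 9 + 1 = 10
    // (integer division), so each of the 20 feature maps should be 10 x 10.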

    INDArray input = getMnistData();

    Layer layer = getCNNConfig(nChannelsIn, depth, kernelSize, stride, padding);
    INDArray convActivations = layer.activate(input);

    assertEquals(featureMapWidth, convActivations.size(2));
    assertEquals(depth, convActivations.size(1));
  }
  @Test
  public void testActivateResults() {
    Layer layer = getContainedConfig();
    INDArray input = getContainedData();
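    // The two distinct values below are sigmoid(4) ≈ 0.98201379 and
    // sigmoid(8) ≈ 0.99966465, evidently the sigmoid of the two
    // pre-activation sums the contained data produces at each output position.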
    INDArray expectedOutput =
        Nd4j.create(
            new double[] {
              0.98201379, 0.98201379, 0.98201379, 0.98201379, 0.99966465,
              0.99966465, 0.99966465, 0.99966465, 0.98201379, 0.98201379,
              0.98201379, 0.98201379, 0.99966465, 0.99966465, 0.99966465,
              0.99966465, 0.98201379, 0.98201379, 0.98201379, 0.98201379,
              0.99966465, 0.99966465, 0.99966465, 0.99966465, 0.98201379,
              0.98201379, 0.98201379, 0.98201379, 0.99966465, 0.99966465,
              0.99966465, 0.99966465
            },
            new int[] {1, 2, 4, 4});

    INDArray convActivations = layer.activate(input);

    assertEquals(expectedOutput, convActivations);
    assertArrayEquals(expectedOutput.shape(), convActivations.shape());
  }
Example #6
  public static void testBackward() {
    for (TestCase testCase : allTestCases) {
      try (BufferedWriter writer =
          new BufferedWriter(new FileWriter(new File("dl4jPerformance.csv"), true))) {
        ConvolutionLayer convolutionLayerConfig =
            new ConvolutionLayer.Builder(testCase.kW, testCase.kH)
                .nIn(testCase.nInputPlane)
                .stride(testCase.dW, testCase.dH)
                .padding(testCase.padW, testCase.padH)
                .nOut(testCase.nOutputPlane)
                .build();

        MultiLayerConfiguration.Builder builder =
            new NeuralNetConfiguration.Builder().list().layer(0, convolutionLayerConfig);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        INDArray input =
            Nd4j.rand(
                seed, batchSize, testCase.nInputPlane, testCase.inputWidth, testCase.inputHeight);
        model.setInput(input);
        model.getLayer(0).setInput(input);
        model.feedForward();
        org.deeplearning4j.nn.api.Layer convolutionLayer = model.getLayer(0);

        INDArray output = convolutionLayer.activate();
        INDArray epsilon =
            Nd4j.rand(seed, output.size(0), output.size(1), output.size(2), output.size(3));
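        // backpropGradient(...) needs the flattened gradient-view buffers that
        // MultiLayerNetwork normally allocates internally before training, so we
        // invoke the non-public initGradientsView() via reflection here.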
        Method initGradientView = model.getClass().getDeclaredMethod("initGradientsView");
        initGradientView.setAccessible(true);
        initGradientView.invoke(model);

        long start = System.nanoTime();
        for (int i = 0; i < backwardIterations; i++) {
          convolutionLayer.backpropGradient(epsilon);
        }
        long end = System.nanoTime();
        double timeMillis = (end - start) / 1e6 / backwardIterations;

        writer.write(
            "Convolution("
                + testCase.nInputPlane
                + " "
                + testCase.nOutputPlane
                + " "
                + testCase.kW
                + " "
                + testCase.kH
                + " "
                + testCase.dW
                + " "
                + testCase.dH
                + " "
                + testCase.padW
                + " "
                + testCase.padH
                + " "
                + testCase.inputWidth
                + " "
                + testCase.inputHeight
                + ") "
                + " backward, "
                + timeMillis
                + "\n");
      } catch (Exception ex) {
        ex.printStackTrace();
      }
    }
  }
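  // --- Hedged sketch (not part of the original listing) of the TestCase holder
  // and benchmark constants referenced by testBackward(). Field names follow the
  // usage above; the concrete values of seed, batchSize, backwardIterations, and
  // the contents of allTestCases are assumptions.
  private static class TestCase {
    final int nInputPlane, nOutputPlane, kW, kH, dW, dH, padW, padH, inputWidth, inputHeight;

    TestCase(int nInputPlane, int nOutputPlane, int kW, int kH, int dW, int dH,
        int padW, int padH, int inputWidth, int inputHeight) {
      this.nInputPlane = nInputPlane;
      this.nOutputPlane = nOutputPlane;
      this.kW = kW;
      this.kH = kH;
      this.dW = dW;
      this.dH = dH;
      this.padW = padW;
      this.padH = padH;
      this.inputWidth = inputWidth;
      this.inputHeight = inputHeight;
    }
  }

  private static final long seed = 42;              // assumed RNG seed
  private static final int batchSize = 32;          // assumed mini-batch size
  private static final int backwardIterations = 10; // assumed timing iterations
  private static final List<TestCase> allTestCases = new ArrayList<>();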