// TODO remove/move, technically this is testing Nd4j functionality @Test public void testCreateFeatureMapMethod() { Layer layer = getContainedConfig(); INDArray input = getContainedData(); int inputWidth = input.shape()[0]; int featureMapWidth = (inputWidth + layer.conf().getPadding()[0] * 2 - layer.conf().getKernelSize()[0]) / layer.conf().getStride()[0] + 1; INDArray expectedOutput = Nd4j.create( new double[] { 1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4, 4, 4 }, new int[] {1, 1, 2, 2, 4, 4}); layer.setInput(input); org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 = (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer; INDArray featureMaps = layer2.createFeatureMapColumn(); assertEquals(featureMapWidth, featureMaps.shape()[4]); assertEquals(expectedOutput.shape(), featureMaps.shape()); assertEquals(expectedOutput, featureMaps); }
/**
 * Verifies backpropagation through the convolution layer: the returned epsilon
 * (input gradient) and the W/b parameter gradients must match precomputed values
 * for the contained im2col column and the class-level {@code epsilon}.
 */
@Test
public void testBackpropResults() {
    Layer layer = getContainedConfig();
    INDArray col = getContainedCol();

    // Expected dL/dW with shape [outDepth, inDepth, kH, kW] = [2,1,2,2].
    INDArray expectedWeightGradient = Nd4j.create(
            new double[] {-1440., -1440., -1984., -1984., -1440., -1440., -1984., -1984.},
            new int[] {2, 1, 2, 2});
    // Expected dL/db, one entry per output channel.
    INDArray expectedBiasGradient = Nd4j.create(
            new double[] {-544., -544.},
            new int[] {
                2,
            });
    // Expected epsilon propagated to the previous layer, shape [1,1,8,8].
    INDArray expectedEpsilon = Nd4j.create(
            new double[] {
                -12., -12., -12., -12., -12., -12., -12., -12., -12., -12., -12.,
                -12., -12., -12., -12., -12., -56., -56., -56., -56., -56., -56.,
                -56., -56., -56., -56., -56., -56., -56., -56., -56., -56., -12.,
                -12., -12., -12., -12., -12., -12., -12., -12., -12., -12., -12.,
                -12., -12., -12., -12., -56., -56., -56., -56., -56., -56., -56.,
                -56., -56., -56., -56., -56., -56., -56., -56., -56.
            },
            new int[] {1, 1, 8, 8});

    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
            (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
    layer2.setCol(col);
    // 'epsilon' is a fixture field declared elsewhere in this test class.
    Pair<Gradient, INDArray> pair = layer2.backpropGradient(epsilon);

    // FIX: shape() returns a primitive array; assertEquals(Object, Object) compares
    // arrays by reference and can never validate contents — use assertArrayEquals.
    // Fully qualified because the file's static-import list is not visible here.
    org.junit.Assert.assertArrayEquals(expectedEpsilon.shape(), pair.getSecond().shape());
    org.junit.Assert.assertArrayEquals(
            expectedWeightGradient.shape(), pair.getFirst().getGradientFor("W").shape());
    org.junit.Assert.assertArrayEquals(
            expectedBiasGradient.shape(), pair.getFirst().getGradientFor("b").shape());
    // INDArray.equals is content-based, so plain assertEquals is correct for values.
    assertEquals(expectedEpsilon, pair.getSecond());
    assertEquals(expectedWeightGradient, pair.getFirst().getGradientFor("W"));
    assertEquals(expectedBiasGradient, pair.getFirst().getGradientFor("b"));
}
/**
 * Verifies {@code calculateDelta}: given the contained im2col column and the
 * class-level {@code epsilon}, the computed delta must match the precomputed
 * [1,2,4,4] tensor.
 */
@Test
public void testCalculateDelta() {
    Layer layer = getContainedConfig();
    INDArray col = getContainedCol();

    INDArray expectedOutput = Nd4j.create(
            new double[] {
                -12., -12., -12., -12., -56., -56., -56., -56., -12., -12., -12.,
                -12., -56., -56., -56., -56., -12., -12., -12., -12., -56., -56.,
                -56., -56., -12., -12., -12., -12., -56., -56., -56., -56.
            },
            new int[] {1, 2, 4, 4});

    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
            (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
    layer2.setCol(col);
    // 'epsilon' is a fixture field declared elsewhere in this test class.
    INDArray delta = layer2.calculateDelta(epsilon);

    // FIX: shape() returns a primitive array; assertEquals(Object, Object) compares
    // arrays by reference and can never validate contents — use assertArrayEquals.
    // Fully qualified because the file's static-import list is not visible here.
    org.junit.Assert.assertArrayEquals(expectedOutput.shape(), delta.shape());
    // INDArray.equals is content-based, so plain assertEquals is correct here.
    assertEquals(expectedOutput, delta);
}
/**
 * Verifies {@code preOutput(true)}: with the contained im2col column set, the
 * pre-activation output must match the precomputed [1,2,4,4] tensor.
 */
@Test
public void testPreOutputMethod() {
    Layer layer = getContainedConfig();
    INDArray col = getContainedCol();

    INDArray expectedOutput = Nd4j.create(
            new double[] {
                4., 4., 4., 4., 4., 4., 4., 4., 8., 8., 8., 8., 8., 8., 8., 8.,
                4., 4., 4., 4., 4., 4., 4., 4., 8., 8., 8., 8., 8., 8., 8., 8.
            },
            new int[] {1, 2, 4, 4});

    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
            (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
    layer2.setCol(col);
    // true => training mode; should not affect the linear pre-output itself.
    INDArray activation = layer2.preOutput(true);

    // FIX: shape() returns a primitive array; assertEquals(Object, Object) compares
    // arrays by reference and can never validate contents — use assertArrayEquals.
    // Fully qualified because the file's static-import list is not visible here.
    org.junit.Assert.assertArrayEquals(expectedOutput.shape(), activation.shape());
    // INDArray.equals is content-based, so plain assertEquals is correct here.
    assertEquals(expectedOutput, activation);
}