    // Note: precision is off on this test, but the numbers are close.
    // Investigation in a future release should determine how to resolve this.
    @Test
    public void testBackpropResultsContained() {
        Layer layer = getContainedConfig();
        INDArray input = getContainedData();
        INDArray col = getContainedCol();
        INDArray epsilon = Nd4j.ones(1, 2, 4, 4);
        INDArray expectedBiasGradient =
                Nd4j.create(new double[] {0.16608272, 0.16608272}, new int[] {1, 2});
        INDArray expectedWeightGradient = Nd4j.create(
                new double[] {
                    0.17238397, 0.17238397, 0.33846668, 0.33846668,
                    0.17238397, 0.17238397, 0.33846668, 0.33846668
                },
                new int[] {2, 1, 2, 2});
        INDArray expectedEpsilon = Nd4j.create(
                new double[] {
                    0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0., 0.,
                    0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0., 0.,
                    0.02036651, 0.02036651, 0.02036651, 0.02036651, 0.02036651, 0.02036651, 0., 0.,
                    0.02036651, 0.02036651, 0.02036651, 0.02036651, 0.02036651, 0.02036651, 0., 0.,
                    0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0., 0.,
                    0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383, 0., 0.,
                    0., 0., 0., 0., 0., 0., 0., 0.,
                    0., 0., 0., 0., 0., 0., 0., 0.
                },
                new int[] {1, 1, 8, 8});

        layer.setInput(input);
        org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
                (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
        layer2.setCol(col);
        Pair<Gradient, INDArray> pair = layer2.backpropGradient(epsilon);

        assertArrayEquals(expectedEpsilon.shape(), pair.getSecond().shape());
        assertArrayEquals(expectedWeightGradient.shape(), pair.getFirst().getGradientFor("W").shape());
        assertArrayEquals(expectedBiasGradient.shape(), pair.getFirst().getGradientFor("b").shape());
        assertEquals(expectedEpsilon, pair.getSecond());
        assertEquals(expectedWeightGradient, pair.getFirst().getGradientFor("W"));
        assertEquals(expectedBiasGradient, pair.getFirst().getGradientFor("b"));
    }
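    // A possible way to address the precision note above (a sketch, not part of the original
    // test): compare against the expected arrays with an explicit tolerance instead of exact
    // equality. This assumes INDArray#equalsWithEps is available in the ND4J version in use and
    // that the usual static JUnit imports (assertTrue, assertArrayEquals) are present; the 1e-6
    // tolerance below is a placeholder that would need tuning against the real outputs.
    private static void assertApproxEquals(INDArray expected, INDArray actual, double eps) {
        // Shapes must match exactly; only the values are compared with a tolerance.
        assertArrayEquals(expected.shape(), actual.shape());
        assertTrue(expected.equalsWithEps(actual, eps));
    }
    // Example usage inside the test above:
    // assertApproxEquals(expectedEpsilon, pair.getSecond(), 1e-6);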
    @Test
    public void testBackpropResults() {
        Layer layer = getContainedConfig();
        INDArray col = getContainedCol();
        // The original test referenced an undefined epsilon; a ones tensor matching the
        // contained test above is assumed here.
        INDArray epsilon = Nd4j.ones(1, 2, 4, 4);
        INDArray expectedWeightGradient = Nd4j.create(
                new double[] {-1440., -1440., -1984., -1984., -1440., -1440., -1984., -1984.},
                new int[] {2, 1, 2, 2});
        INDArray expectedBiasGradient = Nd4j.create(new double[] {-544., -544.}, new int[] {2});
        INDArray expectedEpsilon = Nd4j.create(
                new double[] {
                    -12., -12., -12., -12., -12., -12., -12., -12.,
                    -12., -12., -12., -12., -12., -12., -12., -12.,
                    -56., -56., -56., -56., -56., -56., -56., -56.,
                    -56., -56., -56., -56., -56., -56., -56., -56.,
                    -12., -12., -12., -12., -12., -12., -12., -12.,
                    -12., -12., -12., -12., -12., -12., -12., -12.,
                    -56., -56., -56., -56., -56., -56., -56., -56.,
                    -56., -56., -56., -56., -56., -56., -56., -56.
                },
                new int[] {1, 1, 8, 8});

        org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
                (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
        layer2.setCol(col);
        Pair<Gradient, INDArray> pair = layer2.backpropGradient(epsilon);

        // Use assertArrayEquals for shape comparisons; assertEquals on bare arrays only compares references.
        assertArrayEquals(expectedEpsilon.shape(), pair.getSecond().shape());
        assertArrayEquals(expectedWeightGradient.shape(), pair.getFirst().getGradientFor("W").shape());
        assertArrayEquals(expectedBiasGradient.shape(), pair.getFirst().getGradientFor("b").shape());
        assertEquals(expectedEpsilon, pair.getSecond());
        assertEquals(expectedWeightGradient, pair.getFirst().getGradientFor("W"));
        assertEquals(expectedBiasGradient, pair.getFirst().getGradientFor("b"));
    }