  // note: precision is slightly off in this test, but the numbers are close;
  // a future release should investigate how to resolve this (a tolerance-based
  // comparison, sketched after this test, is one possible approach)
  @Test
  public void testCalculateDeltaContained() {
    Layer layer = getContainedConfig();
    INDArray input = getContainedData();
    INDArray col = getContainedCol();
    INDArray epsilon = Nd4j.ones(1, 2, 4, 4);

    INDArray expectedOutput =
        Nd4j.create(
            new double[] {
              0.02036651, 0.02036651, 0.02036651, 0.02036651, 0.00039383,
              0.00039383, 0.00039383, 0.00039383, 0.02036651, 0.02036651,
              0.02036651, 0.02036651, 0.00039383, 0.00039383, 0.00039383,
              0.00039383, 0.02036651, 0.02036651, 0.02036651, 0.02036651,
              0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.02036651,
              0.02036651, 0.02036651, 0.02036651, 0.00039383, 0.00039383,
              0.00039383, 0.00039383
            },
            new int[] {1, 2, 4, 4});

    layer.setInput(input);
    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
        (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
    layer2.setCol(col);
    INDArray delta = layer2.calculateDelta(epsilon);

    assertArrayEquals(expectedOutput.shape(), delta.shape());
    assertEquals(expectedOutput, delta);
  }
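
  // One possible way to address the precision note above, sketched here but not yet wired into
  // the assertions: compare element-wise within a tolerance rather than with exact equality.
  // The helper name is hypothetical (not part of the original test class), and the sketch
  // assumes dense, non-view arrays so that data().asDouble() reflects the logical ordering.
  private static void assertEqualsWithinEps(INDArray expected, INDArray actual, double eps) {
    // Shapes must still match exactly; only the element values are compared with a tolerance.
    assertArrayEquals(expected.shape(), actual.shape());
    assertArrayEquals(expected.data().asDouble(), actual.data().asDouble(), eps);
  }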
  // TODO remove/move: technically this tests Nd4j functionality rather than the layer itself
  @Test
  public void testCreateFeatureMapMethod() {
    Layer layer = getContainedConfig();
    INDArray input = getContainedData();
    // input is in NCHW order, so the spatial width is the last dimension (not index 0)
    int inputWidth = input.shape()[3];
    int featureMapWidth =
        (inputWidth + layer.conf().getPadding()[0] * 2 - layer.conf().getKernelSize()[0])
                / layer.conf().getStride()[0]
            + 1;
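    // For the contained config and data used here (a single-channel 8x8 input with a 2x2 kernel,
    // stride 2 and no padding, as implied by the expected shapes in these tests), this evaluates
    // to (8 + 0 * 2 - 2) / 2 + 1 = 4.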

    INDArray expectedOutput =
        Nd4j.create(
            new double[] {
              1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3,
              3, 1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4,
              4, 4, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4, 4, 4
            },
            new int[] {1, 1, 2, 2, 4, 4});

    layer.setInput(input);
    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
        (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
    INDArray featureMaps = layer2.createFeatureMapColumn();

    assertEquals(featureMapWidth, featureMaps.shape()[4]);
    assertArrayEquals(expectedOutput.shape(), featureMaps.shape());
    assertEquals(expectedOutput, featureMaps);
  }
  // note: precision is slightly off in this test, but the numbers are close;
  // a future release should investigate how to resolve this (see the tolerance-based
  // helper sketched above for one possible approach)
  @Test
  public void testBackpropResultsContained() {
    Layer layer = getContainedConfig();
    INDArray input = getContainedData();
    INDArray col = getContainedCol();
    INDArray epsilon = Nd4j.ones(1, 2, 4, 4);

    INDArray expectedBiasGradient =
        Nd4j.create(new double[] {0.16608272, 0.16608272}, new int[] {1, 2});
    INDArray expectedWeightGradient =
        Nd4j.create(
            new double[] {
              0.17238397, 0.17238397, 0.33846668, 0.33846668, 0.17238397,
              0.17238397, 0.33846668, 0.33846668
            },
            new int[] {2, 1, 2, 2});
    INDArray expectedEpsilon =
        Nd4j.create(
            new double[] {
              0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383,
              0.00039383, 0., 0., 0.00039383, 0.00039383,
              0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.,
              0., 0.02036651, 0.02036651, 0.02036651, 0.02036651,
              0.02036651, 0.02036651, 0., 0., 0.02036651,
              0.02036651, 0.02036651, 0.02036651, 0.02036651, 0.02036651,
              0., 0., 0.00039383, 0.00039383, 0.00039383,
              0.00039383, 0.00039383, 0.00039383, 0., 0.,
              0.00039383, 0.00039383, 0.00039383, 0.00039383, 0.00039383,
              0.00039383, 0., 0., 0., 0.,
              0., 0., 0., 0., 0.,
              0., 0., 0., 0., 0.,
              0., 0., 0., 0.
            },
            new int[] {1, 1, 8, 8});

    layer.setInput(input);
    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer2 =
        (org.deeplearning4j.nn.layers.convolution.ConvolutionLayer) layer;
    layer2.setCol(col);
    Pair<Gradient, INDArray> pair = layer2.backpropGradient(epsilon);

    assertArrayEquals(expectedEpsilon.shape(), pair.getSecond().shape());
    assertArrayEquals(expectedWeightGradient.shape(), pair.getFirst().getGradientFor("W").shape());
    assertArrayEquals(expectedBiasGradient.shape(), pair.getFirst().getGradientFor("b").shape());
    assertEquals(expectedEpsilon, pair.getSecond());
    assertEquals(expectedWeightGradient, pair.getFirst().getGradientFor("W"));
    assertEquals(expectedBiasGradient, pair.getFirst().getGradientFor("b"));
  }
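
  // If the precision issue noted above matters in practice, the exact-equality checks in
  // testBackpropResultsContained could be replaced with the tolerance-based helper sketched
  // earlier, for example (illustrative only; the tolerance value is arbitrary):
  //
  //   assertEqualsWithinEps(expectedEpsilon, pair.getSecond(), 1e-6);
  //   assertEqualsWithinEps(expectedWeightGradient, pair.getFirst().getGradientFor("W"), 1e-6);
  //   assertEqualsWithinEps(expectedBiasGradient, pair.getFirst().getGradientFor("b"), 1e-6);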