@Test
  public void testGradientWithAsList() {
    // Reproduction test: a DataSet pulled out of DataSet.asList() should be
    // interchangeable with an identical example produced directly by the
    // iterator, but the gradient computation below shows it is not.
    MultiLayerNetwork netFromIterator = new MultiLayerNetwork(getConf());
    MultiLayerNetwork netFromList = new MultiLayerNetwork(getConf());
    netFromIterator.init();
    netFromList.init();

    DataSet fromIterator = new IrisDataSetIterator(1, 150).next();
    DataSet wholeSet = new IrisDataSetIterator(150, 150).next();
    DataSet fromList = wholeSet.asList().get(0);

    // The two examples carry byte-identical feature and label values
    assertArrayEquals(asFloat(fromIterator.getFeatureMatrix()), asFloat(fromList.getFeatureMatrix()), 0.0f); // OK
    assertArrayEquals(asFloat(fromIterator.getLabels()), asFloat(fromList.getLabels()), 0.0f); // OK
    assertEquals(fromIterator, fromList); // Fails, DataSet doesn't override Object.equals()

    // Prime both networks so a gradient can be computed:
    netFromIterator.feedForward(fromIterator.getFeatureMatrix());
    netFromList.feedForward(fromList.getFeatureMatrix());
    ((OutputLayer) netFromIterator.getLayers()[1]).setLabels(fromIterator.getLabels());
    ((OutputLayer) netFromList.getLayers()[1]).setLabels(fromList.getLabels());

    netFromIterator.gradient(); // OK
    netFromList.gradient(); // IllegalArgumentException: Buffers must fill up specified length 29
  }
  /**
   * Asserts that each layer's configured and instantiated nIn/nOut — and the
   * resulting weight-matrix shape — match the expected values.
   *
   * @param expNin expected number of inputs for each layer
   * @param expNout expected number of outputs for each layer; must be the same
   *     length as {@code expNin}
   * @param conf configuration whose per-layer nIn/nOut is checked
   * @param network initialized network whose layers and weight shapes are checked
   */
  private static void checkNinNoutForEachLayer(
      int[] expNin, int[] expNout, MultiLayerConfiguration conf, MultiLayerNetwork network) {
    // Guard against mismatched expectation arrays: without this, a shorter
    // expNout would fail with an ArrayIndexOutOfBoundsException mid-loop.
    assertEquals(expNin.length, expNout.length);

    // Check configuration. Use assertEquals (not assertTrue(a == b)) so a
    // failure reports the expected and actual values.
    for (int i = 0; i < expNin.length; i++) {
      NeuralNetConfiguration layerConf = conf.getConf(i);
      assertEquals(expNin[i], layerConf.getNIn());
      assertEquals(expNout[i], layerConf.getNOut());
    }

    // Check each Layer instance, including its weight shape (nIn x nOut)
    for (int i = 0; i < expNin.length; i++) {
      Layer layer = network.getLayers()[i];
      assertEquals(expNin[i], layer.conf().getNIn());
      assertEquals(expNout[i], layer.conf().getNOut());
      int[] weightShape = layer.getParam(DefaultParamInitializer.WEIGHT_KEY).shape();
      assertEquals(expNin[i], weightShape[0]);
      assertEquals(expNout[i], weightShape[1]);
    }
  }