  /**
   * Round-trips a configuration through JSON, then through a java.util.Properties file on disk,
   * checking that the layer configuration survives both serialization paths.
   */
  @Test
  public void testJson() throws Exception {
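    // Single RBM layer with a custom weight distribution and a reshaping input pre-processor.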
    MultiLayerConfiguration conf =
        new NeuralNetConfiguration.Builder()
            .list()
            .layer(0, new RBM.Builder().dist(new NormalDistribution(1, 1e-1)).build())
            .inputPreProcessor(0, new ReshapePreProcessor())
            .build();

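    // JSON round trip: the reconstructed configuration should equal the original.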
    String json = conf.toJson();
    MultiLayerConfiguration from = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf.getConf(0), from.getConf(0));

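    // Persist the JSON via a Properties file, reload it, and rebuild the configuration from the stored value.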
    Properties props = new Properties();
    props.put("json", json);
    String storedJson = props.getProperty("json");
    assertEquals(json, storedJson);
    File f = new File("props");
    f.deleteOnExit();
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(f))) {
      props.store(bos, "");
    }
    Properties props2 = new Properties();
    try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f))) {
      props2.load(bis);
    }
    assertEquals(props.getProperty("json"), props2.getProperty("json"));
    String json2 = props2.getProperty("json");
    MultiLayerConfiguration conf3 = MultiLayerConfiguration.fromJson(json2);
    assertEquals(conf.getConf(0), conf3.getConf(0));
  }
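
  /**
   * The explicit layer(index, Layer) calls and the varargs list(Layer...) overload should all
   * produce equal configurations.
   */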
  @Test
  public void testListOverloads() {

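    // Variant 1: list() followed by explicit layer(index, Layer) calls.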
    MultiLayerConfiguration conf =
        new NeuralNetConfiguration.Builder()
            .seed(12345)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
            .layer(1, new OutputLayer.Builder().nIn(4).nOut(5).build())
            .pretrain(false)
            .backprop(true)
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

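    // The layer sizes set on the builders should be preserved in the built configuration.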
    DenseLayer dl = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals(3, dl.getNIn());
    assertEquals(4, dl.getNOut());
    OutputLayer ol = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals(4, ol.getNIn());
    assertEquals(5, ol.getNOut());

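    // Variant 2: an identical builder sequence should produce an equal configuration.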
    MultiLayerConfiguration conf2 =
        new NeuralNetConfiguration.Builder()
            .seed(12345)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
            .layer(1, new OutputLayer.Builder().nIn(4).nOut(5).build())
            .pretrain(false)
            .backprop(true)
            .build();
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();

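    // Variant 3: the varargs list(Layer...) overload.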
    MultiLayerConfiguration conf3 =
        new NeuralNetConfiguration.Builder()
            .seed(12345)
            .list(
                new DenseLayer.Builder().nIn(3).nOut(4).build(),
                new OutputLayer.Builder().nIn(4).nOut(5).build())
            .pretrain(false)
            .backprop(true)
            .build();
    MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
    net3.init();

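    // All three construction paths should yield equal configurations.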
    assertEquals(conf, conf2);
    assertEquals(conf, conf3);
  }
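
  /**
   * clone() should produce a deep copy: value-equal to the original, but with distinct layer
   * configurations and input pre-processors.
   */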
  @Test
  public void testClone() {
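    // Two-layer configuration with an input pre-processor, so clone() has nested objects to copy.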
    MultiLayerConfiguration conf =
        new NeuralNetConfiguration.Builder()
            .list()
            .layer(0, new RBM.Builder().build())
            .layer(1, new OutputLayer.Builder().build())
            .inputPreProcessor(1, new ReshapePreProcessor(new int[] {1, 2}, new int[] {3, 4}))
            .build();

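    // The clone must be value-equal but must not share any mutable sub-objects with the original.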
    MultiLayerConfiguration conf2 = conf.clone();

    assertEquals(conf, conf2);
    assertNotSame(conf, conf2);
    assertNotSame(conf.getConfs(), conf2.getConfs());
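    // Each per-layer configuration and each input pre-processor must be a separate instance.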
    for (int i = 0; i < conf.getConfs().size(); i++) {
      assertNotSame(conf.getConf(i), conf2.getConf(i));
    }
    assertNotSame(conf.getInputPreProcessors(), conf2.getInputPreProcessors());
    for (Integer layer : conf.getInputPreProcessors().keySet()) {
      assertNotSame(conf.getInputPreProcess(layer), conf2.getInputPreProcess(layer));
    }
  }