public void testFactoryFeedforward() {
    // Architecture: 1 input neuron with bias, TANH into a 3-neuron hidden
    // layer without bias, LINEAR into 4 output neurons with bias.
    String architecture = "?:B->TANH->3->LINEAR->?:B";
    MLMethodFactory factory = new MLMethodFactory();
    BasicNetwork network =
        (BasicNetwork) factory.create(MLMethodFactory.TYPE_FEEDFORWARD, architecture, 1, 4);

    // Bias flags follow the ":B" markers in the architecture string.
    Assert.assertTrue(network.isLayerBiased(0));
    Assert.assertFalse(network.isLayerBiased(1));
    Assert.assertTrue(network.isLayerBiased(2));

    Assert.assertEquals(3, network.getLayerCount());

    // The input layer gets a linear activation by default; each activation
    // named in the architecture string applies to the layer that follows it.
    Assert.assertTrue(network.getActivation(0) instanceof ActivationLinear);
    Assert.assertTrue(network.getActivation(1) instanceof ActivationTANH);
    Assert.assertTrue(network.getActivation(2) instanceof ActivationLinear);

    // (1 input + 1 bias) * 3 hidden + 3 hidden * 4 output = 18 weights.
    Assert.assertEquals(18, network.encodedArrayLength());

    Assert.assertEquals(1, network.getLayerNeuronCount(0));
    Assert.assertEquals(3, network.getLayerNeuronCount(1));
    Assert.assertEquals(4, network.getLayerNeuronCount(2));
}
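
// A minimal driver sketch (an illustration, not part of the original excerpt)
// showing how the Nguyen-Widrow-style helper below is typically invoked: once
// for every pair of adjacent layers. The randomize(BasicNetwork) signature is
// assumed here for clarity rather than taken from the framework.
public void randomize(BasicNetwork network) {
    // Initialize the weights feeding each layer from the layer before it.
    for (int fromLayer = 0; fromLayer < network.getLayerCount() - 1; fromLayer++) {
        randomizeSynapse(network, fromLayer);
    }
}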

// Nguyen-Widrow-style initialization of the weights between fromLayer and the
// layer that follows it.
private void randomizeSynapse(BasicNetwork network, int fromLayer) {
    int toLayer = fromLayer + 1;
    int toCount = network.getLayerNeuronCount(toLayer);
    int fromCount = network.getLayerNeuronCount(fromLayer);
    // Total count includes the bias neuron, if the source layer has one.
    int fromCountTotalCount = network.getLayerTotalNeuronCount(fromLayer);
    ActivationFunction af = network.getActivation(toLayer);

    // Probe the target layer's activation at extreme inputs to estimate its
    // output range. Note: Double.MIN_VALUE is the smallest positive double,
    // not the most negative one, so -Double.MAX_VALUE is used for the low end.
    double low = calculateRange(af, -Double.MAX_VALUE);
    double high = calculateRange(af, Double.MAX_VALUE);

    // Nguyen-Widrow scaling factor, beta = 0.7 * h^(1/n), scaled by the
    // activation's output range.
    double b = 0.7d * Math.pow(toCount, 1d / fromCount) / (high - low);

    for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
        // If the source layer has a bias neuron (index fromCount), give its
        // connection a weight drawn from (-b, b).
        if (fromCount != fromCountTotalCount) {
            double w = nextDouble(-b, b);
            network.setWeight(fromLayer, fromCount, toNeuron, w);
        }
        // Regular connections get weights drawn from (0, b).
        for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++) {
            double w = nextDouble(0, b);
            network.setWeight(fromLayer, fromNeuron, toNeuron, w);
        }
    }
}
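
// Hedged sketches (assumptions, not the framework's own code) of the two
// helpers referenced above, added so the snippet is self-contained.

// Pushes a single value through the activation function and returns the
// result; used above to estimate the low and high ends of the activation's
// output range.
private double calculateRange(ActivationFunction af, double r) {
    double[] d = { r };
    af.activationFunction(d, 0, 1);
    return d[0];
}

// Returns a uniformly distributed random double in [low, high).
private double nextDouble(double low, double high) {
    return low + (high - low) * Math.random();
}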