Code example #1
  /**
   * Construct a network analyzer and analyze the specified network.
   *
   * @param network The network to analyze.
   */
  public AnalyzeNetwork(final BasicNetwork network) {
    // Note: these two counters are never incremented in this constructor,
    // so disabledConnections and totalConnections are always assigned zero.
    final int assignDisabled = 0;
    final int assignedTotal = 0;
    final List<Double> biasList = new ArrayList<Double>();
    final List<Double> weightList = new ArrayList<Double>();
    final List<Double> allList = new ArrayList<Double>();

    for (int layerNumber = 0; layerNumber < network.getLayerCount() - 1; layerNumber++) {
      final int fromCount = network.getLayerNeuronCount(layerNumber);
      final int fromBiasCount = network.getLayerTotalNeuronCount(layerNumber);
      final int toCount = network.getLayerNeuronCount(layerNumber + 1);

      // weights
      for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++) {
        for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
          final double v = network.getWeight(layerNumber, fromNeuron, toNeuron);
          weightList.add(v);
          allList.add(v);
        }
      }

      // bias: when the total neuron count exceeds the regular neuron count,
      // the extra neuron (at index fromCount) is the layer's bias neuron
      if (fromCount != fromBiasCount) {
        final int biasNeuron = fromCount;
        for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
          final double v = network.getWeight(layerNumber, biasNeuron, toNeuron);
          biasList.add(v);
          allList.add(v);
        }
      }
    }

    // Dead code: the innermost loop body is empty, so this block has no effect.
    for (final Layer layer : network.getStructure().getLayers()) {
      if (layer.hasBias()) {
        for (int i = 0; i < layer.getNeuronCount(); i++) {}
      }
    }

    this.disabledConnections = assignDisabled;
    this.totalConnections = assignedTotal;
    this.weights = new NumericRange(weightList);
    this.bias = new NumericRange(biasList);
    this.weightsAndBias = new NumericRange(allList);
    this.weightValues = EngineArray.listToDouble(weightList);
    this.allValues = EngineArray.listToDouble(allList);
    this.biasValues = EngineArray.listToDouble(biasList);
  }
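
For context, a minimal usage sketch. It assumes an Encog 3.x BasicNetwork built in the usual way and that the fields assigned above are exposed through matching getters (getWeights(), getBias(), getWeightsAndBias()); treat the getter names as assumptions rather than a definitive API reference.

  // Usage sketch (assumed getter names; see note above).
  BasicNetwork network = new BasicNetwork();
  network.addLayer(new BasicLayer(null, true, 2));
  network.addLayer(new BasicLayer(new ActivationTANH(), true, 3));
  network.addLayer(new BasicLayer(new ActivationTANH(), false, 1));
  network.getStructure().finalizeStructure();
  network.reset(); // randomize the initial weights

  AnalyzeNetwork analyze = new AnalyzeNetwork(network);
  System.out.println("weights: " + analyze.getWeights());        // NumericRange over weights only
  System.out.println("bias:    " + analyze.getBias());           // NumericRange over bias values
  System.out.println("all:     " + analyze.getWeightsAndBias()); // combined range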
Code example #2
 public void testFactoryFeedforward() {
   // Each "?" in the architecture string is filled in from the input/output
   // counts passed to create() (1 and 4 below); ":B" requests a bias neuron
   // on that layer.
   String architecture = "?:B->TANH->3->LINEAR->?:B";
   MLMethodFactory factory = new MLMethodFactory();
   BasicNetwork network =
       (BasicNetwork) factory.create(MLMethodFactory.TYPE_FEEDFORWARD, architecture, 1, 4);
   Assert.assertTrue(network.isLayerBiased(0));
   Assert.assertFalse(network.isLayerBiased(1));
   Assert.assertTrue(network.isLayerBiased(2));
   Assert.assertEquals(3, network.getLayerCount());
   Assert.assertTrue(network.getActivation(0) instanceof ActivationLinear);
   Assert.assertTrue(network.getActivation(1) instanceof ActivationTANH);
   Assert.assertTrue(network.getActivation(2) instanceof ActivationLinear);
   Assert.assertEquals(18, network.encodedArrayLength()); // (1 + bias) * 3 + 3 * 4 = 18 weights
   Assert.assertEquals(1, network.getLayerNeuronCount(0));
   Assert.assertEquals(3, network.getLayerNeuronCount(1));
   Assert.assertEquals(4, network.getLayerNeuronCount(2));
 }
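
The same architecture grammar can be used outside of a test. A sketch, assuming the Encog 3.x factory shown above; the concrete layer sizes and activation names are illustrative:

  // Sketch: a 2-4-1 feedforward network (e.g. for XOR) from the same factory.
  MLMethodFactory factory = new MLMethodFactory();
  BasicNetwork xorNet = (BasicNetwork) factory.create(
      MLMethodFactory.TYPE_FEEDFORWARD,
      "?:B->SIGMOID->4:B->SIGMOID->?",
      2, 1);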
Code example #3
  private void randomizeSynapse(BasicNetwork network, int fromLayer) {
    int toLayer = fromLayer + 1;
    int toCount = network.getLayerNeuronCount(toLayer);
    int fromCount = network.getLayerNeuronCount(fromLayer);
    int fromCountTotalCount = network.getLayerTotalNeuronCount(fromLayer);
    ActivationFunction af = network.getActivation(toLayer);
    // Output range of the target layer's activation function. (Note that
    // Double.MIN_VALUE is the smallest positive double, not the most negative
    // value, so "low" is evaluated just above zero.)
    double low = calculateRange(af, Double.MIN_VALUE);
    double high = calculateRange(af, Double.MAX_VALUE);

    // Nguyen-Widrow-style scaling factor: 0.7 * toCount^(1 / fromCount),
    // normalized by the activation's output range.
    double b = 0.7d * Math.pow(toCount, (1d / fromCount)) / (high - low);

    for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
      // If the "from" layer has a bias neuron (its index is fromCount),
      // randomize its weight symmetrically in [-b, b].
      if (fromCount != fromCountTotalCount) {
        double w = nextDouble(-b, b);
        network.setWeight(fromLayer, fromCount, toNeuron, w);
      }
      // Regular weights are drawn from [0, b].
      for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++) {
        double w = nextDouble(0, b);
        network.setWeight(fromLayer, fromNeuron, toNeuron, w);
      }
    }
    }
  }
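
This helper follows a Nguyen-Widrow-style weight initialization. A minimal driver sketch, assuming the method sits inside a randomizer class with access to nextDouble() and calculateRange(); the public entry point below is illustrative, not the library's own API:

  // Sketch: apply randomizeSynapse() to every adjacent pair of layers.
  public void randomize(BasicNetwork network) {
    for (int fromLayer = 0; fromLayer < network.getLayerCount() - 1; fromLayer++) {
      randomizeSynapse(network, fromLayer);
    }
  }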