Example #1
  /**
   * Convert to an array. This is used with some training algorithms that require that the "memory"
   * of the neuron (the weight and bias values) be expressed as a linear array.
   *
   * @param network The network to encode.
   * @return The memory of the neuron.
   */
  public static double[] networkToArray(final BasicNetwork network) {
    final int size = network.getStructure().calculateSize();

    // allocate an array to hold the network's weights and biases
    final double[] result = new double[size];

    int index = 0;

    for (final Layer layer : network.getStructure().getLayers()) {
      // process layer bias
      if (layer.hasBias()) {
        for (int i = 0; i < layer.getNeuronCount(); i++) {
          result[index++] = layer.getBiasWeight(i);
        }
      }

      // process synapses
      for (final Synapse synapse : network.getStructure().getPreviousSynapses(layer)) {
        if (synapse.getMatrix() != null) {
          // process each weight matrix
          for (int x = 0; x < synapse.getToNeuronCount(); x++) {
            for (int y = 0; y < synapse.getFromNeuronCount(); y++) {
              result[index++] = synapse.getMatrix().get(y, x);
            }
          }
        }
      }
    }

    return result;
  }
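A short usage sketch for this method: build a small network with the Encog classes already shown on this page (BasicNetwork, BasicLayer) and flatten its memory into one array. ActivationSigmoid and the omission of imports are assumptions; the snippet follows the import-less style of the examples above.

  // Usage sketch: build a small network, then flatten its memory into one array.
  public static void encodeExample() {
    // A 2-3-1 feedforward network with bias on every layer.
    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // Every layer bias and every weight-matrix entry ends up in this linear array.
    final double[] encoded = NetworkCODEC.networkToArray(network);
    System.out.println("Encoded " + encoded.length + " weight and bias values");
  }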
Example #2
  /**
   * Generate the network.
   *
   * @return The generated network.
   */
  public BasicNetwork generate() {

    Layer input, instar, outstar;
    int y = PatternConst.START_Y;

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(input = new BasicLayer(new ActivationLinear(), false, this.inputCount));
    network.addLayer(instar = new BasicLayer(new ActivationCompetitive(), false, this.instarCount));
    network.addLayer(outstar = new BasicLayer(new ActivationLinear(), false, this.outstarCount));
    network.getStructure().finalizeStructure();
    network.reset();

    input.setX(PatternConst.START_X);
    input.setY(y);
    y += PatternConst.INC_Y;

    instar.setX(PatternConst.START_X);
    instar.setY(y);
    y += PatternConst.INC_Y;

    outstar.setX(PatternConst.START_X);
    outstar.setY(y);

    // tag as needed
    network.tagLayer(BasicNetwork.TAG_INPUT, input);
    network.tagLayer(BasicNetwork.TAG_OUTPUT, outstar);
    network.tagLayer(CPNPattern.TAG_INSTAR, instar);
    network.tagLayer(CPNPattern.TAG_OUTSTAR, outstar);

    return network;
  }
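A hedged usage sketch: this generate() appears to belong to Encog's CPNPattern, since it tags CPNPattern.TAG_INSTAR. The setter names below are assumptions standing in for whatever populates the inputCount, instarCount, and outstarCount fields, and may differ between Encog versions.

  // Usage sketch: setter names are assumed, not confirmed against a specific Encog release.
  public static BasicNetwork buildCpn() {
    final CPNPattern pattern = new CPNPattern();
    pattern.setInputNeurons(5);  // assumed setter for this.inputCount
    pattern.setInstarCount(7);   // assumed setter for this.instarCount
    pattern.setOutstarCount(3);  // assumed setter for this.outstarCount

    // Layers are added in order: linear input, competitive instar, linear outstar.
    return pattern.generate();
  }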
Example #3
  /**
   * Construct a network analyze class. Analyze the specified network.
   *
   * @param network The network to analyze.
   */
  public AnalyzeNetwork(final BasicNetwork network) {
    int assignDisabled = 0;
    int assignedTotal = 0;
    final List<Double> biasList = new ArrayList<Double>();
    final List<Double> weightList = new ArrayList<Double>();
    final List<Double> allList = new ArrayList<Double>();

    for (int layerNumber = 0; layerNumber < network.getLayerCount() - 1; layerNumber++) {
      final int fromCount = network.getLayerNeuronCount(layerNumber);
      final int fromBiasCount = network.getLayerTotalNeuronCount(layerNumber);
      final int toCount = network.getLayerNeuronCount(layerNumber + 1);

      // weights
      for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++) {
        for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
          final double v = network.getWeight(layerNumber, fromNeuron, toNeuron);

          // Count a connection as disabled when limiting is on and the weight has
          // dropped below the limit (assumes the structure exposes
          // isConnectionLimited()/getConnectionLimit(), as in Encog).
          if (network.getStructure().isConnectionLimited()
              && Math.abs(v) < network.getStructure().getConnectionLimit()) {
            assignDisabled++;
          }

          weightList.add(v);
          allList.add(v);
          assignedTotal++;
        }
      }

      // bias
      if (fromCount != fromBiasCount) {
        final int biasNeuron = fromCount;
        for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
          final double v = network.getWeight(layerNumber, biasNeuron, toNeuron);
          biasList.add(v);
          allList.add(v);
          assignedTotal++;
        }
      }
    }

    this.disabledConnections = assignDisabled;
    this.totalConnections = assignedTotal;
    this.weights = new NumericRange(weightList);
    this.bias = new NumericRange(biasList);
    this.weightsAndBias = new NumericRange(allList);
    this.weightValues = EngineArray.listToDouble(weightList);
    this.allValues = EngineArray.listToDouble(allList);
    this.biasValues = EngineArray.listToDouble(biasList);
  }
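A usage sketch for the analyzer; the getter names mirror the fields assigned at the end of the constructor and are assumptions.

  // Usage sketch: getters are assumed to mirror the fields set above.
  public static void printAnalysis(final BasicNetwork network) {
    final AnalyzeNetwork analysis = new AnalyzeNetwork(network);
    System.out.println("Weights:          " + analysis.getWeights());        // range over weightList
    System.out.println("Bias:             " + analysis.getBias());           // range over biasList
    System.out.println("Weights and bias: " + analysis.getWeightsAndBias()); // range over allList
    System.out.println("Connections: " + analysis.getTotalConnections()
        + " total, " + analysis.getDisabledConnections() + " disabled");
  }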
Example #4
  /**
   * Use an array to populate the memory of the neural network.
   *
   * @param array An array of doubles.
   * @param network The network to populate.
   */
  public static void arrayToNetwork(final double[] array, final BasicNetwork network) {

    int index = 0;

    for (final Layer layer : network.getStructure().getLayers()) {
      if (layer.hasBias()) {
        // process layer bias
        for (int i = 0; i < layer.getNeuronCount(); i++) {
          layer.setBiasWeight(i, array[index++]);
        }
      }

      if (network.getStructure().isConnectionLimited()) {
        index = NetworkCODEC.processSynapseLimited(network, layer, array, index);
      } else {
        index = NetworkCODEC.processSynapseFull(network, layer, array, index);
      }
    }
  }
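Paired with networkToArray from the first example, this gives the round trip a training algorithm relies on: read the flat weight vector, adjust it, and write it back. A sketch, assuming a finalized network as in the earlier snippets:

  // Usage sketch: round-trip the network's memory through the two CODEC methods.
  public static void perturb(final BasicNetwork network) {
    final double[] weights = NetworkCODEC.networkToArray(network);

    // Stand-in for whatever adjustment a training algorithm would make.
    for (int i = 0; i < weights.length; i++) {
      weights[i] += 0.01;
    }

    // Write the adjusted values back into the layer biases and synapse matrices.
    NetworkCODEC.arrayToNetwork(weights, network);
  }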
Example #5
 /**
  * Generate the RSOM network.
  *
  * @return The neural network.
  */
 public BasicNetwork generate() {
   final Layer input = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
   final Layer output = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
   int y = PatternConst.START_Y;
   final BasicNetwork network = new BasicNetwork(new SOMLogic());
   network.addLayer(input);
   network.addLayer(output);
   input.setX(PatternConst.START_X);
   output.setX(PatternConst.START_X);
   input.setY(y);
   y += PatternConst.INC_Y;
   output.setY(y);
   network.getStructure().finalizeStructure();
   network.reset();
   return network;
 }
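A usage sketch, assuming this generate() lives in an Encog pattern class such as SOMPattern that exposes the setInputNeurons/setOutputNeurons setters of the NeuralNetworkPattern interface (the class name and setters are assumptions):

 // Usage sketch: class name and setters are assumptions.
 public static BasicNetwork buildSom() {
   final SOMPattern pattern = new SOMPattern();
   pattern.setInputNeurons(4);    // length of each input vector
   pattern.setOutputNeurons(16);  // number of map neurons, e.g. a 4x4 grid
   return pattern.generate();
 }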