public void fire() {
  // Accumulate the weighted sum of all input activations, starting from this
  // neuron's bias term.
  double weightedSum = bias;
  for (Synapse inputSynapse : getInputSynapses()) {
    weightedSum += inputSynapse.getWeight() * inputSynapse.getInputNeuron().getOutput();
  }
  // Keep the pre-activation value around (used elsewhere, e.g. for backprop).
  intermediateValue = weightedSum;
  // Squash the raw sum through the sigmoid activation to produce the output.
  setOutput(Sigmoid.getInstance().apply(weightedSum));
}
 int getNumberOfConnectedSynapsesToCurrentActiveNeuron(
     Neuron possiblyActiveInNextTimeStep, Neuron activeNeuron) {
   // NOTE: This is incredibly inefficient. Fix this by making activeNeuron
   // know which synapses from which neurons are connected to it.
   int numberOfConnectedSynapsesToCurrentActiveNeuron = 0;
   for (DistalSegment distalSegment : possiblyActiveInNextTimeStep.getDistalSegments()) {
     for (Synapse synapse : distalSegment.getConnectedSynapses()) {
       if (synapse.getCell().equals(activeNeuron)) {
         numberOfConnectedSynapsesToCurrentActiveNeuron++;
       }
     }
   }
   return numberOfConnectedSynapsesToCurrentActiveNeuron;
 }
 /**
  * Sets the values of the given list as weights in the synapse layer
  *
  * @param list The list of weights
  * @return Boolean indicating if the operation succeeded
  */
 public boolean setWeightVector(Vector<Double> list) {
   this.checker.init("setWeightVector");
   this.checker.addCheck(list != null, "The given list of weights is null.");
   this.checker.addCheck(list.size() > 0, "The given list of weights is empty.");
   if (this.checker.isSecure()) {
     if (list != null) {
       int offset = 0;
       for (SynapseLayer synlayer : this.synapse_layers)
         for (Synapse synapse : synlayer.synapses)
           synapse.weight = list.get(offset++).doubleValue();
     } else
       System.err.println(
           "ERROR: Tried to set the network's weights by providing a null pointer instead of the weight list.");
     return true;
   }
   return false;
 }
// Exemplo n.º 4
// 0
 /**
  * Updates this {@code Pool}'s store of permanences for the specified {@link Synapse}
  *
  * @param c the connections memory
  * @param s the synapse who's permanence is recorded
  * @param permanence the permanence value to record
  */
 public void updatePool(Connections c, Synapse s, double permanence) {
   int inputIndex = s.getInputIndex();
   if (synapsesBySourceIndex.get(inputIndex) == null) {
     synapsesBySourceIndex.put(inputIndex, s);
   }
   if (permanence > c.getSynPermConnected()) {
     synapseConnections.add(inputIndex);
   } else {
     synapseConnections.remove(inputIndex);
   }
 }
 /**
  * Backpropogates the training data to all the incomming synapses.
  *
  * @since 1.0
  */
 @Override
 public void backPropagate() {
   this.calculateDeltaTrain();
   // TODO fix this bad typing
   /*
   //step thru source synapses and make them learn their new weight.
   for(final Synapse currentSynapse : this.getBrain().getInEdges(this))
   {
   	final Neuron sourceNeuron = currentSynapse.getSourceNode();
   	if( sourceNeuron instanceof BackpropNeuron )
   	{
   		final BackpropNeuron sourceBackpropNeuron = (BackpropNeuron) sourceNeuron;
   		// TODO instead of only working on SimpleBackpropNeuron perhaps make deltaTrain part of a Backprop synapse
   		if( sourceBackpropNeuron instanceof SimpleBackpropNeuron )
   			((SimpleBackpropNeuron) sourceBackpropNeuron).deltaTrainDestinations.put(currentSynapse, this.deltaTrain);
   		currentSynapse.setWeight(currentSynapse.getWeight() + (this.deltaTrain * this.learningRate * currentSynapse.getInput()));
   	}
   }
   */
   // step thru source synapses and make them learn their new weight.
   for (final Object currentSynapse : this.getBrain().getInEdges(this)) {
     final Neuron sourceNeuron = (Neuron) ((Synapse) currentSynapse).getSourceNode();
     if (sourceNeuron instanceof BackpropNeuron) {
       final BackpropNeuron sourceBackpropNeuron = (BackpropNeuron) sourceNeuron;
       // TODO instead of only working on SimpleBackpropNeuron perhaps make deltaTrain part of a
       // Backprop synapse
       if (sourceBackpropNeuron instanceof SimpleBackpropNeuron)
         ((SimpleBackpropNeuron) sourceBackpropNeuron)
             .deltaTrainDestinations.put(((Synapse) currentSynapse), this.deltaTrain);
       ((Synapse) currentSynapse)
           .setWeight(
               ((Synapse) currentSynapse).getWeight()
                   + (this.deltaTrain
                       * this.learningRate
                       * ((Synapse) currentSynapse).getInput()));
     }
   }
 }
  /**
   * Call this method to run PredictionAlgorithm_1 once on a Region.
   *
   * <p>MAIN LOGIC: For each learning neuron in an active column, connect to all previously active
   * neurons.
   */
  public void run() {
    // Step 1) Which neurons to apply logic to?
    // POSSIBLE ANSWER: Iterate through all neurons in active columns in region
    Set<ColumnPosition> activeColumnPositions = this.spatialPooler.getActiveColumnPositions();
    for (ColumnPosition ACP : activeColumnPositions) {
      // BUG FIX: the column must be addressed by (row, column); the original
      // passed ACP.getRow() twice, so only diagonal columns were ever fetched.
      Column activeColumn = super.getRegion().getColumn(ACP.getRow(), ACP.getColumn());
      Neuron learningNeuron = this.getNeuronWithLeastNumberOfConnectedSynapses(activeColumn);

      // Step 2) How do you allow neuronA to predict neuronB will become
      // active in the next time step?
      // POSSIBLE ANSWER: For each learning neuron connect to all
      // previously active neurons. 1 new distal segment per learning neuron.
      DistalSegment distalSegment = new DistalSegment();

      for (Neuron previouslyActiveNeuron : this.wasActiveNeurons) {
        distalSegment.addSynapse(
            new Synapse<>(previouslyActiveNeuron, Synapse.MINIMAL_CONNECTED_PERMANENCE, -1, -1));
      }
      learningNeuron.addDistalSegment(distalSegment);

      // Step 3) Which neurons should be active for the current time step?
      // POSSIBLE ANSWER: The active neurons that best represent the
      //                  current sensory input.
      //                                      2
      // EXAMPLE: Imagine you saw "2 - 1" and - . Although the minus
      //                                      1
      //          symbol can also represent division you are not confused
      //          because the "2" and "1" are in different locations.
      //          Your brain saw the "2" and "1" SDRs as well as the SDR
      //          for how your eye moved while looking at "2", "1", and
      //          "-" in sequence so when you saw "-" you knew that it
      //          meant minus or division.
      //
      // CONCLUSION: We want the current SDR to be the active neurons that
      //             are most connected to all previous active SDRs. In
      //             this case it includes vision and eye muscle SDRs.
      Neuron activeNeuron = this.computeActiveNeuron(activeColumn);
      activeNeuron.setActiveState(true);
      this.isActiveNeurons.add(activeNeuron);
    }

    // Step 4) What neurons can be used for prediction?
    // POSSIBLE ANSWER: which neurons currently have the most # of connected
    // (NOT active Cells)
    // synapses across all distal dendrites connected to the current set of
    // active neurons. This is where we reward all the competition between
    // all synapses to represent an connection to a past time step.

    // NOTE: connectionScores = sorted # of connected synapses for each neuron in Region
    Set<Integer> connectionScores = this.getConnectionScores();

    // Pick the score threshold so that roughly one neuron per active column predicts.
    int index =
        Math.max(0, connectionScores.size() - this.spatialPooler.getActiveColumnPositions().size());
    int minimumConnectionScore = (Integer) connectionScores.toArray()[index];

    // Step 5) How many number of predicting neurons?
    // POSSIBLE ANSWER: same number of currently active neurons.
    this.updateIsPredictingNeurons(minimumConnectionScore);

    // Step 6) Which synapse connections should be strengthened to model
    // long term potentiation?
    // POSSIBLE ANSWER: Current time-step is @t=4. Strengthen the
    // connection between neuronBs that isActive @t=4 and isPredicting
    // @t=3 and neuronA that isActive @t=3.
    for (Neuron activeNeuronBatTequals4 : this.isActiveNeurons) {
      if (activeNeuronBatTequals4.getPreviousPredictingState()) {

        for (DistalSegment distalSegment : activeNeuronBatTequals4.getDistalSegments()) {
          for (Synapse synapse : distalSegment.getSynapses()) {
            // increase permanence of connection with
            // neuronAs' active @t=3.
            if (synapse.getCell().getPreviousActiveState()) {
              synapse.increasePermanence();
            }
          }
        }
      }
    }

    // Step 7) Which synapse connections should be weakened to model
    // long term depression?
    // POSSIBLE ANSWER: Current time-step is @t=4. Weaken the connection
    // between neuronBs that isActive=False @t=4 and isPredicting @t=3
    // and neuronA that isActive @t=3.
    Column[][] columns = super.region.getColumns();
    for (int ri = 0; ri < columns.length; ri++) {
      for (int ci = 0; ci < columns[0].length; ci++) {
        for (Neuron inActiveNeuronBatTequals4 : columns[ri][ci].getNeurons()) {
          if (!inActiveNeuronBatTequals4.getActiveState()
              && inActiveNeuronBatTequals4.getPreviousPredictingState()) {

            for (DistalSegment distalSegment : inActiveNeuronBatTequals4.getDistalSegments()) {
              for (Synapse synapse : distalSegment.getSynapses()) {
                // decrease permanence of connection with
                // neuronA' active @t=3.
                if (synapse.getCell().getPreviousActiveState()) {
                  synapse.decreasePermanence();
                }
              }
            }
          }
        }
      }
    }

    // Advance internal state: current active/predicting sets become "previous".
    this.nextTimeStep();
  }
  /**
   * Builds a complete PMML 4.0 document describing a single-hidden-layer feed-forward
   * neural network with logistic hidden activation and identity output activation.
   *
   * <p>Weights are consumed from {@code weights} in this fixed order: for each hidden
   * neuron its bias followed by one weight per input field; then for each output neuron
   * its bias followed by one weight per hidden neuron.
   *
   * @param modelName name recorded on the generated NeuralNetwork model
   * @param inputfieldNames names of the continuous input fields
   * @param outputfieldNames names of the continuous predicted output fields
   * @param inputMeans per-input means used for linear normalization
   * @param inputStds per-input standard deviations used for linear normalization
   * @param outputMeans per-output means used for linear normalization
   * @param outputStds per-output standard deviations used for linear normalization
   * @param hiddenSize number of neurons in the single hidden layer
   * @param weights flattened bias/weight vector — see ordering note above
   * @return the populated PMML document
   */
  public static PMML generateSimpleNeuralNetwork(
      String modelName,
      String[] inputfieldNames,
      String[] outputfieldNames,
      double[] inputMeans,
      double[] inputStds,
      double[] outputMeans,
      double[] outputStds,
      int hiddenSize,
      double[] weights) {

    // counter assigns globally sequential neuron ids (inputs, then hidden, then output);
    // wtsIndex walks the flat weights array in the documented order.
    int counter = 0;
    int wtsIndex = 0;
    PMML pmml = new PMML();
    pmml.setVersion("4.0");

    // --- Header: tool provenance metadata ---
    Header header = new Header();
    Application app = new Application();
    app.setName("Drools PMML Generator");
    app.setVersion("0.01 Alpha");
    header.setApplication(app);

    header.setCopyright("BSD");

    header.setDescription(" Smart Vent Model ");

    Timestamp ts = new Timestamp();
    ts.getContent().add(new java.util.Date().toString());
    header.setTimestamp(ts);

    pmml.setHeader(header);

    // --- DataDictionary: declare every input and output field as a continuous double ---
    DataDictionary dic = new DataDictionary();
    dic.setNumberOfFields(BigInteger.valueOf(inputfieldNames.length + outputfieldNames.length));
    for (String ifld : inputfieldNames) {
      DataField dataField = new DataField();
      dataField.setName(ifld);
      dataField.setDataType(DATATYPE.DOUBLE);
      dataField.setDisplayName(ifld);
      dataField.setOptype(OPTYPE.CONTINUOUS);
      dic.getDataFields().add(dataField);
    }
    for (String ofld : outputfieldNames) {
      DataField dataField = new DataField();
      dataField.setName(ofld);
      dataField.setDataType(DATATYPE.DOUBLE);
      dataField.setDisplayName(ofld);
      dataField.setOptype(OPTYPE.CONTINUOUS);
      dic.getDataFields().add(dataField);
    }

    pmml.setDataDictionary(dic);

    // --- NeuralNetwork model shell: logistic activations, regression output ---
    NeuralNetwork nnet = new NeuralNetwork();
    nnet.setActivationFunction(ACTIVATIONFUNCTION.LOGISTIC);
    nnet.setFunctionName(MININGFUNCTION.REGRESSION);
    nnet.setNormalizationMethod(NNNORMALIZATIONMETHOD.NONE);
    nnet.setModelName(modelName);

    // --- MiningSchema: inputs are ACTIVE fields, outputs are PREDICTED fields ---
    MiningSchema miningSchema = new MiningSchema();
    for (String ifld : inputfieldNames) {
      MiningField mfld = new MiningField();
      mfld.setName(ifld);
      mfld.setOptype(OPTYPE.CONTINUOUS);
      mfld.setUsageType(FIELDUSAGETYPE.ACTIVE);
      miningSchema.getMiningFields().add(mfld);
    }
    for (String ofld : outputfieldNames) {
      MiningField mfld = new MiningField();
      mfld.setName(ofld);
      mfld.setOptype(OPTYPE.CONTINUOUS);
      mfld.setUsageType(FIELDUSAGETYPE.PREDICTED);
      miningSchema.getMiningFields().add(mfld);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(miningSchema);

    // --- Output: one "Out_<field>" output per predicted field ---
    Output outputs = new Output();
    for (String ofld : outputfieldNames) {
      OutputField outFld = new OutputField();
      outFld.setName("Out_" + ofld);
      outFld.setTargetField(ofld);
      outputs.getOutputFields().add(outFld);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(outputs);

    // --- NeuralInputs: one normalized neural input per input field ---
    NeuralInputs nins = new NeuralInputs();
    nins.setNumberOfInputs(BigInteger.valueOf(inputfieldNames.length));

    for (int j = 0; j < inputfieldNames.length; j++) {
      String ifld = inputfieldNames[j];
      NeuralInput nin = new NeuralInput();
      nin.setId("" + counter++);
      DerivedField der = new DerivedField();
      der.setDataType(DATATYPE.DOUBLE);
      der.setOptype(OPTYPE.CONTINUOUS);
      NormContinuous nc = new NormContinuous();
      nc.setField(ifld);
      nc.setOutliers(OUTLIERTREATMENTMETHOD.AS_IS);
      // Two LinearNorm anchor points define the piecewise-linear map
      // (0 -> -mean/std) and (mean -> 0), i.e. norm = (orig - mean) / std.
      LinearNorm lin1 = new LinearNorm();
      lin1.setOrig(0);
      lin1.setNorm(-inputMeans[j] / inputStds[j]);
      nc.getLinearNorms().add(lin1);
      LinearNorm lin2 = new LinearNorm();
      lin2.setOrig(inputMeans[j]);
      lin2.setNorm(0);
      nc.getLinearNorms().add(lin2);
      der.setNormContinuous(nc);
      nin.setDerivedField(der);
      nins.getNeuralInputs().add(nin);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(nins);

    // --- Hidden layer: each neuron takes a bias then one weight per input ---
    NeuralLayer hidden = new NeuralLayer();
    hidden.setNumberOfNeurons(BigInteger.valueOf(hiddenSize));

    for (int j = 0; j < hiddenSize; j++) {
      Neuron n = new Neuron();
      n.setId("" + counter++);
      n.setBias(weights[wtsIndex++]);
      for (int k = 0; k < inputfieldNames.length; k++) {
        Synapse con = new Synapse();
        con.setFrom("" + k);
        con.setWeight(weights[wtsIndex++]);
        n.getCons().add(con);
      }
      hidden.getNeurons().add(n);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(hidden);

    // --- Output layer: identity activation; each neuron connects to every hidden neuron ---
    NeuralLayer outer = new NeuralLayer();
    outer.setActivationFunction(ACTIVATIONFUNCTION.IDENTITY);
    outer.setNumberOfNeurons(BigInteger.valueOf(outputfieldNames.length));

    for (int j = 0; j < outputfieldNames.length; j++) {
      Neuron n = new Neuron();
      n.setId("" + counter++);
      n.setBias(weights[wtsIndex++]);
      for (int k = 0; k < hiddenSize; k++) {
        Synapse con = new Synapse();
        // Hidden-neuron ids start right after the input ids.
        con.setFrom("" + (k + inputfieldNames.length));
        con.setWeight(weights[wtsIndex++]);
        n.getCons().add(con);
      }
      outer.getNeurons().add(n);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(outer);

    // --- NeuralOutputs: map each output neuron back to its de-normalized field ---
    NeuralOutputs finalOuts = new NeuralOutputs();
    finalOuts.setNumberOfOutputs(BigInteger.valueOf(outputfieldNames.length));
    for (int j = 0; j < outputfieldNames.length; j++) {
      NeuralOutput output = new NeuralOutput();
      // Output-neuron ids start after all input and hidden ids.
      output.setOutputNeuron("" + (j + inputfieldNames.length + hiddenSize));
      DerivedField der = new DerivedField();
      der.setDataType(DATATYPE.DOUBLE);
      der.setOptype(OPTYPE.CONTINUOUS);
      NormContinuous nc = new NormContinuous();
      nc.setField(outputfieldNames[j]);
      nc.setOutliers(OUTLIERTREATMENTMETHOD.AS_IS);
      // Same two-anchor linear map as the inputs: norm = (orig - mean) / std.
      LinearNorm lin1 = new LinearNorm();
      lin1.setOrig(0);
      lin1.setNorm(-outputMeans[j] / outputStds[j]);
      nc.getLinearNorms().add(lin1);
      LinearNorm lin2 = new LinearNorm();
      lin2.setOrig(outputMeans[j]);
      lin2.setNorm(0);
      nc.getLinearNorms().add(lin2);
      der.setNormContinuous(nc);
      output.setDerivedField(der);
      finalOuts.getNeuralOutputs().add(output);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(finalOuts);

    pmml.getAssociationModelsAndBaselineModelsAndClusteringModels().add(nnet);

    return pmml;
  }
// Exemplo n.º 8
// 0
  /**
   * Sets the specified permanence value for the specified {@link Synapse}
   *
   * @param c the connections memory holding the connection threshold configuration
   * @param s the synapse whose permanence is being set
   * @param permanence the new permanence value to record
   */
  public void setPermanence(Connections c, Synapse s, double permanence) {
    // Keep this Pool's connected-synapse bookkeeping in sync before storing the value.
    updatePool(c, s, permanence);
    s.setPermanence(c, permanence);
  }
// Exemplo n.º 9
// 0
  /**
   * Returns the permanence value for the {@link Synapse} specified.
   *
   * @param s the Synapse
   * @return the permanence
   */
  public double getPermanence(Synapse s) {
    // NOTE(review): throws NullPointerException if the synapse's source index was
    // never registered via updatePool — confirm callers guarantee prior registration.
    return synapsesBySourceIndex.get(s.getInputIndex()).getPermanence();
  }