Esempio n. 1
0
 public static void main(String[] args) {
   // Build a 2-input, 4-hidden, 1-output network and run its self-test.
   NeuralNetwork nn = new NeuralNetwork(2, 4, 1);
   // Training is currently disabled; re-enable with the original limits:
   // nn.run(50000 /* maxRuns */, 0.001 /* minErrorCondition */);
   nn.test();
 }
  /**
   * Verifies that an identity network returns its input unchanged,
   * element by element.
   */
  public void testIdentityNetwork() {
    // Anonymous subclass whose receive() simply echoes the input array.
    NeuralNetwork network =
        new NeuralNetwork() {
          public double[] receive(double[] input) {
            return input;
          }
        };

    double[] input = {1.0, 0.5, -2.0};

    // assertEquals on arrays compares references only (it would pass even
    // for a network returning the same reference but with mutated contents).
    // assertArrayEquals compares element-wise; expected value comes first.
    assertArrayEquals("Output was not correct.", input, network.receive(input), 0.0);
  }
Esempio n. 3
0
  public static void main(String[] args) {

    // Two sample parity problems: all ones, and one with a single zero bit.
    // (Java-style array declarations: `int[] x`, not C-style `int x[]`.)
    int[] pb1 = {1, 1, 1, 1};
    ParityProblem pp1 = new ParityProblem(pb1);

    int[] pb2 = {1, 0, 1, 1};
    ParityProblem pp2 = new ParityProblem(pb2);

    ParityProblem[] pp = {pp1, pp2};

    // NOTE(review): constructor args look like (size, learning rate) —
    // confirm against the NeuralNetwork(int, float) constructor.
    NeuralNetwork net = new NeuralNetwork(10, 0.5F);
    net.training(pp);
  }
Esempio n. 4
0
 /*
 Initialize Starting Population for Genetic Algorithm to commence Artificial Evolution
  */
 void initializePopulation(int popSize) {
   // Snapshot the network's current weights to seed every gene.
   float[][][] weights = NN.retrieveWeights();
   float[][] w1, w2;
   // NOTE(review): clone() on a float[][] is shallow — only the outer array is
   // copied; the inner float[] rows are still shared with the network.
   // NOTE(review): weights[0] is never read — confirm layer indexing is 1-based
   // in retrieveWeights().
   w1 = weights[1].clone();
   w2 = weights[2].clone();
   genes1 = new Gene[popSize];
   genes2 = new Gene[popSize];
   // Every gene receives the SAME w1/w2 references; the whole population shares
   // state unless Gene defensively copies its input — TODO confirm.
   for (int i = 0; i < popSize; i++) {
     genes1[i] = new Gene(w1);
     genes2[i] = new Gene(w2);
   }
 }
Esempio n. 5
0
 GeneticAlgorithm(int numData, float[][] labeledData) {
   // Initialization of Neural Network
   NN = new NeuralNetwork(4, 5, 1, numData, labeledData);
   // Initialization Genetic Population
   initializePopulation(15);
   // Start Loop
   // NOTE(review): this loops WHILE dError < tolerance, yet the comments below
   // describe exiting once the error drops below tolerance — the condition
   // looks inverted (expected: dError > tolerance). It also relies on
   // dError/tolerance being initialized elsewhere so the loop is entered at
   // all. Confirm against how dError is updated by forwardPropgate()/mutateGenes().
   while (dError < tolerance) {
     // Forward Propagate Neural Network
     NN.forwardPropgate();
     // If error < tolerance or doesn't change
     // Exit
     // Get Error and Retrieve top 5; clone all and mutate
     mutateGenes();
   }
 }
 @Override
 public void merge(NeuralNetwork network, int batchSize) {
   // Nudge this network's parameters toward the other network's by
   // (other - this) / batchSize — an incremental averaging step.
   //
   // Use non-mutating sub(...) so the OTHER network's parameters are left
   // untouched: the original called subi(...) on network.getvBias(), which
   // destructively overwrote that network's visible bias as a side effect.
   // The intermediate difference arrays are ours, so in-place divi/addi
   // on them is safe and avoids extra copies.
   W.addi(network.getW().sub(W).divi(batchSize));
   hBias.addi(network.gethBias().sub(hBias).divi(batchSize));
   vBias.addi(network.getvBias().sub(vBias).divi(batchSize));
 }
 @Override
 public NeuralNetwork clone() {
   try {
     // Reflection-based copy: instantiate through the (possibly non-public)
     // no-arg constructor rather than super.clone(), then transfer
     // configuration field by field.
     Constructor<?> c = Dl4jReflection.getEmptyConstructor(getClass());
     c.setAccessible(true);
     NeuralNetwork ret = (NeuralNetwork) c.newInstance();
     ret.setMomentumAfter(momentumAfter);
     ret.setResetAdaGradIterations(resetAdaGradIterations);
     // NOTE(review): the AdaGrad state objects (hBiasAdaGrad, vBiasAdaGrad,
     // wAdaGrad below) are shared by reference, not duplicated — confirm the
     // clone is meant to share optimizer state with the original.
     ret.setHbiasAdaGrad(hBiasAdaGrad);
     ret.setVBiasAdaGrad(vBiasAdaGrad);
     // Parameter matrices are duplicated (dup()) so the clone owns its copies.
     ret.sethBias(hBias.dup());
     ret.setvBias(vBias.dup());
     ret.setnHidden(getnHidden());
     ret.setnVisible(getnVisible());
     ret.setW(W.dup());
     ret.setL2(l2);
     ret.setMomentum(momentum);
     ret.setRenderEpochs(getRenderIterations());
     ret.setSparsity(sparsity);
     ret.setRng(getRng());
     ret.setDist(getDist());
     ret.setAdaGrad(wAdaGrad);
     ret.setLossFunction(lossFunction);
     ret.setConstrainGradientToUnitNorm(constrainGradientToUnitNorm);
     ret.setOptimizationAlgorithm(optimizationAlgo);
     return ret;
   } catch (Exception e) {
     // Surface any reflective failure as unchecked, preserving the cause.
     throw new RuntimeException(e);
   }
 }
  /**
   * Smoke test for the NeuralNetwork accessors: exercises weight get/set,
   * input get/set and output computation, printing results to stdout.
   */
  public static void main(String[] args) throws Exception {
    System.out.println("Testing Neural Network");

    NeuralNetwork neuralNetwork = new NeuralNetwork(NUM_LAYERS, numLayerNodes, true);

    System.out.println("--------------------------------------------------");
    System.out.println("Testing initialisation of weights");
    neuralNetwork.printWeights();

    System.out.println("--------------------------------------------------");
    System.out.println("Testing get weights");
    Double[][][] weights = neuralNetwork.getWeights();

    // Overwrite every weight with a sentinel value so set/print can confirm it.
    for (Double[][] layerWeights : weights) {
      for (Double[] nodeWeights : layerWeights) {
        for (int w = 0; w < nodeWeights.length; w++) {
          nodeWeights[w] = 9.0d;
        }
      }
    }

    System.out.println("--------------------------------------------------");
    System.out.println("Testing set weights");
    neuralNetwork.setWeights(weights);
    neuralNetwork.printWeights();

    System.out.println("--------------------------------------------------");
    System.out.println("Testing set inputs");
    Double[][][] inputs = new Double[NUM_LAYERS][][];

    for (int layer = 0; layer < inputs.length; layer++) {
      Double[][] layerInputs = new Double[numLayerNodes[layer]][];

      for (int node = 0; node < numLayerNodes[layer]; node++) {
        if (layer == 0) {
          // Input layer: a single 0.5 per node.
          layerInputs[node] = new Double[] {0.5d};
        } else {
          // Deeper layers: one 0.5 per node of the previous layer.
          Double[] fanIn = new Double[inputs[layer - 1].length];

          for (int t = 0; t < fanIn.length; t++) {
            fanIn[t] = 0.5d;
          }
          layerInputs[node] = fanIn;
        }
      }
      inputs[layer] = layerInputs;
    }

    neuralNetwork.setInput(inputs);

    System.out.println("--------------------------------------------------");
    System.out.println("Testing get Input");
    inputs = neuralNetwork.getInputs();

    // Dump every stored input, one node per line, layers separated by a rule.
    for (Double[][] layerInputs : inputs) {
      for (Double[] nodeInputs : layerInputs) {
        for (Double value : nodeInputs) {
          System.out.print(value + ", ");
        }
        System.out.println();
      }
      System.out.println("_____________________________________________");
    }

    System.out.println("--------------------------------------------------");
    System.out.println("Testing get Output");

    Double[][] testInput = new Double[numLayerNodes[0]][1];

    // Feed a descending ramp 1.0, 0.9, 0.8, ... into the input layer.
    for (int i = 0; i < testInput.length; i++) {
      testInput[i][0] = 1.0d - ((double) i / 10.0d);
      System.out.print(testInput[i][0] + ", ");
    }
    System.out.println("\n\nFinal Output: ");

    Double[] output = neuralNetwork.getOutput(testInput);

    for (Double value : output) {
      System.out.print(value + "; ");
    }
  }
  /**
   * Builds a complete PMML document describing a single-hidden-layer
   * feed-forward regression network.
   *
   * <p>{@code weights} is consumed sequentially via {@code wtsIndex}: for each
   * neuron, its bias first, then one weight per incoming connection.
   * {@code counter} assigns globally unique neuron/input ids in creation order
   * (inputs, then hidden neurons, then output neurons); the synapse "from"
   * references below depend on exactly this ordering.
   *
   * @param modelName        name recorded on the NeuralNetwork model
   * @param inputfieldNames  one data/mining field per network input
   * @param outputfieldNames one predicted field per network output
   * @param inputMeans       per-input means used for input normalization
   * @param inputStds        per-input standard deviations for normalization
   * @param outputMeans      per-output means used for output de-normalization
   * @param outputStds       per-output standard deviations
   * @param hiddenSize       number of neurons in the single hidden layer
   * @param weights          flat weight vector, consumed in the order above
   * @return the assembled PMML document
   */
  public static PMML generateSimpleNeuralNetwork(
      String modelName,
      String[] inputfieldNames,
      String[] outputfieldNames,
      double[] inputMeans,
      double[] inputStds,
      double[] outputMeans,
      double[] outputStds,
      int hiddenSize,
      double[] weights) {

    int counter = 0;
    int wtsIndex = 0;
    PMML pmml = new PMML();
    pmml.setVersion("4.0");

    // --- Header metadata (application, copyright, description, timestamp) ---
    Header header = new Header();
    Application app = new Application();
    app.setName("Drools PMML Generator");
    app.setVersion("0.01 Alpha");
    header.setApplication(app);

    header.setCopyright("BSD");

    header.setDescription(" Smart Vent Model ");

    Timestamp ts = new Timestamp();
    ts.getContent().add(new java.util.Date().toString());
    header.setTimestamp(ts);

    pmml.setHeader(header);

    // --- Data dictionary: one continuous double field per input and output ---
    DataDictionary dic = new DataDictionary();
    dic.setNumberOfFields(BigInteger.valueOf(inputfieldNames.length + outputfieldNames.length));
    for (String ifld : inputfieldNames) {
      DataField dataField = new DataField();
      dataField.setName(ifld);
      dataField.setDataType(DATATYPE.DOUBLE);
      dataField.setDisplayName(ifld);
      dataField.setOptype(OPTYPE.CONTINUOUS);
      dic.getDataFields().add(dataField);
    }
    for (String ofld : outputfieldNames) {
      DataField dataField = new DataField();
      dataField.setName(ofld);
      dataField.setDataType(DATATYPE.DOUBLE);
      dataField.setDisplayName(ofld);
      dataField.setOptype(OPTYPE.CONTINUOUS);
      dic.getDataFields().add(dataField);
    }

    pmml.setDataDictionary(dic);

    // --- Model shell: logistic hidden activation, regression, no output norm ---
    NeuralNetwork nnet = new NeuralNetwork();
    nnet.setActivationFunction(ACTIVATIONFUNCTION.LOGISTIC);
    nnet.setFunctionName(MININGFUNCTION.REGRESSION);
    nnet.setNormalizationMethod(NNNORMALIZATIONMETHOD.NONE);
    nnet.setModelName(modelName);

    // --- Mining schema: inputs are ACTIVE, outputs are PREDICTED ---
    MiningSchema miningSchema = new MiningSchema();
    for (String ifld : inputfieldNames) {
      MiningField mfld = new MiningField();
      mfld.setName(ifld);
      mfld.setOptype(OPTYPE.CONTINUOUS);
      mfld.setUsageType(FIELDUSAGETYPE.ACTIVE);
      miningSchema.getMiningFields().add(mfld);
    }
    for (String ofld : outputfieldNames) {
      MiningField mfld = new MiningField();
      mfld.setName(ofld);
      mfld.setOptype(OPTYPE.CONTINUOUS);
      mfld.setUsageType(FIELDUSAGETYPE.PREDICTED);
      miningSchema.getMiningFields().add(mfld);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(miningSchema);

    // --- Output fields: one "Out_<field>" per predicted field ---
    Output outputs = new Output();
    for (String ofld : outputfieldNames) {
      OutputField outFld = new OutputField();
      outFld.setName("Out_" + ofld);
      outFld.setTargetField(ofld);
      outputs.getOutputFields().add(outFld);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(outputs);

    // --- Neural inputs: each input is z-scored via a two-point LinearNorm.
    // The points (orig=0, norm=-mean/std) and (orig=mean, norm=0) define the
    // line norm = (x - mean) / std. Input ids are 0..inputs-1 (counter). ---
    NeuralInputs nins = new NeuralInputs();
    nins.setNumberOfInputs(BigInteger.valueOf(inputfieldNames.length));

    for (int j = 0; j < inputfieldNames.length; j++) {
      String ifld = inputfieldNames[j];
      NeuralInput nin = new NeuralInput();
      nin.setId("" + counter++);
      DerivedField der = new DerivedField();
      der.setDataType(DATATYPE.DOUBLE);
      der.setOptype(OPTYPE.CONTINUOUS);
      NormContinuous nc = new NormContinuous();
      nc.setField(ifld);
      nc.setOutliers(OUTLIERTREATMENTMETHOD.AS_IS);
      LinearNorm lin1 = new LinearNorm();
      lin1.setOrig(0);
      lin1.setNorm(-inputMeans[j] / inputStds[j]);
      nc.getLinearNorms().add(lin1);
      LinearNorm lin2 = new LinearNorm();
      lin2.setOrig(inputMeans[j]);
      lin2.setNorm(0);
      nc.getLinearNorms().add(lin2);
      der.setNormContinuous(nc);
      nin.setDerivedField(der);
      nins.getNeuralInputs().add(nin);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(nins);

    // --- Hidden layer: per neuron, consume one bias then one weight per
    // input; synapse "from" ids 0..inputs-1 match the neural-input ids. ---
    NeuralLayer hidden = new NeuralLayer();
    hidden.setNumberOfNeurons(BigInteger.valueOf(hiddenSize));

    for (int j = 0; j < hiddenSize; j++) {
      Neuron n = new Neuron();
      n.setId("" + counter++);
      n.setBias(weights[wtsIndex++]);
      for (int k = 0; k < inputfieldNames.length; k++) {
        Synapse con = new Synapse();
        con.setFrom("" + k);
        con.setWeight(weights[wtsIndex++]);
        n.getCons().add(con);
      }
      hidden.getNeurons().add(n);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(hidden);

    // --- Output layer: identity activation (raw regression values); synapse
    // "from" ids inputs..inputs+hidden-1 match the hidden-neuron ids. ---
    NeuralLayer outer = new NeuralLayer();
    outer.setActivationFunction(ACTIVATIONFUNCTION.IDENTITY);
    outer.setNumberOfNeurons(BigInteger.valueOf(outputfieldNames.length));

    for (int j = 0; j < outputfieldNames.length; j++) {
      Neuron n = new Neuron();
      n.setId("" + counter++);
      n.setBias(weights[wtsIndex++]);
      for (int k = 0; k < hiddenSize; k++) {
        Synapse con = new Synapse();
        con.setFrom("" + (k + inputfieldNames.length));
        con.setWeight(weights[wtsIndex++]);
        n.getCons().add(con);
      }
      outer.getNeurons().add(n);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(outer);

    // --- Neural outputs: bind each output neuron (id = j + inputs + hidden)
    // back to its target field, with the same two-point LinearNorm so the
    // consumer can de-normalize predictions. ---
    NeuralOutputs finalOuts = new NeuralOutputs();
    finalOuts.setNumberOfOutputs(BigInteger.valueOf(outputfieldNames.length));
    for (int j = 0; j < outputfieldNames.length; j++) {
      NeuralOutput output = new NeuralOutput();
      output.setOutputNeuron("" + (j + inputfieldNames.length + hiddenSize));
      DerivedField der = new DerivedField();
      der.setDataType(DATATYPE.DOUBLE);
      der.setOptype(OPTYPE.CONTINUOUS);
      NormContinuous nc = new NormContinuous();
      nc.setField(outputfieldNames[j]);
      nc.setOutliers(OUTLIERTREATMENTMETHOD.AS_IS);
      LinearNorm lin1 = new LinearNorm();
      lin1.setOrig(0);
      lin1.setNorm(-outputMeans[j] / outputStds[j]);
      nc.getLinearNorms().add(lin1);
      LinearNorm lin2 = new LinearNorm();
      lin2.setOrig(outputMeans[j]);
      lin2.setNorm(0);
      nc.getLinearNorms().add(lin2);
      der.setNormContinuous(nc);
      output.setDerivedField(der);
      finalOuts.getNeuralOutputs().add(output);
    }

    nnet.getExtensionsAndNeuralLayersAndNeuralInputs().add(finalOuts);

    pmml.getAssociationModelsAndBaselineModelsAndClusteringModels().add(nnet);

    return pmml;
  }