Example #1
0
  /**
   * Loads a previously saved network and its training data from {@code dataDir},
   * trains the network on the console for {@code Config.TRAINING_MINUTES} minutes,
   * saves the updated network back to disk, and shuts Encog down.
   *
   * <p>If either file is missing, a message is printed and the method returns
   * without training.
   *
   * @param dataDir directory containing the network and training files.
   */
  public static void train(File dataDir) {
    final File netFile = new File(dataDir, Config.NETWORK_FILE);
    final File trainFile = new File(dataDir, Config.TRAINING_FILE);

    // Guard: the saved network must exist before we can continue training it.
    if (!netFile.exists()) {
      System.out.println("Can't read file: " + netFile.getAbsolutePath());
      return;
    }
    final BasicNetwork network = (BasicNetwork) EncogDirectoryPersistence.loadObject(netFile);

    // Guard: the training data must exist as well.
    if (!trainFile.exists()) {
      System.out.println("Can't read file: " + trainFile.getAbsolutePath());
      return;
    }
    final MLDataSet trainingData = EncogUtility.loadEGB2Memory(trainFile);

    // Train for a fixed wall-clock budget, then report the final error.
    EncogUtility.trainConsole(network, trainingData, Config.TRAINING_MINUTES);
    System.out.println("Final Error: " + (float) network.calculateError(trainingData));
    System.out.println("Training complete, saving network.");
    EncogDirectoryPersistence.saveObject(netFile, network);
    System.out.println("Network saved.");
    Encog.getInstance().shutdown();
  }
Example #2
0
  /**
   * Test-suite teardown: removes the model directories created during the
   * tests and releases Encog's global resources.
   *
   * @throws IOException if a directory cannot be deleted.
   */
  @AfterClass
  public void shutDown() throws IOException {
    // Delete in the same order as the original cleanup.
    for (final String dir : new String[] {"./models/", "./modelsTmp/"}) {
      FileUtils.deleteDirectory(new File(dir));
    }

    Encog.getInstance().shutdown();
  }
Example #3
0
  /**
   * Builds a training set from the given files (one feature vector and one
   * one-hot label per file), trains a feed-forward network on it with
   * resilient propagation, and prints how the trained network classifies
   * each training sample.
   *
   * @param files input files; each is converted to a feature vector by
   *     {@code FeatureExtractor} and labeled via {@code Labeler.getLabel}.
   */
  public void recognizer(List<File> files) {

    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();
    for (File f : files) {
      List<double[]> data;
      try {
        data = fe.fileProcessor(f);
        MLData mlDataIn = new BasicMLData(data.get(0));
        // One-hot encode this file's label.
        double[] out = new double[NUM_OUT];
        // Integer.valueOf handles either an int or a numeric-String label and
        // avoids the deprecated new Integer(...) constructor.
        int index = Integer.valueOf(Labeler.getLabel(f));
        out[index] = 1.;
        // Arrays.toString prints the element values; out.toString() would only
        // print the array's identity hash (e.g. "[D@1b6d3586").
        System.out.println(java.util.Arrays.toString(out));
        MLData mlDataOut = new BasicMLData(out);
        trainingSet.add(mlDataIn, mlDataOut);
      } catch (FileNotFoundException e) {
        // Best effort: skip files that cannot be read, but report them.
        e.printStackTrace();
      }
    }

    // Topology: linear input -> sigmoid hidden (4x input, biased) -> sigmoid output.
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset();

    // Train until the error drops below 0.001.
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
      epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // Evaluate on the training data itself (no held-out test set).
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          "actual-->" + Labeler.getWord(output) + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
  }
  /**
   * Creates a new machine learning method by delegating to the first
   * registered Encog plugin that recognizes the requested method type.
   *
   * @param methodType The method to create.
   * @param architecture The architecture string.
   * @param input The input count.
   * @param output The output count.
   * @return The newly created machine learning method.
   * @throws EncogError if no plugin can produce the requested method type.
   */
  public MLMethod create(
      final String methodType, final String architecture, final int input, final int output) {
    for (final EncogPluginBase candidate : Encog.getInstance().getPlugins()) {
      // Only service plugins can create methods; skip everything else.
      if (!(candidate instanceof EncogPluginService1)) {
        continue;
      }
      final MLMethod method =
          ((EncogPluginService1) candidate).createMethod(methodType, architecture, input, output);
      if (method != null) {
        return method;
      }
    }

    throw new EncogError("Unknown method type: " + methodType);
  }
Example #5
0
  /**
   * Starts one worker thread per configured thread (a threadCount of 0 means
   * one worker per available processor) and registers this trainer as an
   * Encog shutdown task.
   */
  private void startup() {
    Encog.getInstance().addShutdownTask(this);

    // 0 is the sentinel for "use all available processors".
    final int count =
        this.threadCount != 0 ? this.threadCount : Runtime.getRuntime().availableProcessors();

    this.workers = new GeneticTrainWorker[count];
    for (int i = 0; i < count; i++) {
      this.workers[i] = new GeneticTrainWorker(this);
      this.workers[i].start();
    }

    this.needBestGenome = true;
  }
  // Entry point: trains the "delete_me" multilayer perceptron on a stored
  // data set, saves the new network/data version, then runs a test pass.
  public static void main(String[] args) {
    // Select file version 0 before loading — presumably chooses which saved
    // files Arquivo reads; TODO confirm against JdvUtils.Arquivo.
    JdvUtils.Arquivo.versionamento(0);
    JdvRedeAbstrata rede = new MultilayerPerceptron3("delete_me").inicializar();
    ConjuntosDados dados = JdvUtils.Arquivo.carregarDados(rede);
    MLDataSet setDados = dados.getConjuntosMLSet();

    // Train on the loaded data set.
    rede.treinar(dados);

    // Bump the version so the trained network/data are saved as new files
    // instead of overwriting the originals.
    JdvUtils.Arquivo.incrementaVersao();
    JdvUtils.Arquivo.salvarRede(rede);
    JdvUtils.Arquivo.salvarDados(rede, dados);

    //		rede.testar(setDados.get(( int ) (Math.random() * setDados.size())));
    rede.testar(setDados);

    Encog.getInstance().shutdown();
  }
Example #7
0
  /**
   * Asks every worker thread to terminate, waits for each to finish, and
   * deregisters this trainer's Encog shutdown task.
   *
   * @throws EncogError if this thread is interrupted while waiting for a
   *     worker to stop; the interrupt flag is restored before throwing.
   */
  public void finishTraining() {
    if (this.workers != null) {
      // Signal all workers first so they can wind down in parallel,
      // then join each one.
      for (int i = 0; i < this.workers.length; i++) {
        this.workers[i].requestTerminate();
      }

      for (int i = 0; i < this.workers.length; i++) {
        try {
          this.workers[i].join();
        } catch (InterruptedException e) {
          // Restore the interrupt flag so callers can still observe it.
          Thread.currentThread().interrupt();
          throw new EncogError("Can't shut down training threads.");
        }
      }
    }

    this.workers = null;
    Encog.getInstance().removeShutdownTask(this);
  }
Example #8
0
  /**
   * The main method: trains a 2-3-1 sigmoid network on the XOR truth table
   * with stochastic gradient descent (RMSProp update rule) and prints the
   * network's output for each training pair.
   *
   * @param args No arguments are used.
   */
  public static void main(final String args[]) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network until the error drops below 0.01
    final StochasticGradientDescent train = new StochasticGradientDescent(network, trainingSet);
    train.setUpdateRule(new RMSPropUpdate());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network on the training data itself
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          pair.getInput().getData(0)
              + ","
              + pair.getInput().getData(1)
              + ", actual="
              + output.getData(0)
              + ",ideal="
              + pair.getIdeal().getData(0));
    }

    // (removed an unused local declaration of PerturbationFeatureImportanceCalc)

    Encog.getInstance().shutdown();
  }
  // Shows the Encog Workbench configuration dialog pre-populated from the
  // current config; if the user confirms, writes the values back, saves the
  // config, applies the error-calculation mode, and (re)initializes OpenCL
  // as needed.
  public void performEditConfig() {
    // ObjectEditorFrame config = new
    // ObjectEditorFrame(EncogWorkBench.getInstance().getConfig());
    // config.setVisible(true);

    EncogConfigDialog dialog = new EncogConfigDialog(EncogWorkBench.getInstance().getMainWindow());

    EncogWorkBenchConfig config = EncogWorkBench.getInstance().getConfig();

    // Populate the dialog fields from the current configuration.
    dialog.getUserID().setValue(config.getEncogCloudUserID());
    dialog.getPassword().setValue(config.getEncogCloudPassword());
    dialog.getDefaultError().setValue(config.getDefaultError());
    dialog.getNetwork().setValue(config.getEncogCloudNetwork());
    dialog.getAutoConnect().setValue(config.isAutoConnect());
    dialog.getThreadCount().setValue(config.getThreadCount());
    dialog.getUseOpenCL().setValue(config.isUseOpenCL());
    // The error-calculation mode maps onto combo-box indices 0..2
    // (RMS, MSE, ARCTAN); the same mapping is reversed below on save.
    switch (config.getErrorCalculation()) {
      case RMS:
        ((JComboBox) dialog.getErrorCalculation().getField()).setSelectedIndex(0);
        break;
      case MSE:
        ((JComboBox) dialog.getErrorCalculation().getField()).setSelectedIndex(1);
        break;
      case ARCTAN:
        ((JComboBox) dialog.getErrorCalculation().getField()).setSelectedIndex(2);
        break;
    }

    // process() blocks until the dialog closes; true means the user accepted.
    if (dialog.process()) {
      config.setEncogCloudUserID(dialog.getUserID().getValue());
      config.setEncogCloudPassword(dialog.getPassword().getValue());
      config.setDefaultError(dialog.getDefaultError().getValue());
      config.setAutoConnect(dialog.getAutoConnect().getValue());
      config.setEncogCloudNetwork(dialog.getNetwork().getValue());
      config.setThreadCount(dialog.getThreadCount().getValue());
      config.setUseOpenCL(dialog.getUseOpenCL().getValue());
      switch (((JComboBox) dialog.getErrorCalculation().getField()).getSelectedIndex()) {
        case 0:
          config.setErrorCalculation(ErrorCalculationMode.RMS);
          break;
        case 1:
          config.setErrorCalculation(ErrorCalculationMode.MSE);
          break;
        case 2:
          config.setErrorCalculation(ErrorCalculationMode.ARCTAN);
          break;
      }
      EncogWorkBench.saveConfig();

      // Apply the (possibly changed) error mode globally right away.
      ErrorCalculation.setMode(EncogWorkBench.getInstance().getConfig().getErrorCalculation());

      // OpenCL can be enabled immediately, but disabling it only takes
      // effect after the workbench restarts (see the message below).
      if (config.isUseOpenCL() && Encog.getInstance().getCL() == null) {
        EncogWorkBench.initCL();
        if (Encog.getInstance().getCL() != null) {
          EncogWorkBench.displayMessage(
              "OpenCL",
              "Success, your graphics card(s) are now ready to help train neural networks.");
        }
      } else if (!EncogWorkBench.getInstance().getConfig().isUseOpenCL()
          && Encog.getInstance().getCL() != null) {
        EncogWorkBench.displayMessage(
            "OpenCL",
            "Encog Workbench will stop using your GPU the next time\nthe workbench is restarted.");
      }
    }
  }
Example #10
0
/**
 * A multi-threaded evolutionary algorithm trainer. Worker threads produce and
 * score genomes concurrently; {@link #iteration()} blocks until the workers
 * signal that a full population-sized iteration has completed.
 *
 * <p>Thread-safety: all mutable iteration state ({@code bestGenome},
 * iteration counters, {@code currentError}) is guarded by
 * {@code iterationLock}.
 */
public class MultiThreadedEA extends BasicEA
    implements MultiThreadable, EncogShutdownTask, Serializable {

  /** Worker threads; null whenever training is not running. */
  private GeneticTrainWorker[] workers;

  /** The evolutionary operators, chosen by probability. */
  private final OperationList operators = new OperationList();

  /** True until the first genome has been copied into {@code bestGenome}. */
  private boolean needBestGenome = true;

  private int iterationNumber;
  private int subIterationCounter;

  /** Guards bestGenome, the counters and currentError. */
  private final Lock iterationLock = new ReentrantLock();

  /** First error reported by a worker, if any; checked after each iteration. */
  private transient Throwable currentError;

  private ThreadedGenomeSelector selector;

  /** Best genome seen so far; access only while holding iterationLock. */
  private final Genome bestGenome;

  private RandomFactory randomNumberFactory =
      Encog.getInstance().getRandomFactory().factorFactory();

  /** Condition used to check if we are done. */
  private final Condition iterationCondition = this.iterationLock.newCondition();

  /** The thread count; 0 means one worker per available processor. */
  private int threadCount;

  /**
   * Constructs the trainer.
   *
   * @param thePopulation the population to evolve.
   * @param theScoreFunction the scoring function.
   */
  public MultiThreadedEA(Population thePopulation, CalculateGenomeScore theScoreFunction) {
    super(thePopulation, theScoreFunction);

    this.bestGenome = thePopulation.getGenomeFactory().factor();
    this.selector = new ThreadedGenomeSelector(this);
  }

  /** {@inheritDoc} */
  public int getThreadCount() {
    return this.threadCount;
  }

  /** {@inheritDoc} */
  public void setThreadCount(int numThreads) {
    this.threadCount = numThreads;
  }

  /**
   * Starts one worker thread per configured thread (threadCount of 0 means
   * one per processor) and registers this trainer as an Encog shutdown task.
   */
  private void startup() {
    int actualThreadCount = Runtime.getRuntime().availableProcessors();
    Encog.getInstance().addShutdownTask(this);

    if (this.threadCount != 0) {
      actualThreadCount = this.threadCount;
    }

    this.workers = new GeneticTrainWorker[actualThreadCount];

    for (int i = 0; i < this.workers.length; i++) {
      this.workers[i] = new GeneticTrainWorker(this);
      this.workers[i].start();
    }

    this.needBestGenome = true;
  }

  /**
   * Performs one training iteration: lazily starts the workers, then blocks
   * until they signal completion (or an error is reported).
   */
  @Override
  public void iteration() {
    if (this.workers == null) {
      this.operators.finalizeStructure();
      startup();
    }

    this.iterationLock.lock();
    try {
      // Block until notifyProgress()/reportError()/signalDone() signals.
      this.iterationCondition.await();
      if (this.currentError != null) {
        throw new EncogError(this.currentError);
      }
    } catch (InterruptedException e) {
      // Restore the interrupt flag instead of swallowing it silently.
      Thread.currentThread().interrupt();
    } finally {
      this.iterationLock.unlock();
    }

    if (this.currentError != null) {
      finishTraining();
    }
  }

  /**
   * Adds an evolutionary operator with the given selection probability.
   *
   * @param probability the probability of choosing this operator.
   * @param opp the operator to add.
   */
  public void addOperation(double probability, EvolutionaryOperator opp) {
    this.operators.add(probability, opp);
  }

  /**
   * Asks every worker to terminate, waits for each to finish, and
   * deregisters this trainer's Encog shutdown task.
   *
   * @throws EncogError if interrupted while waiting for a worker to stop;
   *     the interrupt flag is restored before throwing.
   */
  public void finishTraining() {
    if (this.workers != null) {
      for (int i = 0; i < this.workers.length; i++) {
        this.workers[i].requestTerminate();
      }

      for (int i = 0; i < this.workers.length; i++) {
        try {
          this.workers[i].join();
        } catch (InterruptedException e) {
          // Restore the interrupt flag so callers can still observe it.
          Thread.currentThread().interrupt();
          throw new EncogError("Can't shut down training threads.");
        }
      }
    }

    this.workers = null;
    Encog.getInstance().removeShutdownTask(this);
  }

  /**
   * Scores the given genome and, while holding the lock, promotes it to
   * {@code bestGenome} if it is the first or the best genome seen so far.
   *
   * @param prg the genome to evaluate.
   */
  public void evaluateBestGenome(Genome prg) {
    this.iterationLock.lock();
    try {
      calculateEffectiveScore(prg);
      if (this.needBestGenome || this.getSelectionComparator().isBetterThan(prg, this.bestGenome)) {
        this.bestGenome.copy(prg);
        this.needBestGenome = false;
      }
    } finally {
      this.iterationLock.unlock();
    }
  }

  /**
   * Copies the current best genome into {@code target} under the lock.
   *
   * @param target the genome to copy into.
   */
  public void copyBestGenome(Genome target) {
    this.iterationLock.lock();
    try {
      target.copy(this.bestGenome);
    } finally {
      this.iterationLock.unlock();
    }
  }

  /**
   * Replaces anti-selected population members with {@code size} new genomes
   * starting at {@code index}, tracking the best genome as it goes.
   *
   * @param genome the array holding the new genomes.
   * @param index the first index to add from.
   * @param size how many genomes to add.
   * @throws GeneticError if a genome exceeds the maximum individual size.
   */
  public void addGenome(Genome[] genome, int index, int size) {
    Genome replaceTarget = null;
    this.iterationLock.lock();
    try {
      for (int i = 0; i < size; i++) {
        if (genome[i].size() > getMaxIndividualSize()) {
          throw new GeneticError("Program is too large to be added to population.");
        }
        replaceTarget = this.selector.antiSelectGenome();
        getPopulation().rewrite(genome[index + i]);
        replaceTarget.copy(genome[index + i]);
        evaluateBestGenome(genome[index + i]);
      }
    } finally {
      this.iterationLock.unlock();
      // NOTE(review): only the genome anti-selected in the final loop pass is
      // released here; verify the selector does not expect earlier ones back.
      if (replaceTarget != null) {
        this.selector.releaseGenome(replaceTarget);
      }
    }
  }

  /**
   * Called by workers after each sub-iteration; once a full population's
   * worth has completed, wakes the thread blocked in {@link #iteration()}.
   */
  public void notifyProgress() {
    this.iterationLock.lock();
    try {
      this.subIterationCounter++;
      if (this.subIterationCounter > getPopulation().size()) {
        this.subIterationCounter = 0;
        this.iterationNumber++;
        this.iterationCondition.signal();
      }
    } finally {
      this.iterationLock.unlock();
    }
  }

  /**
   * Records an error raised by a worker and wakes {@link #iteration()} so it
   * can rethrow it.
   *
   * @param t the error to report.
   */
  public void reportError(Throwable t) {
    this.iterationLock.lock();
    try {
      this.currentError = t;
      this.iterationCondition.signal();
    } finally {
      this.iterationLock.unlock();
    }
  }

  /** Wakes the thread blocked in {@link #iteration()} without an error. */
  public void signalDone() {
    this.iterationLock.lock();
    try {
      this.iterationCondition.signal();
    } finally {
      this.iterationLock.unlock();
    }
  }

  /** @return the selector */
  public ThreadedGenomeSelector getSelector() {
    return selector;
  }

  /** @return the operators */
  public OperationList getOperators() {
    return operators;
  }

  /** @return the score of the best genome seen so far. */
  public double getError() {
    return this.bestGenome.getScore();
  }

  /** @return the number of completed iterations. */
  public int getIteration() {
    return this.iterationNumber;
  }

  /** @return the randomNumberFactory */
  public RandomFactory getRandomNumberFactory() {
    return randomNumberFactory;
  }

  /** @param randomNumberFactory the randomNumberFactory to set */
  public void setRandomNumberFactory(RandomFactory randomNumberFactory) {
    this.randomNumberFactory = randomNumberFactory;
  }

  /**
   * Fills the population with random genomes and evaluates each against the
   * current best.
   *
   * @param maxDepth the maximum depth of a randomly generated genome.
   */
  public void createRandomPopulation(int maxDepth) {
    Random random = this.randomNumberFactory.factor();
    getPopulation()
        .getGenomeFactory()
        .factorRandomPopulation(random, getPopulation(), getScoreFunction(), maxDepth);

    for (Genome genome : getPopulation().getGenomes()) {
      evaluateBestGenome(genome);
    }
  }

  /**
   * Computes the genome's adjusted score: the raw score plus a complexity
   * penalty that scales linearly between the penalty threshold and the full
   * penalty threshold.
   *
   * @param genome the genome to score.
   */
  public void calculateEffectiveScore(Genome genome) {
    GeneticTrainingParams params = getParams();
    double result = genome.getScore();
    if (genome.size() > params.getComplexityPenaltyThreshold()) {
      int over = genome.size() - params.getComplexityPenaltyThreshold();
      int range =
          params.getComplexityPentaltyFullThreshold() - params.getComplexityPenaltyThreshold();
      double complexityPenalty =
          ((params.getComplexityFullPenalty() - params.getComplexityPenalty()) / range) * over;
      result += (result * complexityPenalty);
    }
    genome.setAdjustedScore(result);
  }

  /** Encog shutdown hook: stop the worker threads. */
  @Override
  public void performShutdownTask() {
    finishTraining();
  }
}
 /**
  * Entry point: runs the bidirectional associative memory demo, then
  * releases Encog's global resources.
  *
  * @param args unused.
  */
 public static void main(String[] args) {
   new BidirectionalAssociativeMemory().run();
   Encog.getInstance().shutdown();
 }