Example #1
  @Test
  public void testShuffle() throws CIlibIOException {
    // Read the delimited test file into a data table and keep an unshuffled clone as a reference.
    DataTableBuilder dataTableBuilder = new DataTableBuilder(new DelimitedTextFileReader());
    dataTableBuilder.getDataReader().setSourceURL(testFilePath);
    dataTableBuilder.buildDataTable();
    DataTable<List<StringType>, List<StringType>> dataTable = dataTableBuilder.getDataTable();
    DataTable<List<StringType>, List<StringType>> reference =
        (DataTable<List<StringType>, List<StringType>>) dataTable.getClone();

    // Shuffle the table in place.
    ShuffleOperator operator = new ShuffleOperator();
    operator.operate(dataTable);

    // The clone holds copies of the original rows, so no position in the shuffled
    // table should refer to the same row instance as the reference copy.
    for (int i = 0; i < dataTable.size(); i++) {
      Assert.assertNotSame(reference.getRow(i), dataTable.getRow(i));
    }
  }
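
A shuffle should reorder rows without adding or dropping any. The sketch below is a complementary check that the shuffled table still contains exactly the same rows as the reference copy. It is a minimal sketch that reuses the fixture above (testFilePath and the CIlib DataTableBuilder, DelimitedTextFileReader, ShuffleOperator types), assumes java.util.ArrayList is imported and that StringType rows compare by value; the method name testShuffleKeepsContents is illustrative and not part of the original test class.

  @Test
  public void testShuffleKeepsContents() throws CIlibIOException {
    DataTableBuilder dataTableBuilder = new DataTableBuilder(new DelimitedTextFileReader());
    dataTableBuilder.getDataReader().setSourceURL(testFilePath);
    dataTableBuilder.buildDataTable();
    DataTable<List<StringType>, List<StringType>> dataTable = dataTableBuilder.getDataTable();
    DataTable<List<StringType>, List<StringType>> reference =
        (DataTable<List<StringType>, List<StringType>>) dataTable.getClone();

    new ShuffleOperator().operate(dataTable);

    // Collect the rows of the shuffled table and compare them order-insensitively:
    // every reference row must appear in the shuffled table exactly once.
    List<List<StringType>> remaining = new ArrayList<List<StringType>>();
    for (int i = 0; i < dataTable.size(); i++) {
      remaining.add(dataTable.getRow(i));
    }
    Assert.assertEquals(reference.size(), remaining.size());
    for (int i = 0; i < reference.size(); i++) {
      Assert.assertTrue(remaining.remove(reference.getRow(i)));
    }
    Assert.assertTrue(remaining.isEmpty());
  }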
Example #2
  /**
   * Initialises the problem by reading in the data and constructing the training, validation
   * and generalisation sets. Also initialises (constructs) the neural network.
   */
  @Override
  public void initialise() {
    if (initialised) {
      return;
    }
    try {
      // Read the source data and convert each row into a StandardPattern.
      dataTableBuilder.addDataOperator(new TypeConversionOperator());
      dataTableBuilder.addDataOperator(patternConversionOperator);
      dataTableBuilder.buildDataTable();
      DataTable dataTable = dataTableBuilder.getDataTable();

      // Shuffle once so the subsequent split does not depend on the file order.
      ShuffleOperator initialShuffler = new ShuffleOperator();
      initialShuffler.operate(dataTable);

      // Derive the split sizes from the configured percentages; the generalisation
      // set receives whatever remains after the two truncating casts.
      int trainingSize = (int) (dataTable.size() * trainingSetPercentage);
      int validationSize = (int) (dataTable.size() * validationSetPercentage);
      int generalisationSize = dataTable.size() - trainingSize - validationSize;

      trainingSet = new StandardPatternDataTable();
      validationSet = new StandardPatternDataTable();
      generalisationSet = new StandardPatternDataTable();

      // Copy consecutive slices of the shuffled table into the three sets.
      for (int i = 0; i < trainingSize; i++) {
        trainingSet.addRow((StandardPattern) dataTable.getRow(i));
      }

      for (int i = trainingSize; i < validationSize + trainingSize; i++) {
        validationSet.addRow((StandardPattern) dataTable.getRow(i));
      }

      for (int i = validationSize + trainingSize;
          i < generalisationSize + validationSize + trainingSize;
          i++) {
        generalisationSet.addRow((StandardPattern) dataTable.getRow(i));
      }

      neuralNetwork.initialise();

    } catch (CIlibIOException exception) {
      // A failed read is only printed; the problem is still marked as initialised below.
      exception.printStackTrace();
    }
    initialised = true;
  }
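
The split above truncates the two percentage products to integers and gives the generalisation set whatever remains, so no pattern is lost to rounding. Below is a minimal sketch of that arithmetic with assumed example values (a 1000-pattern table split 75% / 12.5% / 12.5%); the class name and the concrete numbers are illustrative only, not taken from the original problem class.

public final class SplitSizesSketch {
  public static void main(String[] args) {
    int tableSize = 1000;                   // assumed number of patterns
    double trainingSetPercentage = 0.75;    // assumed split percentages
    double validationSetPercentage = 0.125;

    int trainingSize = (int) (tableSize * trainingSetPercentage);       // 750
    int validationSize = (int) (tableSize * validationSetPercentage);   // 125
    // The generalisation set absorbs any remainder left by the truncating casts.
    int generalisationSize = tableSize - trainingSize - validationSize; // 125

    System.out.printf("training=%d validation=%d generalisation=%d%n",
        trainingSize, validationSize, generalisationSize);
  }
}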