/**
 * Trains a supervised (behavioral-cloning) policy from expert episodes and evaluates it
 * on three scenarios: an easy start, a hard start, and a relocated goal (transfer test).
 * Each rollout is scored under a hand-specified objective reward and written to
 * {@code trainedDir}; a visualizer is launched over the results at the end.
 */
public void runSupervised() {

    MyTimer timer = new MyTimer();
    timer.start();

    // Feature vector over the puddle map: 5 feature types on a 20x20 grid with the
    // goal initially at (20, 20). NOTE(review): the weight vector below is indexed by
    // this generator's feature layout -- confirm ordering against PuddleMapFV.
    PuddleMapFV agentfv = new PuddleMapFV(this.puddleMap, 5, 20, 20);

    // Expert demonstrations parsed from disk; these form the supervised training set.
    java.util.List<EpisodeAnalysis> eas =
        EpisodeAnalysis.parseFilesIntoEAList(this.expertDir, domain, this.sp);

    // Hand-specified "true" reward, used only to score the learned policy's rollouts.
    LinearStateDifferentiableRF objectiveRF =
        new LinearStateDifferentiableRF(agentfv, agentfv.getDim());
    objectiveRF.setParameters(new double[] {1., -10, -10, 0, -10, 0, 0, 0, 0, 0});

    // Train a J48 decision tree to imitate the expert's action choices.
    // (A Logistic model over ConcatenatedObjectFeatureVectorGenerator features was the
    // previously tried alternative.)
    WekaPolicy p = new WekaPolicy(agentfv, new J48(), this.domain.getActions(), eas);

    timer.stop();

    System.out.println("Training Time: " + timer.getTime());

    String baseName = "Svar";
    // Rollout horizon shared by all three evaluations.
    final int maxSteps = 200;

    GridWorldTerminalFunction tf = new GridWorldTerminalFunction(20, 20);

    // Evaluation 1: start at (18, 0), near the original goal.
    State simple = this.initialState.copy();
    GridWorldDomain.setAgent(simple, 18, 0);
    EpisodeAnalysis trainedEp1 = p.evaluateBehavior(simple, objectiveRF, tf, maxSteps);
    trainedEp1.writeToFile(trainedDir + "/j48" + baseName + "EpSimple", this.sp);

    // Evaluation 2: harder start position at (0, 9).
    State hardAgent = this.initialState.copy();
    GridWorldDomain.setAgent(hardAgent, 0, 9);
    EpisodeAnalysis trainedEp2 = p.evaluateBehavior(hardAgent, objectiveRF, tf, maxSteps);
    trainedEp2.writeToFile(trainedDir + "/j48" + baseName + "EpHardAgent", this.sp);

    // Evaluation 3: relocate the goal to (12, 14) and test whether the cloned policy
    // generalizes. The map and the feature generator must both be told the new goal.
    int ngx = 12;
    int ngy = 14;
    tf = new GridWorldTerminalFunction(ngx, ngy);
    this.puddleMap[ngx][ngy] = 1; // presumably flags the goal cell in the map -- verify semantics
    this.puddleMap[20][20] = 0;   // clear the old goal cell
    agentfv.setGoal(ngx, ngy);
    State hardGoal = this.initialState.copy();
    GridWorldDomain.setAgent(hardGoal, 0, 0);

    EpisodeAnalysis trainedEp3 = p.evaluateBehavior(hardGoal, objectiveRF, tf, maxSteps);
    trainedEp3.writeToFile(trainedDir + "/j48" + baseName + "EpHardGoal", this.sp);

    // Browse all recorded episodes in the trained-output directory.
    new EpisodeSequenceVisualizer(this.v, this.domain, this.sp, this.trainedDir);
  }