@Test
  public void testExecute() throws Exception {

    int uktFilesSize = 0;

    String parentDir =
        ResourceUtils.getFile("classpath:schedule/test-uktstatus.gpg.txt").getParent();

    setUktDirectory(parentDir);

    File uktDir = new File(uktDirectory);
    File[] uktFiles =
        uktDir.listFiles(
            new FilenameFilter() {
              public boolean accept(File dir, String name) {
                return name.endsWith("uktstatus.gpg.txt");
              }
            });

    if (uktFiles != null) {
      uktFilesSize = uktFiles.length;
    }

    assertTrue("Can not read UKT files", uktFilesSize != 0);

    uktImportExportScheduler.setUktDirectory(parentDir);
    uktImportExportScheduler.setUktExportDirectory(parentDir);
    uktImportExportScheduler.execute();

    UktStatus uktStatus = ukTransplantManager.getUktStatus("9876543210");

    if (uktFilesSize > 0) {
      assertNotNull("UktStatus not be saved", uktStatus);
      File file = ResourceUtils.getFile("classpath:schedule/ukt_rpv_export.txt");
      CSVParser uktParser = new CSVParser(new FileReader(file));
      uktParser.changeDelimiter(',');
      String[][] uktValues = uktParser.getAllValues();

      assertEquals("nhsno not same", patient.getNhsno(), uktValues[0][0]);
      assertEquals("surname not same", patient.getSurname(), uktValues[0][1]);
      assertEquals("forname not same", patient.getForename(), uktValues[0][2]);
      assertEquals("postcode not same", patient.getPostcode(), uktValues[0][4]);

      uktParser.close();
    } else {
      assertNull("Wrong entity exists.", uktStatus);
    }
  }
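
  /*
   * Hedged sketch (not part of the original source): illustrates the Ostermiller
   * CSVParser calls used above on an in-memory record. The sample row values and
   * the test name are hypothetical, and java.io.StringReader is assumed to be
   * imported; only the constructor, changeDelimiter and getAllValues calls mirror
   * the export-file checks in testExecute.
   */
  @Test
  public void testCsvColumnIndexing() throws Exception {
    String row = "9876543210,SMITH,JOHN,1970-01-01,AB1 2CD";
    CSVParser parser = new CSVParser(new StringReader(row));
    parser.changeDelimiter(',');

    // getAllValues returns rows as the first index and columns as the second,
    // matching the uktValues[0][n] assertions in testExecute
    String[][] values = parser.getAllValues();
    assertEquals("9876543210", values[0][0]);
    assertEquals("AB1 2CD", values[0][4]);

    parser.close();
  }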
 /**
  * Reads the word-count lines from a report output path.
  *
  * @param path report output path
  * @param ext suffix appended to the path giving the file that holds the word counts
  * @return map from each word to a pair of (count, line index)
  * @throws IOException if the word-count file cannot be opened or read
  */
 public static LinkedHashMap<String, IndependentPair<Long, Long>> readWordCountLines(
     String path, String ext) throws IOException {
   String wordPath = path + ext;
   Path p = HadoopToolsUtil.getInputPaths(wordPath)[0];
   FileSystem fs = HadoopToolsUtil.getFileSystem(p);
   FSDataInputStream toRead = fs.open(p);
   BufferedReader reader = new BufferedReader(new InputStreamReader(toRead, "UTF-8"));
   CSVParser csvreader = new CSVParser(reader);
   long lineN = 0;
   String[] next = null;
   LinkedHashMap<String, IndependentPair<Long, Long>> toRet =
       new LinkedHashMap<String, IndependentPair<Long, Long>>();
   while ((next = csvreader.getLine()) != null && next.length > 0) {
     if (next.length != 2) {
       System.out.println("PROBLEM READLINE LINE: " + Arrays.toString(next));
       continue;
     }
     toRet.put(next[0], IndependentPair.pair(Long.parseLong(next[1]), lineN));
     lineN++;
   }
   csvreader.close();
   return toRet;
 }
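
 /*
  * Hedged usage sketch (not part of the original source): shows how the map
  * returned by readWordCountLines might be consumed. The method name and the
  * "/words" extension are hypothetical placeholders, and the sketch assumes
  * IndependentPair exposes firstObject()/secondObject() accessors.
  */
 public static void printWordCounts(String outputPath) throws IOException {
   LinkedHashMap<String, IndependentPair<Long, Long>> counts =
       readWordCountLines(outputPath, "/words");
   for (String word : counts.keySet()) {
     IndependentPair<Long, Long> countAndLine = counts.get(word);
     // firstObject() is the occurrence count, secondObject() the line index
     System.out.println(
         word + ": count=" + countAndLine.firstObject() + ", line=" + countAndLine.secondObject());
   }
 }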
  private void readData(Reader in) throws IOException {
    CSVParser creader = new CSVParser(in);
    this.datavalues = new HashMap<String, double[]>();
    // the first line holds the column titles; allocate one series per column
    this.titles = creader.getLine();
    for (String title : titles) {
      this.datavalues.put(title, new double[nentries]);
    }
    String[] line = null;
    DateTimeFormatter parser = DateTimeFormat.forPattern("YYYY-MM-dd");
    // fill the arrays from the last index backwards, so the first row in the
    // file ends up in the last slot of each series
    int entry = nentries - 1;
    while ((line = creader.getLine()) != null) {
      for (int i = 0; i < titles.length; i++) {
        String title = titles[i];
        if (i == 0) {
          // the first column is a date; store it as epoch milliseconds
          DateTime dt = parser.parseDateTime(line[i]);
          this.datavalues.get(title)[entry] = dt.getMillis();
        } else {
          this.datavalues.get(title)[entry] = Double.parseDouble(line[i]);
        }
      }
      entry--;
    }
  }