@Override
 public UpdateContainer doIt(Workspace workspace) throws CommandException {
   Worksheet wk = workspace.getWorksheet(worksheetId);
   SuperSelection selection = getSuperSelection(wk);
   String msg =
       String.format("begin, Time,%d, Worksheet,%s", System.currentTimeMillis(), worksheetId);
   logger.info(msg);
   // Get the HNode
   HashMap<String, HashMap<String, String>> rows = new HashMap<String, HashMap<String, String>>();
   HNodePath selectedPath = null;
   List<HNodePath> columnPaths = wk.getHeaders().getAllPaths();
   for (HNodePath path : columnPaths) {
     if (path.getLeaf().getId().equals(hNodeId)) {
       selectedPath = path;
     }
   }
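   // Note: selectedPath stays null if hNodeId matches no column; the code below assumes a match exists.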
   // Sample a random subset of the collected nodes
   Collection<Node> nodes = new ArrayList<Node>();
   wk.getDataTable().collectNodes(selectedPath, nodes, selection);
   HashSet<Integer> indSet = this.obtainIndexs(nodes.size());
   int index = 0;
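   // For each sampled row, record the original value as both source ("Org") and target ("Tar")
   // display strings; a later transformation presumably replaces the target values.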
   for (Node node : nodes) {
     if (indSet.contains(index)) {
       String id = node.getId();
       String originalVal = node.getValue().asString();
       HashMap<String, String> x = new HashMap<String, String>();
       x.put("Org", originalVal);
       x.put("Tar", originalVal);
       x.put("Orgdis", originalVal);
       x.put("Tardis", originalVal);
       rows.put(id, x);
     }
     index++;
   }
   msg = String.format("end, Time,%d, Worksheet,%s", System.currentTimeMillis(), worksheetId);
   logger.info(msg);
   return new UpdateContainer(new FetchResultUpdate(hNodeId, rows));
 }
  private void generateRDF(
      String wkname,
      String query,
      List<KR2RMLRDFWriter> writers,
      R2RMLMappingIdentifier id,
      String baseURI)
      throws IOException, JSONException, KarmaException, SQLException, ClassNotFoundException {
    logger.debug("Generating RDF...");

     WorksheetR2RMLJenaModelParser modelParser = new WorksheetR2RMLJenaModelParser(id);
     KR2RMLMapping mapping = modelParser.parse();

    AbstractJDBCUtil dbUtil = JDBCUtilFactory.getInstance(dbType);
    Connection conn = dbUtil.getConnection(hostname, portnumber, username, password, dBorSIDName);
    conn.setAutoCommit(false);

    java.sql.Statement stmt =
        conn.createStatement(
            java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY);
    stmt.setFetchSize(DATABASE_TABLE_FETCH_SIZE);
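     // A forward-only, read-only cursor with an explicit fetch size lets the JDBC driver stream
     // rows in batches instead of materializing the whole result set in memory.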

    ResultSet r = stmt.executeQuery(query);
     ResultSetMetaData meta = r.getMetaData();

    // Get the column names
    List<String> columnNames = new ArrayList<>();
    for (int i = 1; i <= meta.getColumnCount(); i++) {
      columnNames.add(meta.getColumnName(i));
    }

    // Prepare required Karma objects
    Workspace workspace = initializeWorkspace();

    RepFactory factory = workspace.getFactory();
    Worksheet wk = factory.createWorksheet(wkname, workspace, encoding);
    List<String> headersList = addHeaders(wk, columnNames, factory);

    int counter = 0;

    ArrayList<String> rowValues = null;
    while ((rowValues = dbUtil.parseResultSetRow(r)) != null) {
      // Generate RDF and create a new worksheet for every DATABASE_TABLE_FETCH_SIZE rows
      if (counter % DATABASE_TABLE_FETCH_SIZE == 0 && counter != 0) {
        generateRDFFromWorksheet(wk, workspace, mapping, writers, baseURI);
        logger.debug("Done for " + counter + " rows ...");
        removeWorkspace(workspace);

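         // Re-create the mapping parser, workspace and worksheet for the next batch of rows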
         modelParser = new WorksheetR2RMLJenaModelParser(id);
         mapping = modelParser.parse();
        workspace = initializeWorkspace();
        factory = workspace.getFactory();
        wk = factory.createWorksheet(wkname, workspace, encoding);
        headersList = addHeaders(wk, columnNames, factory);
      }

       // Add the data
      Table dataTable = wk.getDataTable();
      Row row = dataTable.addRow(factory);
      for (int i = 0; i < rowValues.size(); i++) {
        row.setValue(headersList.get(i), rowValues.get(i), factory);
      }

      counter++;
    }

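     // Generate RDF for the final (possibly partial) batch of rows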
    generateRDFFromWorksheet(wk, workspace, mapping, writers, baseURI);

     // Release the JDBC resources (result set, then statement, then connection)
     r.close();
     stmt.close();
     conn.close();
    logger.debug("done");
  }
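  // Sketch of a hypothetical call site (the names and values below are placeholders, not taken
  // from this class), assuming the enclosing command has already populated the JDBC fields
  // (dbType, hostname, portnumber, username, password, dBorSIDName):
  //
  //   List<KR2RMLRDFWriter> writers = ...;  // e.g. a single N3/Turtle writer
  //   R2RMLMappingIdentifier id = ...;      // identifies the published R2RML model
  //   generateRDF("employees", "SELECT * FROM employees", writers, id, "http://example.org/");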
  @Override
  public UpdateContainer doIt(Workspace workspace) throws CommandException {
    Worksheet wk = workspace.getWorksheet(worksheetId);
    SuperSelection selection = getSuperSelection(wk);
    String msg =
        String.format(
            "Gen rule start,Time,%d, Worksheet,%s", System.currentTimeMillis(), worksheetId);
    logger.info(msg);
    // Get the HNode
    HashMap<String, String> rows = new HashMap<String, String>();
    HashMap<String, Integer> amb = new HashMap<String, Integer>();
    HNodePath selectedPath = null;
    List<HNodePath> columnPaths = wk.getHeaders().getAllPaths();
    for (HNodePath path : columnPaths) {
      if (path.getLeaf().getId().equals(hNodeId)) {
        selectedPath = path;
      }
    }
    Collection<Node> nodes = new ArrayList<Node>();
    wk.getDataTable().collectNodes(selectedPath, nodes, selection);
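    // Keep only the nodes whose ids were passed to this command (nodeIds) and collect their values.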
    for (Node node : nodes) {
      String id = node.getId();
      if (!this.nodeIds.contains(id)) continue;
      String originalVal = node.getValue().asString();
      rows.put(id, originalVal);
      this.compResultString += originalVal + "\n";
      calAmbScore(id, originalVal, amb);
    }
    RamblerValueCollection vc = new RamblerValueCollection(rows);
    HashMap<String, Vector<String[]>> expFeData = new HashMap<String, Vector<String[]>>();
    inputs = new RamblerTransformationInputs(examples, vc);
    // generate the program
    boolean results = false;
    int iterNum = 0;
    RamblerTransformationOutput rtf = null;
    // initialize the vocabulary
    Iterator<String> iterx = inputs.getInputValues().getValues().iterator();
    Vector<String> v = new Vector<String>();
    int vb_cnt = 0;
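    // Build the dictionary from at most the first 30 input values, presumably to bound its size.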
    while (iterx.hasNext() && vb_cnt < 30) {
      String eString = iterx.next();
      v.add(eString);
      vb_cnt++;
    }
    Vector<String> vob = UtilTools.buildDict(v);
    inputs.setVocab(vob.toArray(new String[vob.size()]));
    while (iterNum < 1 && !results) // try to find a program within iterNum iterations
    {
      rtf = new RamblerTransformationOutput(inputs);
      if (rtf.getTransformations().keySet().size() > 0) {
        results = true;
      }
      iterNum++;
    }
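    // rtf is always assigned here because the loop above runs at least once (iterNum starts at 0).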
    Iterator<String> iter = rtf.getTransformations().keySet().iterator();
    // resdata maps node id -> {Org, Tar, Orgdis, Tardis} display strings
    HashMap<String, HashMap<String, String>> resdata =
        new HashMap<String, HashMap<String, String>>();
    HashSet<String> keys = new HashSet<String>();
    while (iter.hasNext()) {
      String tpid = iter.next();
      ValueCollection rvco = rtf.getTransformedValues_debug(tpid);
      if (rvco == null) continue;
      // construct the display data
      HashMap<String, String[]> xyzHashMap = new HashMap<String, String[]>();
      for (String key : rvco.getNodeIDs()) {
        HashMap<String, String> dict = new HashMap<String, String>();
        // add to the example selection
        boolean isExp = false;
        String org = vc.getValue(key);
        String classLabel = rvco.getClass(key);
        String pretar = rvco.getValue(key);
        String dummyValue = pretar;
        if (pretar.contains("_FATAL_ERROR_")) {
          dummyValue = org;
          // dummyValue = "#ERROR";
        }
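        // StringColorCode is expected to fill dict with the Org/Tar/Orgdis/Tardis display strings,
        // highlighting the differences between the original and transformed values.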
        try {
          UtilTools.StringColorCode(org, dummyValue, dict);
        } catch (Exception ex) {
          logger.info(String.format("ColorCoding Exception%s, %s", org, dummyValue));
          // set dict
          dict.put("Org", org);
          dict.put("Tar", "ERROR");
          dict.put("Orgdis", org);
          dict.put("Tardis", "ERROR");
        }
        for (TransformationExample exp : examples) {
          if (exp.getNodeId().equals(key)) {
            if (!expFeData.containsKey(classLabel)) {
              Vector<String[]> vstr = new Vector<String[]>();
              String[] texp = {dict.get("Org"), pretar};
              vstr.add(texp);
              expFeData.put(classLabel, vstr);
            } else {
              String[] texp = {dict.get("Org"), pretar};
              expFeData.get(classLabel).add(texp);
            }
            isExp = true;
          }
        }

        if (!isExp) {
          String[] pair = {dict.get("Org"), dict.get("Tar"), pretar, classLabel};
          xyzHashMap.put(key, pair);
        }
        resdata.put(key, dict);
      }
      if (!rtf.nullRule) keys.add(getBestExample(xyzHashMap, expFeData));
    }
    // Build the example string for logging and pick the recommended original value
    String vars = "";
    String expstr = "";
    String recmd = "";
    for (TransformationExample x : examples) {
      expstr += String.format("%s|%s", x.getBefore(), x.getAfter());
    }
    expstr += "|";
    if (rtf.nullRule) {
      keys.clear();
      // keys.add("-2"); // "-2 indicates null rule"
    }
    if (!resdata.isEmpty() && !rtf.nullRule) {
      recmd = resdata.get(keys.iterator().next()).get("Org");
    }
    msg =
        String.format(
            "Gen rule end, Time,%d, Worksheet,%s,Examples:%s,Recmd:%s",
            System.currentTimeMillis(), worksheetId, expstr, recmd);
    logger.info(msg);
    return new UpdateContainer(new CleaningResultUpdate(hNodeId, resdata, vars, keys));
  }