@Override
  public UpdateContainer doIt(Workspace workspace) throws CommandException {
    ModelingConfiguration modelingConfiguration =
        ModelingConfigurationRegistry.getInstance()
            .getModelingConfiguration(
                WorkspaceKarmaHomeRegistry.getInstance().getKarmaHome(workspace.getId()));
    TripleStoreUtil utilObj = new TripleStoreUtil();
    boolean showModelsWithoutMatching = modelingConfiguration.isShowModelsWithoutMatching();
    try {
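      // Fetch metadata for all published models; the map holds parallel lists
      // (names, URLs, publish times, contexts, input columns) indexed in the same order.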
      HashMap<String, List<String>> metadata =
          utilObj.getMappingsWithMetadata(TripleStoreUrl, context);
      RepFactory factory = workspace.getFactory();
      List<String> model_Names = metadata.get("model_names");
      List<String> model_Urls = metadata.get("model_urls");
      List<String> model_Times = metadata.get("model_publishtimes");
      List<String> model_Contexts = metadata.get("model_contexts");
      List<String> model_inputColumns = metadata.get("model_inputcolumns");
      final List<JSONObject> list = new ArrayList<>();
      Set<String> worksheetcolumns = new HashSet<>();
      if (worksheetId != null && !worksheetId.trim().isEmpty()) {
        HTable htable = factory.getWorksheet(worksheetId).getHeaders();
        getHNodesForWorksheet(htable, worksheetcolumns, factory);
      }
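      // Walk the parallel metadata lists in lock step, building one JSON object per model.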
      Iterator<String> nameitr = model_Names.iterator();
      Iterator<String> urlitr = model_Urls.iterator();
      Iterator<String> timeitr = model_Times.iterator();
      Iterator<String> contextitr = model_Contexts.iterator();
      Iterator<String> inputitr = model_inputColumns.iterator();
      while (nameitr.hasNext()
          && urlitr.hasNext()
          && timeitr.hasNext()
          && contextitr.hasNext()
          && inputitr.hasNext()) {
        JSONObject obj = new JSONObject();
        Set<String> inputs = new HashSet<>();
        obj.put("name", nameitr.next());
        obj.put("url", urlitr.next());
        obj.put("publishTime", timeitr.next());
        obj.put("context", contextitr.next());
        String columns = inputitr.next();
        boolean added = false;
        if (columns != null && !columns.isEmpty()) {
          JSONArray array = new JSONArray(columns);
          for (int i = 0; i < array.length(); i++) {
            inputs.add(array.get(i).toString());
          }
        } else if (showModelsWithoutMatching) {
          // No input-column metadata for this model: optionally include it anyway.
          list.add(obj);
          added = true;
        }
        if (worksheetId != null && !worksheetId.trim().isEmpty()) {
          // Keep only the input columns that also exist in the current worksheet.
          inputs.retainAll(worksheetcolumns);
          obj.put("inputColumns", inputs.size());
        } else {
          obj.put("inputColumns", 0);
        }
        // Add the model if it matches at least one worksheet column or no worksheet filter
        // was given; the "added" flag prevents adding the same model twice.
        if (!added && (!inputs.isEmpty() || worksheetId == null || worksheetId.trim().isEmpty())) {
          list.add(obj);
        }
      }

      // Sort so that models matching the most worksheet columns come first.
      Collections.sort(
          list,
          new Comparator<JSONObject>() {

            @Override
            public int compare(JSONObject a, JSONObject b) {
              // Descending order; Integer.compare avoids overflow of a plain subtraction.
              return Integer.compare(b.getInt("inputColumns"), a.getInt("inputColumns"));
            }
          });

      return new UpdateContainer(
          new AbstractUpdate() {
            @Override
            public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
              try {
                JSONArray array = new JSONArray();
                for (JSONObject obj : list) {
                  array.put(obj);
                }
                pw.print(array.toString());
              } catch (Exception e) {
                logger.error("Error generating JSON!", e);
              }
            }
          });
    } catch (Exception e) {
      return new UpdateContainer(
          new ErrorUpdate("Unable to get mappings with metadata: " + e.getMessage()));
    }
  }
  @Override
  public UpdateContainer doIt(Workspace workspace) {
    Worksheet worksheet = workspace.getWorksheet(worksheetId);
    SuperSelection selection = getSuperSelection(worksheet);
    String worksheetName = worksheet.getTitle();
    try {

      // preparing model file name
      final String modelFileName =
          workspace.getCommandPreferencesId() + worksheetId + "-" + worksheetName + "-model.ttl";
      final String modelFileLocalPath =
          ServletContextParameterMap.getParameterValue(ContextParameter.R2RML_PUBLISH_DIR)
              + modelFileName;

      File f = new File(modelFileLocalPath);
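      // f is only used below to decide whether the model needs to be (re)published.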

      // preparing the graphUri where the model is published in the triple store
      String graphName =
          worksheet
              .getMetadataContainer()
              .getWorksheetProperties()
              .getPropertyValue(Property.graphName);
      if (graphName == null || graphName.isEmpty()) {
        SimpleDateFormat sdf = new SimpleDateFormat("dd-MMM-yyyy-kkmmssS");
        String ts = sdf.format(Calendar.getInstance().getTime());
        graphName =
            "http://localhost/"
                + workspace.getCommandPreferencesId()
                + "/"
                + worksheetId
                + "/model/"
                + ts;
        worksheet
            .getMetadataContainer()
            .getWorksheetProperties()
            .setPropertyValue(Property.graphName, graphName);
      }

      // If the model is not published, publish it!
      if (!f.exists() || !f.isFile()) {
        GenerateR2RMLModelCommandFactory factory = new GenerateR2RMLModelCommandFactory();
        GenerateR2RMLModelCommand cmd =
            (GenerateR2RMLModelCommand)
                factory.createCommand(
                    workspace,
                    worksheetId,
                    TripleStoreUtil.defaultModelsRepoUrl,
                    graphName,
                    selection.getName());
        cmd.doIt(workspace);
      } else {
        // If the model was published more than 30 minutes ago, publish it again, just to be sure.
        long diff = Calendar.getInstance().getTimeInMillis() - f.lastModified();
        if ((diff / 1000L / 60L) > 30) {
          f.delete();
          GenerateR2RMLModelCommandFactory factory = new GenerateR2RMLModelCommandFactory();
          GenerateR2RMLModelCommand cmd =
              (GenerateR2RMLModelCommand)
                  factory.createCommand(
                      workspace,
                      worksheetId,
                      TripleStoreUtil.defaultModelsRepoUrl,
                      graphName,
                      selection.getName());
          cmd.doIt(workspace);
        }
      }

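      // Build a SPARQL query that retrieves the column <-> class mappings reachable from
      // this node in the published R2RML model.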
      StringBuilder query =
          new StringBuilder(
              "prefix rr: <http://www.w3.org/ns/r2rml#> prefix km-dev: <http://isi.edu/integration/karma/dev#> ");

      /* ****** this is the query for the list of columns.

      PREFIX km-dev: <http://isi.edu/integration/karma/dev#>
      PREFIX rr: <http://www.w3.org/ns/r2rml#>

      select distinct ?class where  {
        {
          ?x1 rr:subjectMap/km-dev:alignmentNodeId "------- The full url of the column/class --------".
          ?x1 rr:predicateObjectMap/rr:objectMap/rr:column ?column .
      	?x1 rr:subjectMap/rr:predicate ?class .
        }
        UNION
        {
          ?x1 rr:subjectMap/km-dev:alignmentNodeId "------- The full url of the column/class --------".
      	?x1 (rr:predicateObjectMap/rr:objectMap/rr:parentTriplesMap)* ?x2 .
      	?x2 rr:predicateObjectMap/rr:objectMap/rr:column ?column .
      	?x2 rr:predicateObjectMap/rr:predicate ?class .
        }
      }
      */
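      // Note: the query built below also projects ?column and, in the UNION branch, binds
      // ?column and ?class through the same rr:predicateObjectMap (?x3).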

      query.append("select distinct ?class ?column where { ");
      if (graphName != null && !graphName.trim().isEmpty()) {
        query.append(" graph  <" + graphName + "> { ");
      }
      query
          .append("{ ?x1 rr:subjectMap/km-dev:alignmentNodeId \"")
          .append(this.nodeId)
          .append(
              "\" . ?x1 rr:predicateObjectMap/rr:objectMap/rr:column ?column . ?x1 rr:subjectMap/rr:predicate ?class .")
          .append(" } UNION { ")
          .append("?x1 rr:subjectMap/km-dev:alignmentNodeId \"")
          .append(this.nodeId)
          .append("\" . ?x1 (rr:predicateObjectMap/rr:objectMap/rr:parentTriplesMap)* ?x2 .")
          .append(" ?x2 rr:predicateObjectMap ?x3 . ")
          .append(" ?x3 rr:objectMap/rr:column ?column . ?x3 rr:predicate ?class .")
          .append(" } }");
      if (graphName != null && !graphName.trim().isEmpty()) {
        query.append(" } ");
      }
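      // Run the query against the models repository, requesting SPARQL JSON results.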
      logger.info("Query: " + query.toString());
      String sData =
          TripleStoreUtil.invokeSparqlQuery(
              query.toString(), TripleStoreUtil.defaultModelsRepoUrl, "application/json", null);
      if (sData == null || sData.isEmpty()) {
        logger.error("Empty response object from query: " + query);
      }
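      // Parse the SPARQL JSON bindings into a column -> class map, ignoring duplicate columns.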
      HashMap<String, String> cols = new HashMap<>();
      try {
        JSONObject obj1 = new JSONObject(sData);
        JSONArray arr = obj1.getJSONObject("results").getJSONArray("bindings");
        for (int i = 0; i < arr.length(); i++) {
          String colName = arr.getJSONObject(i).getJSONObject("column").getString("value");
          String colValue = arr.getJSONObject(i).getJSONObject("class").getString("value");
          if (cols.containsKey(colName)) {
            logger.error("Duplicate Column <-> property mapping. " + colName + " <=> " + colValue);
          } else {
            cols.put(colName, colValue);
          }
        }
      } catch (Exception e2) {
        logger.error("Error in parsing json response", e2);
      }

      logger.info("Total Columns fetched : " + cols.size());
      final HashMap<String, String> columns = cols;
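      // Send the column -> class mapping back to the client as a FetchColumnUpdate.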
      return new UpdateContainer(
          new AbstractUpdate() {

            @Override
            public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
              JSONObject obj = new JSONObject();
              try {
                JSONArray colList = new JSONArray();
                for (String columnName : columns.keySet()) {
                  JSONObject o = new JSONObject();
                  o.put("name", columnName);
                  o.put("url", columns.get(columnName));
                  colList.put(o);
                }
                obj.put("updateType", "FetchColumnUpdate");
                obj.put("columns", colList);
                obj.put("rootId", nodeId);
                pw.println(obj.toString());
              } catch (JSONException e) {
                logger.error("Error occurred while fetching worksheet properties!", e);
              }
            }
          });

    } catch (Exception e) {
      String msg = "Error occurred while fetching columns!";
      logger.error(msg, e);
      return new UpdateContainer(new ErrorUpdate(msg));
    }
  }