// Example #1
 @Override
 public UpdateContainer undoIt(Workspace workspace) {
   // Roll the cell back to the value/status captured in doIt, then push the
   // refreshed worksheet plus an explicit node-changed notification.
   SuperSelection sel = getSuperSelection(workspace);
   Node node = workspace.getFactory().getNode(nodeIdArg);
   node.setValue(previousValue, previousStatus, workspace.getFactory());

   UpdateContainer updates =
       WorksheetUpdateFactory.createWorksheetHierarchicalAndCleaningResultsUpdates(
           worksheetId, sel);
   updates.add(new NodeChangedUpdate(worksheetId, nodeIdArg, previousValue, previousStatus));
   return updates;
 }
  /**
   * Applies the recorded command history to the worksheet and then generates RDF for its rows.
   *
   * @param wk worksheet to generate RDF from
   * @param workspace workspace owning the worksheet
   * @param mapping KR2RML mapping driving the generation
   * @param writers sinks that receive the generated triples
   * @param baseURI base URI for generated resources (passed through; unused here)
   */
  private void generateRDFFromWorksheet(
      Worksheet wk,
      Workspace workspace,
      KR2RMLMapping mapping,
      List<KR2RMLRDFWriter> writers,
      String baseURI)
      throws IOException, JSONException, KarmaException {
    // Accumulates any errors raised while generating RDF.
    ErrorReport errorReport = new ErrorReport();

    this.applyHistoryToWorksheet(workspace, wk, mapping);

    // Resolve the named selection, falling back to the default one.
    SuperSelection selection = SuperSelectionManager.DEFAULT_SELECTION;
    if (selectionName != null && !selectionName.trim().isEmpty()) {
      selection = wk.getSuperSelectionManager().getSuperSelection(selectionName);
    }
    if (selection == null) {
      return; // named selection does not exist; nothing to generate
    }

    // Build the generator and emit RDF for the worksheet rows.
    KR2RMLWorksheetRDFGenerator rdfGen =
        new KR2RMLWorksheetRDFGenerator(
            wk,
            workspace.getFactory(),
            workspace.getOntologyManager(),
            writers,
            false,
            mapping,
            errorReport,
            selection);
    rdfGen.generateRDF(false);
  }
// Example #3
 @Override
 public UpdateContainer doIt(Workspace workspace) throws CommandException {
   Node node = workspace.getFactory().getNode(nodeIdArg);
   SuperSelection sel = getSuperSelection(workspace);
   // Validate the precondition BEFORE mutating any command state. The original
   // cleared input/output columns and captured previousValue/previousStatus
   // first, leaving the command half-initialized when the check failed.
   if (node.hasNestedTable()) {
     throw new CommandException(
         this, "Cell " + nodeIdArg + " has a nested table. It cannot be edited.");
   }
   inputColumns.clear();
   outputColumns.clear();
   inputColumns.add(node.getHNodeId());
   outputColumns.add(node.getHNodeId());
   // Remember the old value/status so undoIt can restore them.
   previousValue = node.getValue();
   previousStatus = node.getStatus();
   node.setValue(newValueArg, Node.NodeStatus.edited, workspace.getFactory());
   WorksheetUpdateFactory.detectSelectionStatusChange(worksheetId, workspace, this);
   UpdateContainer uc =
       WorksheetUpdateFactory.createWorksheetHierarchicalAndCleaningResultsUpdates(
           worksheetId, sel);
   uc.add(new NodeChangedUpdate(worksheetId, nodeIdArg, newValueArg, Node.NodeStatus.edited));
   return uc;
 }
  @Override
  public UpdateContainer doIt(Workspace workspace) throws CommandException {
    Worksheet worksheet = workspace.getWorksheet(worksheetId);
    RepFactory factory = workspace.getFactory();
    SuperSelection superSel = getSuperSelection(worksheet);
    // Re-evaluate the current selection for the table containing this column.
    HTable hTable = factory.getHTable(factory.getHNode(hNodeId).getHTableId());
    Selection currentSel = superSel.getSelection(hTable.getId());
    if (currentSel != null) {
      currentSel.updateSelection();
    }
    CommandHistory history = workspace.getCommandHistory();
    // Collect the selection-operating commands already recorded for this worksheet.
    List<Command> tmp =
        gatherAllOperateSelectionCommands(
            history.getCommandsFromWorksheetId(worksheetId), workspace);
    if (tmp.size() > 0) {
      // Build the JSON input for a synthetic OperateSelection command
      // (intersect with the default selection code) that is prepended to the
      // gathered commands in the history.
      JSONArray inputJSON = new JSONArray();
      inputJSON.put(
          CommandInputJSONUtil.createJsonObject(
              "worksheetId", worksheetId, ParameterType.worksheetId));
      inputJSON.put(
          CommandInputJSONUtil.createJsonObject("hNodeId", hNodeId, ParameterType.hNodeId));
      inputJSON.put(
          CommandInputJSONUtil.createJsonObject(
              "operation", Operation.Intersect.name(), ParameterType.other));
      inputJSON.put(
          CommandInputJSONUtil.createJsonObject(
              "pythonCode", SelectionManager.defaultCode, ParameterType.other));
      inputJSON.put(CommandInputJSONUtil.createJsonObject("onError", "false", ParameterType.other));
      inputJSON.put(
          CommandInputJSONUtil.createJsonObject(
              "selectionName", superSel.getName(), ParameterType.other));
      Command t = null;
      try {
        t = new OperateSelectionCommandFactory().createCommand(inputJSON, workspace);
      } catch (Exception e) {
        // NOTE(review): exception deliberately(?) swallowed — t stays null and
        // the synthetic command is simply not added. Consider logging this.
      }
      if (t != null) history._getHistory().add(t);
      history._getHistory().addAll(tmp);
    }
    UpdateContainer uc =
        WorksheetUpdateFactory.createWorksheetHierarchicalAndCleaningResultsUpdates(
            worksheetId, superSel);
    uc.add(new HistoryUpdate(history));
    return uc;
  }
  /**
   * Parses the R2RML model file associated with this worksheet and returns the
   * command history embedded in the mapping. When the model was produced by an
   * older KR2RML version, an informational update asking the user to republish
   * is added to the given container.
   *
   * @param workspace workspace owning the worksheet
   * @param uc container that receives the version-mismatch InfoUpdate, if any
   * @return the worksheet command history stored in the mapping
   */
  private JSONArray extractHistoryFromModel(Workspace workspace, UpdateContainer uc)
      throws RepositoryException, RDFParseException, IOException, JSONException, KarmaException {
    Worksheet ws = workspace.getFactory().getWorksheet(worksheetId);
    R2RMLMappingIdentifier id =
        new R2RMLMappingIdentifier(ws.getTitle(), r2rmlModelFile.toURI().toURL());
    KR2RMLMapping mapping = new WorksheetR2RMLJenaModelParser(id).parse();

    KR2RMLVersion version = mapping.getVersion();
    if (version.compareTo(KR2RMLVersion.current) < 0) {
      String message =
          "Model version is "
              + version.toString()
              + ".  Current version is "
              + KR2RMLVersion.current.toString()
              + ".  Please publish it again.";
      uc.add(new InfoUpdate(message));
    }
    return mapping.getWorksheetHistory();
  }
// Example #6
 /*
  * Pedro
  *
  * Return an HNodeId in a nice format for printing on command logs.
  */
 protected String formatHNodeId(Workspace workspace, String hNodeId) {
   String columnName = workspace.getFactory().getColumnName(hNodeId);
   return new StringBuilder(hNodeId).append(" (").append(columnName).append(")").toString();
 }
  /**
   * Streams the rows of a database query into Karma worksheets and generates RDF
   * from them, one batch of DATABASE_TABLE_FETCH_SIZE rows at a time so large
   * result sets are never held in memory at once. The workspace/worksheet is
   * torn down and rebuilt between batches.
   *
   * <p>JDBC resources are managed with try-with-resources so the connection,
   * statement and result set are released even when RDF generation throws —
   * the original closed them only on the success path (and closed the
   * connection before the statement).
   *
   * @param wkname name for the transient worksheets
   * @param query SQL query producing the rows to convert
   * @param writers sinks that receive the generated triples
   * @param id identifier of the R2RML mapping to apply
   * @param baseURI base URI for generated resources (forwarded to the per-batch helper)
   */
  private void generateRDF(
      String wkname,
      String query,
      List<KR2RMLRDFWriter> writers,
      R2RMLMappingIdentifier id,
      String baseURI)
      throws IOException, JSONException, KarmaException, SQLException, ClassNotFoundException {
    logger.debug("Generating RDF...");

    WorksheetR2RMLJenaModelParser parserTest = new WorksheetR2RMLJenaModelParser(id);
    KR2RMLMapping mapping = parserTest.parse();

    AbstractJDBCUtil dbUtil = JDBCUtilFactory.getInstance(dbType);
    try (Connection conn =
        dbUtil.getConnection(hostname, portnumber, username, password, dBorSIDName)) {
      conn.setAutoCommit(false);

      try (java.sql.Statement stmt =
          conn.createStatement(
              java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {
        // Forward-only cursor with a bounded fetch size keeps memory flat.
        stmt.setFetchSize(DATABASE_TABLE_FETCH_SIZE);

        try (ResultSet r = stmt.executeQuery(query)) {
          ResultSetMetaData meta = r.getMetaData();

          // Get the column names
          List<String> columnNames = new ArrayList<>();
          for (int i = 1; i <= meta.getColumnCount(); i++) {
            columnNames.add(meta.getColumnName(i));
          }

          // Prepare required Karma objects
          Workspace workspace = initializeWorkspace();
          RepFactory factory = workspace.getFactory();
          Worksheet wk = factory.createWorksheet(wkname, workspace, encoding);
          List<String> headersList = addHeaders(wk, columnNames, factory);

          int counter = 0;
          ArrayList<String> rowValues = null;
          while ((rowValues = dbUtil.parseResultSetRow(r)) != null) {
            // Generate RDF and create a new worksheet for every DATABASE_TABLE_FETCH_SIZE rows
            if (counter % DATABASE_TABLE_FETCH_SIZE == 0 && counter != 0) {
              generateRDFFromWorksheet(wk, workspace, mapping, writers, baseURI);
              logger.debug("Done for " + counter + " rows ...");
              removeWorkspace(workspace);

              // Re-parse the mapping and rebuild the workspace for the next batch.
              parserTest = new WorksheetR2RMLJenaModelParser(id);
              mapping = parserTest.parse();
              workspace = initializeWorkspace();
              factory = workspace.getFactory();
              wk = factory.createWorksheet(wkname, workspace, encoding);
              headersList = addHeaders(wk, columnNames, factory);
            }

            // Add the row's data to the current worksheet.
            Table dataTable = wk.getDataTable();
            Row row = dataTable.addRow(factory);
            for (int i = 0; i < rowValues.size(); i++) {
              row.setValue(headersList.get(i), rowValues.get(i), factory);
            }

            counter++;
          }

          // Flush the final (possibly partial) batch.
          generateRDFFromWorksheet(wk, workspace, mapping, writers, baseURI);
        }
      }
    }
    logger.debug("done");
  }
  /**
   * Fetches model metadata from the triple store, filters models by overlap with
   * the current worksheet's columns, and returns them as a JSON list sorted by
   * the number of matching input columns (descending).
   *
   * <p>Fixes over the original: the model is added to the result list exactly
   * once (previously a model with no recorded input columns could be added
   * twice when showModelsWithoutMatching was on and no worksheet was given),
   * the comparator uses Integer.compare instead of overflow-prone subtraction,
   * and the catch-all logs the failure instead of discarding it.
   */
  @Override
  public UpdateContainer doIt(Workspace workspace) throws CommandException {
    ModelingConfiguration modelingConfiguration =
        ModelingConfigurationRegistry.getInstance()
            .getModelingConfiguration(
                WorkspaceKarmaHomeRegistry.getInstance().getKarmaHome(workspace.getId()));
    TripleStoreUtil utilObj = new TripleStoreUtil();
    boolean showModelsWithoutMatching = modelingConfiguration.isShowModelsWithoutMatching();
    try {
      HashMap<String, List<String>> metadata =
          utilObj.getMappingsWithMetadata(TripleStoreUrl, context);
      RepFactory factory = workspace.getFactory();
      List<String> model_Names = metadata.get("model_names");
      List<String> model_Urls = metadata.get("model_urls");
      List<String> model_Times = metadata.get("model_publishtimes");
      List<String> model_Contexts = metadata.get("model_contexts");
      List<String> model_inputColumns = metadata.get("model_inputcolumns");
      final List<JSONObject> list = new ArrayList<>();
      // Column ids of the current worksheet, used to compute overlap.
      Set<String> worksheetcolumns = new HashSet<>();
      if (worksheetId != null && !worksheetId.trim().isEmpty()) {
        HTable htable = factory.getWorksheet(worksheetId).getHeaders();
        getHNodesForWorksheet(htable, worksheetcolumns, factory);
      }
      Iterator<String> nameitr = model_Names.iterator();
      Iterator<String> urlitr = model_Urls.iterator();
      Iterator<String> timeitr = model_Times.iterator();
      Iterator<String> contextitr = model_Contexts.iterator();
      Iterator<String> inputitr = model_inputColumns.iterator();
      // The metadata lists are parallel; iterate them in lock-step.
      while (nameitr.hasNext()
          && urlitr.hasNext()
          && timeitr.hasNext()
          && contextitr.hasNext()
          && inputitr.hasNext()) {
        JSONObject obj = new JSONObject();
        Set<String> inputs = new HashSet<>();
        obj.put("name", nameitr.next());
        obj.put("url", urlitr.next());
        obj.put("publishTime", timeitr.next());
        obj.put("context", contextitr.next());
        String columns = inputitr.next();
        boolean hasRecordedColumns = columns != null && !columns.isEmpty();
        if (hasRecordedColumns) {
          JSONArray array = new JSONArray(columns);
          for (int i = 0; i < array.length(); i++) inputs.add(array.get(i).toString());
        }
        // Always set "inputColumns" before the model can enter the list — the
        // comparator below reads it on every element.
        if (worksheetId != null && !worksheetId.isEmpty()) {
          inputs.retainAll(worksheetcolumns);
          obj.put("inputColumns", inputs.size());
        } else obj.put("inputColumns", 0);
        // Include the model when: it matches at least one worksheet column, no
        // worksheet filter is active, or it has no recorded columns but the
        // configuration says to show such models anyway. Each model is added once.
        boolean include =
            !inputs.isEmpty()
                || (worksheetId == null || worksheetId.trim().isEmpty())
                || (!hasRecordedColumns && showModelsWithoutMatching);
        if (include) list.add(obj);
      }

      Collections.sort(
          list,
          new Comparator<JSONObject>() {

            @Override
            public int compare(JSONObject a, JSONObject b) {
              // Integer.compare avoids the overflow risk of b - a.
              return Integer.compare(b.getInt("inputColumns"), a.getInt("inputColumns"));
            }
          });

      return new UpdateContainer(
          new AbstractUpdate() {
            @Override
            public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
              try {
                JSONArray array = new JSONArray();
                for (JSONObject obj : list) {
                  array.put(obj);
                }
                pw.print(array.toString());
              } catch (Exception e) {
                logger.error("Error generating JSON!", e);
              }
            }
          });
    } catch (Exception e) {
      // Keep the user-facing error, but preserve the failure in the log.
      logger.error("Unable to get mappings with metadata", e);
      return new UpdateContainer(
          new ErrorUpdate("Unable to get mappings with metadata: " + e.getMessage()));
    }
  }
 /** Returns true when both HNode ids exist and belong to the same HTable. */
 private boolean isSamehTableId(String hNodeId1, String hNodeId2, Workspace workspace) {
   HNode first = workspace.getFactory().getHNode(hNodeId1);
   HNode second = workspace.getFactory().getHNode(hNodeId2);
   return first != null && second != null && first.getHTableId().equals(second.getHTableId());
 }
  /**
   * Collects the values of the selected column, posts them to the external
   * cleaning service, and returns the service's chart data to the client.
   *
   * <p>Fixes over the original: the response reader is closed via
   * try-with-resources (it previously leaked on every call), and the header
   * scan stops at the first matching path.
   */
  @Override
  public UpdateContainer doIt(Workspace workspace) throws CommandException {
    Worksheet worksheet = workspace.getWorksheet(worksheetId);
    SuperSelection selection = getSuperSelection(worksheet);

    // Locate the column path whose leaf is the selected header node.
    HNodePath selectedPath = null;
    for (HNodePath path : worksheet.getHeaders().getAllPaths()) {
      if (path.getLeaf().getId().equals(hNodeId)) {
        selectedPath = path;
        break; // first match wins; no need to scan the rest
      }
    }
    // NOTE(review): selectedPath may still be null if hNodeId matches no path;
    // collectNodes' behavior for a null path is not visible here — confirm.
    Collection<Node> nodes = new ArrayList<Node>();
    workspace
        .getFactory()
        .getWorksheet(worksheetId)
        .getDataTable()
        .collectNodes(selectedPath, nodes, selection);

    try {
      // Build the request payload: one {id, value} record per cell.
      JSONArray requestJsonArray = new JSONArray();
      for (Node node : nodes) {
        JSONObject jsonRecord = new JSONObject();
        jsonRecord.put("id", node.getId());
        String originalVal = node.getValue().asString();
        jsonRecord.put("value", originalVal == null ? "" : originalVal);
        requestJsonArray.put(jsonRecord);
      }
      String jsonString = requestJsonArray.toString();

      String url =
          ServletContextParameterMap.getParameterValue(ContextParameter.CLEANING_SERVICE_URL);

      // POST the records to the cleaning service as a form-encoded body.
      HttpClient httpclient = new DefaultHttpClient();
      List<NameValuePair> formparams = new ArrayList<NameValuePair>();
      formparams.add(new BasicNameValuePair("json", jsonString));
      HttpPost httppost = new HttpPost(new URI(url));
      httppost.setEntity(new UrlEncodedFormEntity(formparams, "UTF-8"));
      HttpResponse response = httpclient.execute(httppost);
      HttpEntity entity = response.getEntity();

      StringBuffer out = new StringBuffer();
      if (entity != null) {
        // try-with-resources closes the reader (and the underlying entity
        // stream) even when readLine throws — the original never closed it.
        try (BufferedReader buf =
            new BufferedReader(new InputStreamReader(entity.getContent()))) {
          String line;
          while ((line = buf.readLine()) != null) {
            out.append(line);
          }
        }
      }
      final JSONObject data1 = new JSONObject(out.toString());
      return new UpdateContainer(
          new AbstractUpdate() {

            @Override
            public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
              JSONObject response = new JSONObject();
              try {
                response.put("updateType", "CleaningServiceOutput");
                response.put("chartData", data1);
                response.put("hNodeId", hNodeId);
              } catch (JSONException e) {
                pw.print("Error");
              }

              pw.print(response.toString());
            }
          });
    } catch (Exception e) {
      // NOTE(review): prefer a class logger over printStackTrace if one exists.
      e.printStackTrace();
      return new UpdateContainer(new ErrorUpdate("Error!"));
    }
  }