  /** Parses and validates the target query, then creates or updates the mapping in the OBDA model. */
  private void insertMapping(String target, String source) {
    List<Function> targetQuery = parse(target);
    if (targetQuery != null) {
      final boolean isValid = validator.validate(targetQuery);
      if (isValid) {
        try {
          OBDAModel mapcon = obdaModel;
          URI sourceID = dataSource.getSourceID();

          OBDASQLQuery body = dataFactory.getSQLQuery(source);
          OBDAMappingAxiom newmapping =
              dataFactory.getRDBMSMappingAxiom(txtMappingID.getText().trim(), body, targetQuery);

          if (mapping == null) {
            // Case when we are creating a new mapping
            mapcon.addMapping(sourceID, newmapping);
          } else {
            // Case when we are updating an existing mapping
            mapcon.updateMappingsSourceQuery(sourceID, mapping.getId(), body);
            mapcon.updateTargetQueryMapping(sourceID, mapping.getId(), targetQuery);
            mapcon.updateMapping(sourceID, mapping.getId(), txtMappingID.getText().trim());
          }
        } catch (DuplicateMappingException e) {
          JOptionPane.showMessageDialog(
              this, "Error while inserting mapping: " + e.getMessage() + " is already taken");
          return;
        }
        parent.setVisible(false);
        parent.dispose();

      } else {
        // List of invalid predicates found by the validator.
        Vector<String> invalidPredicates = validator.getInvalidPredicates();
        StringBuilder invalidList = new StringBuilder();
        for (String predicate : invalidPredicates) {
          invalidList.append("- ").append(predicate).append("\n");
        }
        JOptionPane.showMessageDialog(
            this,
            "The following predicates are not known to the ontology:\n" + invalidList,
            "New Mapping",
            JOptionPane.WARNING_MESSAGE);
      }
    }
  }
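  /** Renders the mapping axiom as "sourceQuery ==> targetQuery". */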
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(sourceQuery.toString());
    sb.append(" ==> ");
    sb.append(targetQuery.toString());
    return sb.toString();
  }
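  /**
   * Translates each OBDA mapping axiom in the model into Datalog rules: the parsed source SQL
   * query supplies the rule body, and every atom of the target query becomes a rule head.
   */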
  public DatalogProgram constructDatalogProgram() {
    DatalogProgram datalog = dfac.getDatalogProgram();
    LinkedList<String> errorMessage = new LinkedList<String>();
    for (OBDAMappingAxiom axiom : mappingList) {
      try {
        // Obtain the target and source query from each mapping axiom in
        // the model.
        CQIE targetQuery = (CQIE) axiom.getTargetQuery();

        // Get the source SQL query; it has already been parsed by the mapping parser.
        // TODO: consider also MetaMappingExpander.

        OBDASQLQuery sourceQuery = (OBDASQLQuery) axiom.getSourceQuery();

        // Construct the SQL query tree from the source query
        VisitedQuery queryParsed = translator.constructParser(sourceQuery.toString());

        // Create a lookup table for variable swapping
        LookupTable lookupTable = createLookupTable(queryParsed);

        // We can easily get the tables referenced by the SQL query
        ArrayList<RelationJSQL> tableList = queryParsed.getTableSet();

        // Construct the body from the source query
        ArrayList<Function> atoms = new ArrayList<Function>();
        for (RelationJSQL table : tableList) {
          // Construct the URI from the table name
          String tableName = table.getGivenName();
          String predicateName = tableName;

          // Construct the predicate using the table name
          int arity = dbMetaData.getDefinition(tableName).countAttribute();
          Predicate predicate = dfac.getPredicate(predicateName, arity);

          // Swap the column name with a new variable from the lookup
          // table
          List<Term> terms = new ArrayList<Term>();
          for (int i = 1; i <= arity; i++) {
            String columnName =
                dbMetaData.getFullQualifiedAttributeName(tableName, table.getAlias(), i);
            String termName = lookupTable.lookup(columnName);
            if (termName == null) {
              throw new RuntimeException(
                  "Column '" + columnName + "' was not found in the lookup table");
            }
            Term term = dfac.getVariable(termName);
            terms.add(term);
          }
          // Create an atom for a particular table
          Function atom = dfac.getFunction(predicate, terms);
          atoms.add(atom);
        }

        // For the join conditions. TODO: non-equi joins still need to be considered.
        ArrayList<Expression> joinConditions = queryParsed.getJoinCondition();
        for (Expression predicate : joinConditions) {

          Function atom = getFunction(predicate, lookupTable);
          atoms.add(atom);
        }

        // For the selection "where" clause conditions
        SelectionJSQL selection = queryParsed.getSelection();
        if (selection != null) {

          // Stack for filter function
          Stack<Function> filterFunctionStack = new Stack<Function>();

          Expression conditions = selection.getRawConditions();
          Function filterFunction = getFunction(conditions, lookupTable);
          filterFunctionStack.push(filterFunction);

          // The filter function stack must contain exactly one element; otherwise the
          // filter expression is invalid.
          if (filterFunctionStack.size() == 1) {
            Function filterFunct = filterFunctionStack.pop();
            Function atom =
                dfac.getFunction(filterFunct.getFunctionSymbol(), filterFunct.getTerms());
            atoms.add(atom);
          } else {
            throwInvalidFilterExpressionException(filterFunctionStack);
          }
        }

        // Construct the head from the target query. Each target atom produces one rule that
        // shares the same body atoms.
        List<Function> atomList = targetQuery.getBody();
        for (Function atom : atomList) {
          List<Term> terms = atom.getTerms();
          List<Term> newterms = new LinkedList<Term>();
          for (Term term : terms) {
            newterms.add(updateTerm(term, lookupTable));
          }
          Function newhead = dfac.getFunction(atom.getPredicate(), newterms);
          CQIE rule = dfac.getCQIE(newhead, atoms);
          datalog.appendRule(rule);
        }

      } catch (Exception e) {
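        // Record the error and continue with the remaining mapping axioms.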
        errorMessage.add(
            "Error in mapping with id: "
                + axiom.getId()
                + "\nDescription: "
                + e.getMessage()
                + "\nMapping: ["
                + axiom.toString()
                + "]");
      }
    }

    if (!errorMessage.isEmpty()) {
      StringBuilder errors = new StringBuilder();
      for (String error : errorMessage) {
        errors.append(error).append("\n");
      }
      final String msg =
          "There was an error analyzing the following mappings. Please correct the issue(s) to continue.\n"
              + errors.toString();
      throw new RuntimeException(msg);
    }
    return datalog;
  }
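  /** Returns a copy of this mapping axiom with cloned source and target queries. */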
  @Override
  public OBDARDBMappingAxiom clone() {
    return new RDBMSMappingAxiomImpl(this.getId(), sourceQuery.clone(), targetQuery.clone());
  }