/**
 * Enforces all equalities in the query body: for every equivalence class (among variables)
 * defined by a set of equalities, it chooses one representative variable and replaces all
 * other variables of the class with that representative. For example, if the query body is
 * R(x,y,z), x=y, y=z, it chooses x and produces the body R(x,x,x).
 *
 * <p>Equalities under disjunctions are ignored, e.g. R(x,y,z), x=y OR y=z. Note that the
 * process also removes from the body all the equalities that are processed here.
 *
 * @param result the query whose body is normalized in place
 */
public static void enforceEqualities(CQIE result) {
    List<Function> body = result.getBody();
    Substitution mgu = new SubstitutionImpl();

    // Collect all equalities as substitutions
    for (int i = 0; i < body.size(); i++) {
        Function atom = body.get(i);
        SubstitutionUtilities.applySubstitution(atom, mgu);
        if (atom.getFunctionSymbol() == ExpressionOperation.EQ) {
            if (!mgu.composeTerms(atom.getTerm(0), atom.getTerm(1)))
                continue;
            body.remove(i);
            i--;
        }
        // Search for nested equalities in an AND function
        else if (atom.getFunctionSymbol() == ExpressionOperation.AND) {
            nestedEQSubstitutions(atom, mgu);
            // Remove the conjunction if it became empty because all its terms were equalities
            if (atom.getTerms().isEmpty()) {
                body.remove(i);
                i--;
            }
            // If only one term is left, replace the conjunction with that term
            else if (atom.getTerms().size() == 1) {
                body.set(i, (Function) atom.getTerm(0));
            }
        }
    }
    SubstitutionUtilities.applySubstitution(result, mgu, false);
}
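/*
 * A minimal usage sketch of enforceEqualities (not part of the original class): it rebuilds the
 * Javadoc example R(x,y,z), x=y, y=z and normalizes it. The factory calls used here
 * (OBDADataFactoryImpl.getInstance(), getVariable, getPredicate, getFunction, getFunctionEQ,
 * getCQIE) are assumed from this codebase's OBDADataFactory API and may need adjusting to the
 * local signatures.
 */
private static void enforceEqualitiesExample() {
    OBDADataFactory fac = OBDADataFactoryImpl.getInstance();
    Variable x = fac.getVariable("x");
    Variable y = fac.getVariable("y");
    Variable z = fac.getVariable("z");

    // Body: R(x,y,z), x=y, y=z
    Predicate r = fac.getPredicate("R", 3);
    Function rAtom = fac.getFunction(r, x, y, z);
    Function eq1 = fac.getFunctionEQ(x, y);
    Function eq2 = fac.getFunctionEQ(y, z);

    // Head: q(x,y,z)
    Function head = fac.getFunction(fac.getPredicate("q", 3), x, y, z);
    CQIE query = fac.getCQIE(head, java.util.Arrays.asList(rAtom, eq1, eq2));

    // After normalization the equalities are removed and a single representative variable
    // stands for the class {x,y,z}; expected body: R(x,x,x), expected head: q(x,x,x).
    enforceEqualities(query);
}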
public DatalogProgram constructDatalogProgram() {
    DatalogProgram datalog = dfac.getDatalogProgram();
    LinkedList<String> errorMessage = new LinkedList<String>();
    for (OBDAMappingAxiom axiom : mappingList) {
        try {
            // Obtain the target and source query from each mapping axiom in the model.
            CQIE targetQuery = (CQIE) axiom.getTargetQuery();

            // Get the parsed SQL, since it is already parsed by the mapping parser
            // (consider also MetaMappingExpander)
            // VisitedQuery queryParsed = ...;
            OBDASQLQuery sourceQuery = (OBDASQLQuery) axiom.getSourceQuery();

            // Construct the SQL query tree from the source query
            VisitedQuery queryParsed = translator.constructParser(sourceQuery.toString());

            // Create a lookup table for variable swapping
            LookupTable lookupTable = createLookupTable(queryParsed);

            // We can easily get the tables from the SQL
            ArrayList<RelationJSQL> tableList = queryParsed.getTableSet();

            // Construct the body from the source query
            ArrayList<Function> atoms = new ArrayList<Function>();
            for (RelationJSQL table : tableList) {
                // Construct the URI from the table name
                String tableName = table.getGivenName();
                String predicateName = tableName;

                // Construct the predicate using the table name
                int arity = dbMetaData.getDefinition(tableName).countAttribute();
                Predicate predicate = dfac.getPredicate(predicateName, arity);

                // Swap each column name with a new variable from the lookup table
                List<Term> terms = new ArrayList<Term>();
                for (int i = 1; i <= arity; i++) {
                    String columnName = dbMetaData.getFullQualifiedAttributeName(tableName, table.getAlias(), i);
                    String termName = lookupTable.lookup(columnName);
                    if (termName == null) {
                        throw new RuntimeException("Column '" + columnName + "' was not found in the lookup table");
                    }
                    Term term = dfac.getVariable(termName);
                    terms.add(term);
                }
                // Create an atom for this particular table
                Function atom = dfac.getFunction(predicate, terms);
                atoms.add(atom);
            }

            // For the join conditions (NOTE: non-equi joins still need to be considered)
            ArrayList<Expression> joinConditions = queryParsed.getJoinCondition();
            for (Expression predicate : joinConditions) {
                Function atom = getFunction(predicate, lookupTable);
                atoms.add(atom);
            }

            // For the selection ("where" clause) conditions
            SelectionJSQL selection = queryParsed.getSelection();
            if (selection != null) {
                // Stack for filter functions
                Stack<Function> filterFunctionStack = new Stack<Function>();
                Expression conditions = selection.getRawConditions();
                Function filterFunction = getFunction(conditions, lookupTable);
                filterFunctionStack.push(filterFunction);

                // The filter function stack must have exactly one element left
                if (filterFunctionStack.size() == 1) {
                    Function filterFunct = filterFunctionStack.pop();
                    Function atom = dfac.getFunction(filterFunct.getFunctionSymbol(), filterFunct.getTerms());
                    atoms.add(atom);
                } else {
                    throwInvalidFilterExpressionException(filterFunctionStack);
                }
            }

            // Construct the heads from the target query: each atom of the target query
            // becomes the head of one rule, all rules sharing the same body.
            List<Function> atomList = targetQuery.getBody();
            Iterator<Function> atomListIter = atomList.iterator();
            while (atomListIter.hasNext()) {
                Function atom = atomListIter.next();
                List<Term> terms = atom.getTerms();
                List<Term> newterms = new LinkedList<Term>();
                for (Term term : terms) {
                    newterms.add(updateTerm(term, lookupTable));
                }
                Function newhead = dfac.getFunction(atom.getPredicate(), newterms);
                CQIE rule = dfac.getCQIE(newhead, atoms);
                datalog.appendRule(rule);
            }
        } catch (Exception e) {
            errorMessage.add("Error in mapping with id: " + axiom.getId()
                    + "\nDescription: " + e.getMessage()
                    + "\nMapping: [" + axiom.toString() + "]");
        }
    }
    if (errorMessage.size() > 0) {
        StringBuilder errors = new StringBuilder();
        for (String error : errorMessage) {
            errors.append(error + "\n");
        }
        final String msg = "There was an error analyzing the following mappings. "
                + "Please correct the issue(s) to continue.\n" + errors.toString();
        throw new RuntimeException(msg);
    }
    return datalog;
}
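/*
 * A minimal consumption sketch (not part of the original class): it assumes an analyzer instance
 * exposing constructDatalogProgram() as above, and that DatalogProgram.getRules() returns the
 * rules appended via appendRule(). The method name printMappingProgram is illustrative only.
 */
private void printMappingProgram() {
    DatalogProgram program = constructDatalogProgram();
    // One rule per target-query atom of each mapping axiom: the head ranges over the ontology
    // vocabulary, the body contains the table atoms plus the join and filter conditions.
    for (CQIE rule : program.getRules()) {
        System.out.println(rule.getHead() + " :- " + rule.getBody());
    }
}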