/**
 * Runs syntax analysis and then performs semantic checking over the parse tree.
 *
 * <p>Variable declaration nodes are typed first, then every statement held by
 * each sequence node is checked. A semantic error raised while checking a
 * statement is printed and aborts the process with exit code 1 (the loop body
 * mirrors the original lambda, which could not propagate the checked
 * exception).
 *
 * @return the root node produced by the parser
 * @throws SyntaxException   if parsing fails
 * @throws LexException      if tokenization fails
 * @throws SemanticException if typing the variable declarations fails
 */
public Node scan() throws SyntaxException, LexException, SemanticException {
    Node result = syntaxAnalyser.parse();
    ArrayList<Node> tree = syntaxAnalyser.getTree();

    // Assign declared types to all variable nodes before checking statements.
    ArrayList<Node> variableNodes = new ArrayList<Node>();
    for (Node node : tree) {
        if (node.getTag() == Tag.VARIABLES) {
            variableNodes.add(node);
        }
    }
    setVariablesType(variableNodes);

    // Collect the statement sequences, then check each contained statement.
    ArrayList<Node> sequenceNodes = new ArrayList<Node>();
    for (Node node : tree) {
        if (node.getTag() == Tag.SEQUENCE) {
            sequenceNodes.add(node);
        }
    }
    for (Node sequenceNode : sequenceNodes) {
        for (Node statement : sequenceNode.getChildren()) {
            try {
                check(statement.getChildren().get(0));
            } catch (SemanticException e) {
                // Fail-fast policy of the checker: report and abort.
                e.printStackTrace();
                System.exit(1);
            }
        }
    }
    return result;
}
/** Find a type by name. */ @Override public Named find(String name) throws SemanticException { if (Report.should_report(report_topics, 3)) Report.report(3, "MemberCR.find(" + name + ")"); if (nocache.contains(name)) { throw new NoClassException(name); } Named n = ts.systemResolver().check(name); if (n != null) { return n; } SemanticException error = null; // First, just try the long name. try { if (Report.should_report(report_topics, 2)) Report.report(2, "MCR: loading " + name + " from " + inner); return inner.find(name); } catch (SemanticException e) { if (Report.should_report(report_topics, 2)) Report.report(2, "MCR: " + e.getMessage()); if (StringUtil.isNameShort(name)) { throw e; } error = e; } boolean install = true; // Now try the prefix of the name and look for a member class // within it named with the suffix. String prefix = StringUtil.getPackageComponent(name); String suffix = StringUtil.getShortNameComponent(name); // Try the full name of the prefix first, then the raw class name, // so that encoded type information and source files are preferred // to the raw class file. try { if (Report.should_report(report_topics, 2)) Report.report(2, "MCR: loading prefix " + prefix); n = find(prefix); // This may be called during deserialization; n's // member classes might not be initialized yet. if (n instanceof ParsedTypeObject) { return findMember(n, suffix); } } catch (SemanticException e) { } if (install) { nocache.add(name); } throw error; }
/**
 * Reports that an exception type escapes the current code body without being
 * caught or declared.
 *
 * @param t   the uncaught exception type
 * @param pos the source position of the offending throw
 * @throws SemanticException always; carries a coded-error attribute map
 *     (error code + offending type name) so tooling can offer a
 *     "surround with try/catch" style fix
 */
void uncaughtType(Type t, Position pos) throws SemanticException {
    String message =
        codeType
            + " cannot throw a \""
            + t
            + "\"; the exception must either be caught or declared to be thrown.";
    SemanticException exception = new SemanticException(message, pos);

    // Attach machine-readable error metadata for downstream consumers.
    Map<String, Object> attributes = CollectionFactory.newHashMap();
    attributes.put(CodedErrorInfo.ERROR_CODE_KEY, CodedErrorInfo.ERROR_CODE_SURROUND_THROW);
    attributes.put("TYPE", t.toString());
    exception.setAttributes(attributes);

    throw exception;
}
public boolean wellFormedRecursions(SJSessionType st) { try { wellFormedRecursionsAux( st, new HashSet<SJLabel>(), new HashSet< SJLabel>()); // Would be enough to just use a hasRecurse boolean? (As in the original // implementation.) } catch (SemanticException se) { se.printStackTrace(); return false; } return true; }
/**
 * Builds the grammar model from the given AST, running the compilation
 * passes in order.
 *
 * <p>In restricted-syntax mode the tree/transformation passes are skipped;
 * otherwise the full pass sequence runs, including the implicit
 * transformation passes when the grammar declares a tree.
 *
 * @param ast the grammar AST; may not be null
 */
public Grammar(Start ast) {
    if (ast == null) {
        throw new InternalException("ast may not be null");
    }

    boolean restricted = GrammarCompiler.RESTRICTED_SYNTAX;

    // Shared pass pipeline; tree-related passes only run in full mode.
    findAnonymousContexts(ast);
    fillGlobalNameSpace(ast);
    if (!restricted) {
        fillTreeNameSpace(ast);
    }
    findInlineExpressions(ast);
    if (!restricted) {
        findTransformations(ast);
    }
    findLexerPriorities(ast);
    verifyandResolveReferences();
    verifyPriorities();

    if (!restricted && hasATree()) {
        buildImplicitTransformations();
        resolveUnknowTypes();
        verifyAssignability();
    }

    // A grammar must define at least one context somewhere.
    if (this.globalAnonymousContext == null && this.namedContexts.isEmpty()) {
        throw SemanticException.genericError("The Lexer and the Parser are both empty.");
    }
}
/**
 * Registers a name declaration in this namespace's map.
 *
 * @param nameDeclaration the declaration to add; may not be null
 */
private void internalAdd(INameDeclaration nameDeclaration) {
    if (nameDeclaration == null) {
        throw new InternalException("nameDeclaration may not be null");
    }

    String name = nameDeclaration.getName();
    // The map never holds null values (only non-null declarations are ever
    // put), so a null lookup means the name is free.
    INameDeclaration existing = this.nameMap.get(name);
    if (existing != null) {
        throw SemanticException.duplicateDeclaration(nameDeclaration, existing);
    }
    this.nameMap.put(name, nameDeclaration);
}
/**
 * Registers a tree production in the tree namespace.
 *
 * <p>A tree production may legitimately share its name with a parser
 * production or a selector selection (or be entirely new); any other name
 * clash is a duplicate declaration.
 *
 * @param treeProduction the production to add; may not be null
 */
private void add(Tree.TreeProduction treeProduction) {
    if (treeProduction == null) {
        throw new InternalException("treeProduction may not be null");
    }

    String name = treeProduction.getName();
    INameDeclaration existing = this.globalNameSpace.getNameDeclaration(name);

    boolean compatible =
        existing == null
            || existing instanceof Parser.ParserProduction
            || existing instanceof Selector.ParserSelector.Selection;
    if (!compatible) {
        throw SemanticException.duplicateDeclaration(treeProduction, existing);
    }
    this.nameMap.put(name, treeProduction);
}
/**
 * Computes this list's type by folding each element's type into an
 * initially-empty list type.
 *
 * <p>Each element is asked to construct its own type first; {@code add}
 * returns {@code null} when the element's type cannot be combined with the
 * list type accumulated so far, which is reported as a semantic error.
 */
@Override
public void constructType() {
    this.type = new Type.SimpleType.EmptyListType();

    AlternativeTransformationListElement previousElement = null;
    for (AlternativeTransformationListElement element : getListElements()) {
        element.constructType();
        this.type = (Type.SimpleType) this.type.add(element.getType());
        if (this.type == null) {
            // Bug fix: the original indexed getListElements().get(i - 1)
            // here, which threw IndexOutOfBoundsException (i == 0) when the
            // very first element failed to combine. Report the element pair,
            // falling back to the element itself when there is no
            // predecessor.
            throw SemanticException.uncompatibleListElement(
                previousElement != null ? previousElement : element, element);
        }
        previousElement = element;
    }
}
/**
 * Recursively type-checks the subtree rooted at {@code root} and returns its
 * resulting type tag (0 means "no type assigned").
 *
 * <p>Types are cached on nodes: a node whose type is already non-zero is not
 * re-checked. The numeric type tags seen here are {@code Tag.INT_NUM},
 * {@code Tag.REAL_NUM} and {@code Tag.BOOL_NUM}.
 *
 * @param root the parse-tree node to check
 * @return the type tag stored on {@code root} after checking
 * @throws SemanticException on any type mismatch
 */
public int check(Node root) throws SemanticException {
    if (root.getType() == 0) {
        switch (root.getTag()) {
            case Tag.STRUCTERED_STATEMENT:
                int tag = root.getChildren().get(0).getTag();
                if (tag == Tag.BEGIN) {
                    // begin..end block: check every contained statement.
                    root.getChildren()
                            .get(1)
                            .getChildren()
                            .forEach(
                                    (node) -> {
                                        try {
                                            check(node.getChildren().get(0));
                                        } catch (SemanticException e) {
                                            // The lambda cannot propagate the
                                            // checked exception; fail fast.
                                            e.printStackTrace();
                                            System.exit(1);
                                        }
                                    });
                } else {
                    if (tag == Tag.WHILE) {
                        // while-loop: the condition must be boolean.
                        if (check(root.getChildren().get(1)) != Tag.BOOL_NUM) {
                            throw new SemanticException(
                                    "Semantic error in line "
                                            + root.getNumLine()
                                            + ", unexpected type, expected type 'boolean'.");
                        }
                        // Check statement -> (simple-statement | structured-statement).
                        check(root.getChildren().get(3).getChildren().get(0));
                    } else {
                        if (tag == Tag.WRITELN) {
                            // writeln: just check the argument expression.
                            check(root.getChildren().get(1));
                        } else {
                            // if-statement: the condition must be boolean.
                            if (check(root.getChildren().get(1)) != Tag.BOOL_NUM) {
                                throw new SemanticException(
                                        "Semantic error in line "
                                                + root.getNumLine()
                                                + ", unexpected type, expected type 'boolean'.");
                            }
                            check(root.getChildren().get(3).getChildren().get(0));
                            // More than 4 children means an else branch exists.
                            if (root.getChildren().size() > 4) {
                                check(root.getChildren().get(5).getChildren().get(0));
                            }
                        }
                    }
                }
                break;
            case Tag.SIMPLE_STATEMENT:
                // Assignment: left-hand id and right-hand expression must agree.
                int tagId = check(root.getChildren().get(0));
                int tagExpr = check(root.getChildren().get(2));
                if (!checkTags(tagId, tagExpr)) {
                    throw new SemanticException(
                            "Semantic error in line " + root.getNumLine() + ", unexpected type.");
                }
                root.setType(tagId);
                break;
            case Tag.ID:
                // Look up the identifier's declared type in the symbol table.
                root.setType(
                        syntaxAnalyser
                                .getVariables()
                                .get(new Word(root.getStrValue(), root.getTag())));
                break;
            case Tag.EXPRESSION:
                if (root.getChildren().size() > 1) {
                    // Relational form <simple-expr> <relop> <simple-expr>:
                    // operands must be compatible; the result is boolean.
                    int tag1 = check(root.getChildren().get(0));
                    int tag2 = check(root.getChildren().get(2));
                    if (!checkTagsExpr(tag1, tag2)) {
                        throw new SemanticException(
                                "Semantic error in line "
                                        + root.getNumLine()
                                        + ", unexpected type.");
                    }
                    root.setType(Tag.BOOL_NUM);
                } else {
                    root.setType(check(root.getChildren().get(0)));
                }
                break;
            case Tag.SIMPLE_EXPRESSION:
                if (root.getChildren().size() <= 2) {
                    // Optional unary sign followed by a single term.
                    int index = 0;
                    if (root.getChildren().get(0).getTag() != Tag.TERM) {
                        index = 1;
                    }
                    root.setType(check(root.getChildren().get(index)));
                } else {
                    // Additive chain: boolean operands are forbidden; any real
                    // operand promotes the whole expression to real.
                    for (Node node : root.getChildren()) {
                        if (node.getTag() == Tag.TERM) {
                            int type = check(node);
                            if (type == Tag.BOOL_NUM) {
                                throw new SemanticException(
                                        "Semantic error in line "
                                                + root.getNumLine()
                                                + ", unexpected type.");
                            }
                            if (type == Tag.REAL_NUM) {
                                root.setType(type);
                            }
                        }
                    }
                    // No real operand seen: the result stays integer.
                    if (root.getType() == 0) {
                        root.setType(Tag.INT_NUM);
                    }
                }
                break;
            case Tag.TERM:
                if (root.getChildren().size() == 1) {
                    root.setType(check(root.getChildren().get(0)));
                } else {
                    // Multiplicative chain over factors and operators.
                    int index = 0;
                    for (Node node : root.getChildren()) {
                        if (node.getTag() == Tag.FACTOR) {
                            int type = check(node);
                            if (type == Tag.BOOL_NUM) {
                                throw new SemanticException(
                                        "Semantic error in line "
                                                + root.getNumLine()
                                                + ", unexpected type.");
                            }
                            if (type == Tag.REAL_NUM) {
                                root.setType(type);
                            }
                        }
                        if (node.getTag() == Tag.DIV) {
                            // '/' always produces a real result.
                            root.setType(Tag.REAL_NUM);
                        }
                        if (node.getTag() == Tag.IDIV || node.getTag() == Tag.IMOD) {
                            // div/mod require integer operands on both sides.
                            if (root.getChildren().get(index - 1).getType() != Tag.INT_NUM
                                    || check(root.getChildren().get(index + 1)) != Tag.INT_NUM) {
                                throw new SemanticException(
                                        "Semantic error in line "
                                                + root.getNumLine()
                                                + ", unexpected type.");
                            }
                        }
                        index++;
                    }
                    // No real operand seen: the result stays integer.
                    if (root.getType() == 0) {
                        root.setType(Tag.INT_NUM);
                    }
                }
                break;
            case Tag.FACTOR:
                if (root.getChildren().get(0).getTag() == Tag.LEFT_PARENTHESIS) {
                    // Parenthesized sub-expression.
                    root.setType(check(root.getChildren().get(1)));
                } else {
                    if (root.getChildren().get(0).getTag() == Tag.ID) {
                        root.setType(check(root.getChildren().get(0)));
                    } else {
                        if (root.getChildren().get(0).getTag() == Tag.SQRT) {
                            // sqrt(expr): type taken from the argument.
                            root.setType(check(root.getChildren().get(1)));
                        } else {
                            // Literal: its type was set earlier.
                            root.setType(root.getChildren().get(0).getType());
                        }
                    }
                }
                break;
        }
    }
    return root.getType();
}
/**
 * Checks and applies implicit and explicit lexical precedence rules,
 * displaying errors and infos for the human user during the process.
 *
 * @param automaton the automaton to check. In order to have the explicit
 *     priorities applied, it is required that the automaton is tagged with
 *     the acceptation of the LexerExpression.
 * @param trace receives the verbose diagnostics.
 * @param strictness the compiler strictness mode.
 * @return a new automaton where only the right acceptation tags remain.
 */
public Automaton checkAndApplyLexerPrecedence(
        Automaton automaton, Trace trace, Strictness strictness) {
    automaton = automaton.minimal();
    // Shortest word reaching each state; used for human-readable examples.
    Map<State, String> words = automaton.collectShortestWords();
    Map<Acceptation, Set<State>> accepts = automaton.collectAcceptationStates();

    // Associate each acceptation with the ones it share at least a common
    // state.
    Map<Acceptation, Set<Acceptation>> conflicts =
            new HashMap<Acceptation, Set<Acceptation>>();

    // Associate each acceptation with the ones it supersedes.
    Map<Acceptation, Set<Acceptation>> priorities =
            new HashMap<Acceptation, Set<Acceptation>>();

    // Fill the priorities structure with the implicit inclusion rule
    for (Acceptation acc1 : automaton.getAcceptations()) {
        if (acc1 == Acceptation.ACCEPT) {
            continue;
        }
        // FIXME: empty LexerExpressions are not detected here since
        // their acceptation tag is not in the automaton.

        // Collect all the conflicts
        Set<State> set1 = accepts.get(acc1);
        Set<Acceptation> confs = new TreeSet<Acceptation>();
        for (State s : set1) {
            confs.addAll(s.getAcceptations());
        }
        conflicts.put(acc1, confs);

        // Check for implicit priority for each conflict
        for (Acceptation acc2 : confs) {
            if (acc2 == Acceptation.ACCEPT) {
                continue;
            }
            if (acc1 == acc2) {
                continue;
            }
            Set<State> set2 = accepts.get(acc2);
            if (set2.equals(set1)) {
                // Both tokens accept exactly the same states: they are
                // equivalent, which is an error. The containsKey test keeps
                // the pair from being handled twice.
                if (!conflicts.containsKey(acc2)) {
                    throw SemanticException.genericError(
                            "The "
                                    + acc1.getName()
                                    + " and "
                                    + acc2.getName()
                                    + " tokens are equivalent.");
                }
            } else if (set2.containsAll(set1)) {
                // acc1's accepting states are strictly included in acc2's:
                // acc1 implicitly takes priority on the shared strings.
                addPriority(priorities, acc1, acc2);
                State example = null;
                for (State s : set2) {
                    if (!set1.contains(s)) {
                        example = s;
                        break;
                    }
                }
                // Note: Since set1 is strictly included in set2, example
                // cannot be null
                trace.verboseln(
                        " The "
                                + acc1.getName()
                                + " token is included in the "
                                + acc2.getName()
                                + " token. (Example of divergence: '"
                                + words.get(example)
                                + "'.)");
            }
        }
    }

    // Collect new acceptation states and see if a conflict still exists
    Map<State, Acceptation> newAccepts = new HashMap<State, Acceptation>();
    for (State s : automaton.getStates()) {
        if (s.getAcceptations().isEmpty()) {
            continue;
        }
        // Pick the winning acceptation for this state using the priority
        // relation; any unordered pair is an unresolved conflict.
        Acceptation candidate = s.getAcceptations().first();
        for (Acceptation challenger : s.getAcceptations()) {
            if (candidate == challenger) {
                continue;
            }
            if (hasPriority(priorities, candidate, challenger)) {
                // nothing. keep the candidate
            } else if (hasPriority(priorities, challenger, candidate)) {
                candidate = challenger;
            } else {
                throw SemanticException.genericError(
                        "The "
                                + candidate.getName()
                                + " token and the "
                                + challenger.getName()
                                + " token conflict on the string '"
                                + words.get(s)
                                + "'. You should specify a precedence between them.");
            }
        }
        newAccepts.put(s, candidate);
    }

    // Ask for a new automaton with the correct acceptation states.
    return automaton.resetAcceptations(newAccepts);
}
/**
 * Builds the lexer's final automaton and validates every lexer expression.
 *
 * <p>Computes the global automaton for the anonymous context (minimal,
 * longest-match, precedence-resolved, with markers), then reports lexer
 * expressions that are useless, match nothing, or match the empty string.
 *
 * @param trace receives the verbose diagnostics.
 * @param strictness STRICT turns the "useless" and "does not match anything"
 *     diagnostics into errors instead of verbose notes.
 */
public void compileLexer(Trace trace, Strictness strictness) {
    Automaton automaton;
    if (this.globalAnonymousContext != null) {
        automaton =
                this.globalAnonymousContext.computeAutomaton().minimal().longest().minimal();
        automaton = checkAndApplyLexerPrecedence(automaton, trace, strictness).minimal();
        this.lexer.setAutomaton(automaton.withMarkers().minimal());
    } else {
        // Grammars without a global anonymous context are not supported yet.
        throw new InternalException("not implemented");
    }

    for (Context context : this.namedContexts) {
        context.computeAutomaton();
    }

    // Look for useless LexerExpression
    for (LexerExpression lexerExpression : this.lexer.getExpressions()) {
        // If there is no automaton saved it means that the LexerExpression
        // was not used to build the big automaton.
        if (lexerExpression.getSavedAutomaton() == null) {
            if (strictness == Strictness.STRICT) {
                throw SemanticException.genericError(
                        "The "
                                + lexerExpression.getExpressionName()
                                + " expression is useless.");
            } else {
                trace.verboseln(
                        " The "
                                + lexerExpression.getExpressionName()
                                + " expression is useless.");
            }
        }
    }

    for (LexerExpression lexerExpression : this.lexer.getExpressions()) {
        // Note: getting the automaton forces the validation of the semantic
        // validity of the expression (e.g. circularity). The local variable
        // itself is intentionally unused.
        Automaton lexerAutomaton = lexerExpression.getAutomaton();
    }

    for (LexerExpression lexerExpression :
            this.globalAnonymousContext.getLexerExpressionTokensAndIgnored()) {
        // Note: The big automaton has to be minimal (thus with the unused
        // acceptations removed)
        if (!automaton.getAcceptations().contains(lexerExpression.getAcceptation())) {
            if (strictness == Strictness.STRICT) {
                throw SemanticException.genericError(
                        "The "
                                + lexerExpression.getExpressionName()
                                + " token does not match anything.");
            } else {
                trace.verboseln(
                        " The "
                                + lexerExpression.getExpressionName()
                                + " token does not match anything.");
            }
        }

        Automaton expressionAutomaton = lexerExpression.getAutomaton();
        for (RichSymbol richSymbol :
                expressionAutomaton.getStartState().getTransitions().keySet()) {
            if (richSymbol.isLookahead()) {
                // We have a lookahead transition from the start state.
                // Note: this works since the result of getAutomaton() is
                // minimized.
                throw SemanticException.genericError(
                        "The "
                                + lexerExpression.getExpressionName()
                                + " token matches the empty string.");
            }
        }
    }
}