/**
 * Tokenizes {@code string} with an ExprLexer and asserts that the produced tokens
 * match {@code tokens}, in order. If the lexer yields more tokens than expected,
 * the extras are logged against "&lt;NOTHING&gt;" but not asserted on.
 *
 * @param string the input to tokenize
 * @param tokens the expected tokens, in the order the lexer should emit them
 * @throws AssertionError if any actual token differs from its expected counterpart
 */
private void expect(String string, Token... tokens) {
  System.out.println("Tokenizing [" + string + "]");
  ExprLexer tokenizer = new ExprLexer(string);
  int expectedIndex = 0;
  while (!tokenizer.isEndOfInput()) {
    String expectedString;
    Token expected;
    if (expectedIndex < tokens.length) {
      expected = tokens[expectedIndex];
      expectedString = expected.toString();
    } else {
      // Lexer produced more tokens than were expected: log them, but there is
      // nothing to compare against, so skip the assertions below.
      expected = null;
      expectedString = "<NOTHING>";
    }
    Token actual = tokenizer.next();
    System.out.println(
        String.format("Expected: %15s, got %s", expectedString, actual.toString()));
    if (expected != null) {
      // Field-by-field checks first, so a mismatch reports which field differed.
      assertEquals("tokenStart", expected.getTokenStart(), actual.getTokenStart());
      assertEquals("text", expected.getString(), actual.getString());
      assertEquals("type", expected.getType(), actual.getType());
      // Belt-and-braces full equals(), in case Token.equals covers more than the
      // three fields asserted above — TODO confirm against Token's definition.
      if (!expected.equals(actual)) {
        System.err.println("Unexpected result!");
        throw new AssertionError();
      }
    }
    expectedIndex++;
  }
}
/**
 * Parses an Enumeration production: OB NAME (sep NAME)* CB.
 * JavaCC-generated parser method (note the characteristic jj_* bookkeeping and
 * the unreachable trailing throw); edit the grammar, not this code.
 *
 * @return an Enumeration AST node holding the first NAME and any further NAMEs
 * @throws ParseException if the token stream does not match the production
 */
public final Enumeration Enumeration() throws ParseException {
  Token t;
  ASTStringNode name;
  ASTStringNode name1;
  ArrayList<ASTStringNode> name1List = new ArrayList<ASTStringNode>();
  Token firstToken = token; // remembers where this production's token span starts
  jj_consume_token(OB);
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  label_6:
  while (true) {
    // One-token lookahead: kind 32 is presumably the separator ("|" or ",") —
    // confirm against the .jj grammar's token declarations.
    switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
      case 32:
        ;
        break;
      default:
        jj_la1[16] = jj_gen;
        break label_6; // no separator follows: repetition is over
    }
    jj_consume_token(32);
    t = jj_consume_token(NAME);
    name1 = new ASTStringNode(t.toString(), new WToken(token));
    name1List.add(name1);
  }
  jj_consume_token(CB);
  {
    if (true) return new Enumeration(name, name1List, firstToken.next, token);
  }
  // Unreachable; generated to satisfy the compiler's definite-return analysis.
  throw new Error("Missing return statement in function");
}
public static void main(String args[]) { Token tok; try { // Read from the command line for the filename Lexer lex = new Lexer(args[0]); while (true) { tok = lex.nextToken(); // if token returns error token we print the line for error and exit if (tok.getKind() == Tokens.Error) { System.out.println( "Error in Line " + lex.source.getLineno() + " ErrorValue: " + tok.toString()); System.exit(0); } // Printing the information about the line and the token String p = "Left: " + tok.getLeftPosition() + "| Right: " + tok.getRightPosition() + "| Type: " + TokenType.tokens.get(tok.getKind()) + "| Value: "; // if ((tok.getKind() == Tokens.Identifier) || (tok.getKind() == Tokens.INTeger)) p += tok.toString(); System.out.println(p + "| Line: " + lex.source.getLineno()); } } catch (Exception e) { } }
/**
 * Tokenizes {@code inFile} with the project Lexer and writes one token per
 * line to {@code outFile}. Hard-coded sample paths are used unless both paths
 * are supplied on the command line.
 *
 * <p>Fix: the BufferedWriter is now opened in try-with-resources, so it is
 * closed (and buffered output flushed) even when write()/newLine() throws —
 * the previous version leaked the writer on any mid-loop IOException.
 *
 * @param args optional: args[0] = input file path, args[1] = output file path
 */
public static void main(String[] args) {
  String inFile = "C:\\Users\\Sukoon\\Documents\\NetBeansProjects\\cdproj\\src\\Sample.in";
  String outFile = "C:\\Users\\Sukoon\\Desktop\\Sample.out";
  if (args.length > 1) {
    inFile = args[0];
    outFile = args[1];
  }
  Lexer lexer = new Lexer(inFile);
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(outFile))) {
    Token t;
    // nextToken() returning null signals end of input.
    while ((t = lexer.nextToken()) != null) {
      writer.write(t.toString());
      writer.newLine();
    }
    System.out.println("Done tokenizing file: " + inFile);
    System.out.println("Output written in file: " + outFile);
  } catch (IOException e) {
    e.printStackTrace();
  }
}
/**
 * Parses an AttListDecl production: token 34 NAME (AttribDef)* PCB.
 * JavaCC-generated; token kind 34 is presumably the "&lt;!ATTLIST" opener —
 * confirm in the grammar's token declarations.
 *
 * @return an AttListDecl AST node for the named element and its attribute defs
 * @throws ParseException if the token stream does not match the production
 */
public final AttListDecl AttListDecl() throws ParseException {
  Token t;
  ASTStringNode name;
  AttribDef attribDef;
  ArrayList<AttribDef> attribDefList = new ArrayList<AttribDef>();
  Token firstToken = token; // start of this production's token span
  jj_consume_token(34);
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  label_4:
  while (true) {
    // Keep consuming attribute definitions while the next token is a NAME.
    switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
      case NAME:
        ;
        break;
      default:
        jj_la1[11] = jj_gen;
        break label_4; // next token is not a NAME: repetition is over
    }
    attribDef = AttribDef();
    attribDefList.add(attribDef);
  }
  jj_consume_token(PCB);
  {
    if (true) return new AttListDecl(name, attribDefList, firstToken.next, token);
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Parses the top-level DTD production: (DTDEntry)* EOF.
 * JavaCC-generated entry point; token kinds 31 and 34 are the two entry
 * openers (presumably ELEMENT and ATTLIST declarations — confirm in grammar).
 *
 * @return a DTD AST node holding all entries plus the EOF token's node
 * @throws ParseException if the token stream does not match the production
 */
public final DTD DTD() throws ParseException {
  DTDEntry dTDEntry;
  ArrayList<DTDEntry> dTDEntryList = new ArrayList<DTDEntry>();
  Token t;
  ASTStringNode eof;
  Token firstToken = token; // start of this production's token span
  label_1:
  while (true) {
    // Keep parsing entries while the lookahead can start a DTDEntry.
    switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
      case 31:
      case 34:
        ;
        break;
      default:
        jj_la1[0] = jj_gen;
        break label_1; // lookahead cannot start an entry: stop
    }
    dTDEntry = DTDEntry();
    dTDEntryList.add(dTDEntry);
  }
  // Token kind 0 is EOF in JavaCC-generated token managers.
  t = jj_consume_token(0);
  eof = new ASTStringNode(t.toString(), new WToken(token));
  {
    if (true) return new DTD(dTDEntryList, eof, firstToken.next, token);
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Renders the sentence as the concatenation of its tokens' string forms,
 * bracketed by "SENTENCE begin"/"SENTENCE end" marker lines.
 *
 * <p>Fix: uses StringBuilder instead of the legacy StringBuffer — the builder
 * is method-local, so StringBuffer's per-call synchronization was pure
 * overhead.
 *
 * @return the formatted sentence content and annotations
 */
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append(" SENTENCE begin\n");
  for (Token tok : tokens) sb.append(tok.toString());
  sb.append(" SENTENCE end\n");
  return sb.toString();
}
/**
 * Builds a one-token-per-line dump of this object's tokens, each line showing
 * the token's string form followed by its getString() value.
 *
 * @return the multi-line token listing
 */
@Override
public String toString() {
  StringBuilder out = new StringBuilder();
  for (Token token : tokens) {
    out.append(token.toString());
    out.append(" String:");
    out.append(token.getString());
    out.append("\n");
  }
  return out.toString();
}
/**
 * Parses an integer literal into a value node.
 * JavaCC-generated parser method; the trailing throw is unreachable boilerplate.
 * NOTE(review): Integer.parseInt can throw NumberFormatException for literals
 * outside int range — confirm the grammar bounds INTEGER_LITERAL accordingly.
 *
 * @return a ValorInteiro wrapping the parsed int
 * @throws ParseException if the next token is not an INTEGER_LITERAL
 */
public static final Valor PValorInteiro() throws ParseException {
  Token token;
  token = jj_consume_token(INTEGER_LITERAL);
  {
    if (true) return new ValorInteiro(Integer.parseInt(token.toString()));
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
public String toString() { final StringBuilder result = new StringBuilder(); // overall format is (cat)token, or: // (cat)'text'[startIdx,endIdx{wordCount}](seqNum.revisNum) result.append("(").append(category).append(')').append(token.toString()); return result.toString(); }
/**
 * Parses a string literal into a value node, stripping the surrounding quote
 * characters (first and last character of the token image).
 * JavaCC-generated parser method; the trailing throw is unreachable boilerplate.
 *
 * @return a ValorString wrapping the unquoted text
 * @throws ParseException if the next token is not a STRING_LITERAL
 */
public static final Valor PValorString() throws ParseException {
  Token token;
  token = jj_consume_token(STRING_LITERAL);
  String tokenStr = token.toString();
  // Drop the opening and closing quote characters.
  tokenStr = tokenStr.substring(1, tokenStr.length() - 1);
  {
    if (true) return new ValorString(tokenStr);
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Parses an AttribValue production: a single QUOTEDSTR token.
 * JavaCC-generated; the trailing throw is unreachable boilerplate.
 *
 * @return an AttribValue AST node wrapping the quoted string
 * @throws ParseException if the next token is not a QUOTEDSTR
 */
public final AttribValue AttribValue() throws ParseException {
  Token t;
  ASTStringNode quotedstr;
  Token firstToken = token; // start of this production's token span
  t = jj_consume_token(QUOTEDSTR);
  quotedstr = new ASTStringNode(t.toString(), new WToken(token));
  {
    if (true) return new AttribValue(quotedstr, firstToken.next, token);
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Parses a Cp (content particle) production with three alternatives:
 * PCDATA → Cp1; NAME Modifier? → Cp2; "(" SeqOrChoice ")" Modifier? → Cp3.
 * JavaCC-generated; dispatches on one token of lookahead.
 *
 * @return the Cp subclass matching the recognized alternative
 * @throws ParseException if the lookahead matches none of the alternatives
 */
public final Cp Cp() throws ParseException {
  Token t;
  ASTStringNode name;
  Modifier modifier = null; // stays null when no STAR/PLUS/Q follows the NAME
  SeqOrChoice seqOrChoice;
  Modifier modifier1 = null; // stays null when no modifier follows the group
  Token firstToken = token; // start of this production's token span
  switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
    case PCDATA:
      jj_consume_token(PCDATA);
      {
        if (true) return new Cp1(firstToken.next, token);
      }
      break;
    case NAME:
      t = jj_consume_token(NAME);
      name = new ASTStringNode(t.toString(), new WToken(token));
      // Optional repetition modifier: * + or ?
      switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
        case STAR:
        case PLUS:
        case Q:
          modifier = Modifier();
          break;
        default:
          jj_la1[7] = jj_gen;
          ;
      }
      {
        if (true) return new Cp2(name, modifier, firstToken.next, token);
      }
      break;
    case OB:
      seqOrChoice = SeqOrChoice();
      // Optional repetition modifier after the parenthesized group.
      switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
        case STAR:
        case PLUS:
        case Q:
          modifier1 = Modifier();
          break;
        default:
          jj_la1[8] = jj_gen;
          ;
      }
      {
        if (true) return new Cp3(seqOrChoice, modifier1, firstToken.next, token);
      }
      break;
    default:
      jj_la1[9] = jj_gen;
      jj_consume_token(-1); // forces a token-mismatch error with full context
      throw new ParseException();
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Renders this subtree into {@code sb} as a sideways text diagram for debug
 * viewing: the right child is printed above the current node and the left
 * child below it, with box-drawing characters showing the branch structure.
 *
 * @param prefix the indentation accumulated from ancestor nodes
 * @param isTail whether this node hangs off the lower branch of its parent
 *     (controls which connector and continuation characters are used)
 * @param sb the builder the diagram lines are appended to
 * @return sb, for chaining
 */
public StringBuilder toString(StringBuilder prefix, boolean isTail, StringBuilder sb) {
  if (right != null) {
    // Right subtree first so it appears above this node in the output.
    right.toString(
        new StringBuilder().append(prefix).append(isTail ? "│ " : " "), false, sb);
  }
  sb.append(prefix).append(isTail ? "└── " : "┌── ").append(token.toString()).append("\n");
  if (left != null) {
    // Left subtree last so it appears below this node in the output.
    left.toString(new StringBuilder().append(prefix).append(isTail ? " " : "│ "), true, sb);
  }
  return sb;
}
/**
 * Demo driver: repeatedly scans tokens from the lexer and prints each with its
 * tag, plus the numeric value for NUM tokens and the lexeme for ID tokens.
 *
 * NOTE(review): the while(true) loop has no termination condition — behavior at
 * end of input depends entirely on Lexer.scan(); if scan() throws at EOF this
 * will print stack traces forever. Confirm scan()'s EOF contract.
 */
public static void main(String args[]) {
  System.out.println("start.");
  Lexer l = new Lexer();
  while (true) {
    try {
      Token t = l.scan();
      if (t.tag == Tag.NUM) {
        // NUM tokens carry their numeric value in the Num subclass.
        System.out.println(
            "Token : " + t.toString() + " , Tag:" + t.tag + " , value:" + ((Num) t).value);
      } else if (t.tag == Tag.ID) {
        // ID tokens carry their lexeme in the Word subclass.
        System.out.println(
            "Token : " + t.toString() + " , Tag:" + t.tag + " , lexeme:" + ((Word) t).lexeme);
      } else {
        System.out.println("Token : " + t.toString() + " , Tag:" + t.tag);
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
/**
 * Serializes the n-gram model to a binary file at {@code path}.
 *
 * Format, in order:
 *   1. token count, then each unique token as UTF with an implicit id equal to
 *      its write order (ids are reused by the sections below);
 *   2. Good-Turing parameters (total as int, a and b as doubles);
 *   3. top-ranked unigram count, then each as its token id;
 *   4. n-gram count, then for each n-gram its length, its token ids, and its
 *      occurrence count.
 *
 * NOTE(review): iteration order of the HashSet determines token ids, so the
 * file layout is not stable across runs — fine as long as the reader resolves
 * ids via section 1, which it appears designed to do.
 *
 * @param path destination file path
 */
public void writeToFile(String path) {
  try (DataOutputStream outputStream =
      new DataOutputStream(new BufferedOutputStream(new FileOutputStream(path)))) {
    // First write all unique tokens (collected from the unigrams).
    Map<Token, Integer> tokenToId = new HashMap<>();
    Set<Token> tokens = new HashSet<>();
    for (NGram ngram : this.ngramModel.getNgrams().keySet()) {
      if (ngram.length() == 1) {
        tokens.add(ngram.at(0));
      }
    }
    outputStream.writeInt(tokens.size());
    int id = 0;
    for (Token token : tokens) {
      outputStream.writeUTF(token.toString());
      tokenToId.put(token, id);
      id++;
    }
    // Then the Good-Turing parameters
    outputStream.writeInt(this.ngramModel.getGoodTuringEstimation().getTotal());
    outputStream.writeDouble(this.ngramModel.getGoodTuringEstimation().getA());
    outputStream.writeDouble(this.ngramModel.getGoodTuringEstimation().getB());
    // Then the top ranked unigrams
    outputStream.writeInt(this.ngramModel.topUnigrams().size());
    for (NGram ngram : this.ngramModel.topUnigrams()) {
      Token token = ngram.at(0);
      outputStream.writeInt(tokenToId.get(token));
    }
    // Then the n-grams, where the token id points to the previous table
    outputStream.writeInt(this.ngramModel.getNgrams().size());
    for (Map.Entry<NGram, Integer> current : this.ngramModel.getNgrams().entrySet()) {
      outputStream.writeInt(current.getKey().length());
      for (int i = 0; i < current.getKey().length(); i++) {
        Token token = current.getKey().at(i);
        outputStream.writeInt(tokenToId.get(token));
      }
      outputStream.writeInt(current.getValue());
    }
    outputStream.flush();
  } catch (Exception e) {
    e.printStackTrace();
  }
}
/**
 * Parses an ElementDecl production: token 31 NAME ContentSpec PCB.
 * JavaCC-generated; token kind 31 is presumably the "&lt;!ELEMENT" opener —
 * confirm in the grammar's token declarations.
 *
 * @return an ElementDecl AST node for the named element and its content spec
 * @throws ParseException if the token stream does not match the production
 */
public final ElementDecl ElementDecl() throws ParseException {
  Token t;
  ASTStringNode name;
  ContentSpec contentSpec;
  Token firstToken = token; // start of this production's token span
  jj_consume_token(31);
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  contentSpec = ContentSpec();
  jj_consume_token(PCB);
  {
    if (true) return new ElementDecl(name, contentSpec, firstToken.next, token);
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Parses an AttribDef production: NAME AttribType DefaultDecl.
 * JavaCC-generated; the trailing throw is unreachable boilerplate.
 *
 * @return an AttribDef AST node for one attribute definition
 * @throws ParseException if the token stream does not match the production
 */
public final AttribDef AttribDef() throws ParseException {
  Token t;
  ASTStringNode name;
  AttribType attribType;
  DefaultDecl defaultDecl;
  Token firstToken = token; // start of this production's token span
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  attribType = AttribType();
  defaultDecl = DefaultDecl();
  {
    if (true) return new AttribDef(name, attribType, defaultDecl, firstToken.next, token);
  }
  // Unreachable; generated to satisfy definite-return analysis.
  throw new Error("Missing return statement in function");
}
/**
 * Runs a table-driven LL(1) predictive parse over the lexer's token stream.
 *
 * The stack starts with {@code startSymbol}. On each step: if the stack top is
 * a nonterminal, it is replaced by the right-hand side of the rule selected by
 * (nonterminal, next input) in the parsing table (pushed in reverse so the
 * leftmost symbol ends up on top; EPSILON symbols are not pushed); if the top
 * is a terminal, it must match the next input token, which is then consumed.
 *
 * @param parsingTable maps (nonterminal, lookahead terminal) to the rule to apply
 * @param startSymbol the grammar's start nonterminal
 * @return "Valid" if the stack empties; otherwise a diagnostic explaining why
 *     the input was rejected
 */
public static String parse(
    Lexer lexer,
    HashMap<Nonterminal, HashMap<Terminal, Rule>> parsingTable,
    Nonterminal startSymbol) {
  Stack<Token> stack = new Stack<Token>();
  ArrayList<Terminal> inputs = lexer.getTokens();
  int count = 0; // index of the next unconsumed input terminal
  stack.push(startSymbol);
  while (!stack.isEmpty() && count < inputs.size()) {
    Token top = stack.peek();
    System.out.println(
        "Testing top: " + top.toString() + ", next input: " + inputs.get(count).toString());
    if (top instanceof Nonterminal) {
      Nonterminal nextNonTerm = (Nonterminal) top;
      Rule rules = parsingTable.get(nextNonTerm).get(inputs.get(count));
      if (rules == null)
        return "Invalid. No rule found for "
            + nextNonTerm.toString()
            + " and the input "
            + inputs.get(count);
      stack.pop();
      // Push the rule's RHS in reverse so its first symbol is processed next;
      // epsilon productions push nothing.
      for (int i = rules.rule.length - 1; i > -1; i--) {
        if (!rules.rule[i].equals(new Terminal(Terminal.TerminalType.EPSILON)))
          stack.push(rules.rule[i]);
      }
    } else if (top instanceof Terminal) {
      Terminal nextTerm = (Terminal) top;
      Terminal nextInput = inputs.get(count);
      if (nextTerm.equals(nextInput)) {
        // Terminal matches: consume one input token and pop it off the stack.
        ++count;
        stack.pop();
      } else return nextTerm + " and " + nextInput + " do not match: invalid";
    }
    System.out.println("Current stack: " + stack.toString());
  }
  if (stack.isEmpty()) return "Valid";
  return "Invalid. Ran out of tokens early or ran over input count";
}
protected void onActivityResult(int requestCode, int resultCode, Intent intent) { switch (requestCode) { case REQUEST_PICK_TOKEN: if (resultCode == RESULT_OK && intent != null) { Uri name = intent.getData(); try { // URI vs Uri -> Java/Android fail ... File f = new File(new URI(name.toString())); Token t = Token.fromFile(f); SharedPreferences p = PreferenceManager.getDefaultSharedPreferences(getBaseContext()); SharedPreferences.Editor e = p.edit(); e.putString("token", t.toString()); e.commit(); Toast.makeText(this, "Token successfully imported.", Toast.LENGTH_LONG).show(); } catch (Exception e) { e.printStackTrace(); } } break; default: Log.w(TAG, "Unknown activity " + intent.toString()); } }
/**
 * Drains the token stream into {@code placeList}, building one Node per
 * non-empty token (recording doc id, token text, index position and doc
 * length). Every 20000th call flushes: sorts the buffered files, resets the
 * buffer, and reinitializes the writer.
 *
 * @param tokenStream the tokens to index
 * @param docId identifier of the source document
 * @param docSize length of the source document
 */
public void writeDictionary(TokenStream tokenStream, String docId, int docSize) {
  count++;
  while (tokenStream.hasNext()) {
    Token token = tokenStream.next();
    String text = token.toString();
    Node entry = new Node();
    entry.setDocId(docId);
    entry.setIndexPos(token.getIndexPos());
    entry.setDocLength(docSize);
    // Skip empty tokens entirely — they are never buffered.
    if (!text.isEmpty()) {
      entry.setTokenText(text);
      placeList.add(entry);
    }
  }
  if (count == 20000) {
    count = 0;
    sortFiles();
    placeList = new ArrayList<Node>();
    initializeWriter();
  }
}
/**
 * Creates an exception whose message is the offending token's string form.
 *
 * @param token the token with the illegal name; its toString() becomes the
 *     exception message
 */
public IllegalTokenNameException(Token token) {
  super(token.toString());
}
/**
 * Returns a string representation of this node (delegates to its token).
 *
 * @return the string form of the node's token
 */
public String toString() {
  return tok.toString();
}
/**
 * Advances to the next token, caches it in {@code currentToken}, and returns
 * it. When debug mode is on, the token (or "(null)" at end of input) is echoed
 * to stdout.
 *
 * @return the token just produced, possibly null
 */
public Token next() {
  currentToken = internalNext();
  if (debug) {
    String shown;
    if (currentToken == null) {
      shown = "(null)";
    } else {
      shown = currentToken.toString();
    }
    System.out.println(shown);
  }
  return currentToken;
}