/**
 * Prints out tokens from a file or System.in. If no arguments are given, System.in will be used
 * for input. If more arguments are given, the first argument will be used as the name of the file
 * to use as input
 *
 * @param args program arguments, of which the first is a filename
 */
public static void main(String[] args) {
  InputStream in = null;
  try {
    if (args.length > 0) {
      File f = new File(args[0]);
      if (!f.exists()) {
        throw new IOException("Could not find " + args[0]);
      }
      if (!f.canRead()) {
        throw new IOException("Could not open " + args[0]);
      }
      in = new FileInputStream(f);
    } else {
      in = System.in;
    }
    PropertiesLexer shredder = new PropertiesLexer(in);
    Token t;
    // print every token except whitespace
    while ((t = shredder.getNextToken()) != null) {
      if (t.getID() != PropertiesToken.WHITE_SPACE) {
        System.out.println(t);
      }
    }
  } catch (IOException e) {
    System.err.println(e.getMessage());
  } finally {
    // fix: the FileInputStream was never closed; close it here, but never close System.in
    if (in != null && in != System.in) {
      try {
        in.close();
      } catch (IOException ignored) {
        // best effort - nothing useful to do if close fails
      }
    }
  }
}
/**
 * Chooses files that match the specified pattern.
 *
 * @param file file filter
 * @param content content filter
 * @param root root directory
 * @return sorted file paths
 * @throws InterruptedException interruption
 */
String[] filter(final String file, final String content, final IOFile root)
    throws InterruptedException {
  // bump the generation counter; a newer call invalidates this one (checked below)
  final long id = ++filterId;
  // TreeSet keeps the result paths sorted
  final TreeSet<String> results = new TreeSet<>();
  // lower-cased codepoints of the content filter; empty array means "no content filter"
  final int[] search = new TokenParser(Token.lc(Token.token(content))).toArray();
  // glob pattern
  final ProjectCache pc = cache(root);
  if (file.contains("*") || file.contains("?")) {
    // wildcard search: convert the glob to a regex and match each cached path
    final Pattern pt = Pattern.compile(IOFile.regex(file));
    for (final String path : pc) {
      final int offset = offset(path, true);
      if (pt.matcher(path.substring(offset)).matches() && filterContent(path, search)) {
        results.add(path);
        // stop once enough hits were collected
        if (results.size() >= MAXHITS) break;
      }
      // abort if a newer filter call has been started in the meantime
      if (id != filterId) throw new InterruptedException();
    }
  } else {
    // starts-with, contains, camel case
    final String pttrn = file.toLowerCase(Locale.ENGLISH).replace('\\', '/');
    final HashSet<String> exclude = new HashSet<>();
    final boolean pathSearch = pttrn.indexOf('/') != -1;
    // run the match modes in order of strictness; paths found by an earlier
    // mode are excluded from later ones (2 modes for path searches, 3 otherwise)
    for (int i = 0; i < (pathSearch ? 2 : 3); i++) {
      filter(pttrn, search, i, results, exclude, pathSearch, pc, id);
    }
  }
  return results.toArray(new String[results.size()]);
}
/**
 * Parses an attribute-list declaration (JavaCC-generated): token kind 34
 * (presumably the "&lt;!ATTLIST" literal - confirm against the .jj grammar),
 * a NAME, zero or more attribute definitions, and a closing PCB token.
 *
 * @return the parsed {@code AttListDecl} AST node
 * @throws ParseException if the token stream does not match the production
 */
public final AttListDecl AttListDecl() throws ParseException {
  Token t;
  ASTStringNode name;
  AttribDef attribDef;
  ArrayList<AttribDef> attribDefList = new ArrayList<AttribDef>();
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  jj_consume_token(34);
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  label_4:
  // consume AttribDef productions while the lookahead token is a NAME
  while (true) {
    switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
      case NAME:
        ;
        break;
      default:
        jj_la1[11] = jj_gen;
        break label_4;
    }
    attribDef = AttribDef();
    attribDefList.add(attribDef);
  }
  jj_consume_token(PCB);
  {
    // generated idiom: wrap in "if (true)" so the trailing throw stays reachable for javac
    if (true) return new AttListDecl(name, attribDefList, firstToken.next, token);
  }
  throw new Error("Missing return statement in function");
}
/**
 * Parses a complete DTD (JavaCC-generated): zero or more DTD entries
 * (introduced by token kinds 31 or 34) followed by end-of-file (token kind 0).
 *
 * @return the parsed {@code DTD} AST node
 * @throws ParseException if the token stream does not match the production
 */
public final DTD DTD() throws ParseException {
  DTDEntry dTDEntry;
  ArrayList<DTDEntry> dTDEntryList = new ArrayList<DTDEntry>();
  Token t;
  ASTStringNode eof;
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  label_1:
  // consume DTDEntry productions while the lookahead starts one (kind 31 or 34)
  while (true) {
    switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
      case 31:
      case 34:
        ;
        break;
      default:
        jj_la1[0] = jj_gen;
        break label_1;
    }
    dTDEntry = DTDEntry();
    dTDEntryList.add(dTDEntry);
  }
  // token kind 0 is EOF in JavaCC
  t = jj_consume_token(0);
  eof = new ASTStringNode(t.toString(), new WToken(token));
  {
    // generated idiom: wrap in "if (true)" so the trailing throw stays reachable for javac
    if (true) return new DTD(dTDEntryList, eof, firstToken.next, token);
  }
  throw new Error("Missing return statement in function");
}
/**
 * Gets the next token from a tokenizer and converts it to a string.
 *
 * @return The next token in the stream, as a string.
 * @throws TextParseException The input was invalid or not a string.
 * @throws IOException An I/O error occurred.
 */
public String getString() throws IOException {
  final Token tok = get();
  if (tok.isString()) {
    return tok.value;
  }
  throw exception("expected a string");
}
/** * Constructor, specifying the server host:port combination, login data and an output stream. * * @param host server name * @param port server port * @param user user name * @param pass password * @param output client output; if set to {@code null}, results will be returned as strings. * @throws IOException I/O exception */ public ClientSession( final String host, final int port, final String user, final String pass, final OutputStream output) throws IOException { super(output); ehost = host; socket = new Socket(); try { // limit timeout to five seconds socket.connect(new InetSocketAddress(host, port), 5000); } catch (final IllegalArgumentException ex) { throw new BaseXException(ex); } sin = socket.getInputStream(); // receive timestamp final BufferInput bi = new BufferInput(sin); final String ts = bi.readString(); // send user name and hashed password/timestamp sout = PrintOutput.get(socket.getOutputStream()); send(user); send(Token.md5(Token.md5(pass) + ts)); sout.flush(); // receive success flag if (!ok(bi)) throw new LoginException(); }
/**
 * Command-line driver: tokenizes the given OCaml source file and prints each
 * token's type and character range.
 *
 * @param args exactly one argument: the input file name
 */
public static void main(String[] args) {
  if (args.length != 1) {
    System.err.println("Usage: java OCamlLexerTester <input.ml>");
    System.exit(1);
  }
  OCamlLexer lexer = new OCamlLexer();
  ArrayList<Token> tokens = new ArrayList<Token>();
  FileReader reader = null;
  try {
    reader = new FileReader(args[0]);
    lexer.tokenize(reader, tokens);
    for (Token t : tokens)
      System.out.println(
          t.getType() + " - " + t.getStart() + "..." + (t.getStart() + t.getLength()));
  } catch (IOException e) {
    System.err.println("IO Exception");
    e.printStackTrace();
  } finally {
    // fix: the FileReader was never closed; release it on every path
    if (reader != null) {
      try {
        reader.close();
      } catch (IOException ignored) {
        // best effort
      }
    }
  }
}
/**
 * Parses an enumerated attribute type (JavaCC-generated): an opening bracket
 * (OB), a NAME, zero or more "| NAME" alternatives (token kind 32 - presumably
 * the '|' literal; confirm against the .jj grammar), and a closing bracket (CB).
 *
 * @return the parsed {@code Enumeration} AST node
 * @throws ParseException if the token stream does not match the production
 */
public final Enumeration Enumeration() throws ParseException {
  Token t;
  ASTStringNode name;
  ASTStringNode name1;
  ArrayList<ASTStringNode> name1List = new ArrayList<ASTStringNode>();
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  jj_consume_token(OB);
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  label_6:
  // consume further alternatives while the lookahead is the separator (kind 32)
  while (true) {
    switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
      case 32:
        ;
        break;
      default:
        jj_la1[16] = jj_gen;
        break label_6;
    }
    jj_consume_token(32);
    t = jj_consume_token(NAME);
    name1 = new ASTStringNode(t.toString(), new WToken(token));
    name1List.add(name1);
  }
  jj_consume_token(CB);
  {
    // generated idiom: wrap in "if (true)" so the trailing throw stays reachable for javac
    if (true) return new Enumeration(name, name1List, firstToken.next, token);
  }
  throw new Error("Missing return statement in function");
}
/**
 * {@inheritDoc}
 *
 * <p>The next line is indented only after a single-character token that is an
 * opening brace or parenthesis.
 */
@Override
public boolean getShouldIndentNextLineAfter(Token t) {
  if (t == null || t.length() != 1) {
    return false;
  }
  final char last = t.charAt(0);
  return last == '{' || last == '(';
}
/**
 * Command-line driver: lexes the given file with the ANTLR-generated WigLexer
 * and prints each token's text and type until EOF.
 *
 * @param args exactly one argument: the input file name
 * @throws Exception if the input file cannot be read
 */
public static void main(String[] args) throws Exception {
  // fix: guard against a missing argument instead of failing with a raw
  // ArrayIndexOutOfBoundsException
  if (args.length < 1) {
    System.err.println("Usage: java WigLexer <input-file>");
    System.exit(1);
  }
  ANTLRFileStream input = new ANTLRFileStream(args[0]);
  WigLexer lexer = new WigLexer(input);
  Token token;
  // ANTLR v3 returns the EOF_TOKEN singleton, so identity comparison is intended here
  while ((token = lexer.nextToken()) != Token.EOF_TOKEN) {
    System.out.println("Token: " + token.getText() + "(" + token.getType() + ")");
  }
}
/**
 * Returns the token {@code index} positions ahead of the current one, pulling
 * further tokens from the token manager on demand and caching them in the
 * {@code next} chain.
 */
public final Token getToken(int index) {
  Token current = token;
  for (int steps = 0; steps < index; steps++) {
    if (current.next == null) {
      current.next = token_source.getNextToken();
    }
    current = current.next;
  }
  return current;
}
/**
 * Returns the next token, optionally filtering out whitespace and comment tokens.
 *
 * @param returnComments if {@code false}, comment tokens are skipped
 * @param returnWhiteSpace if {@code false}, whitespace tokens are skipped
 * @return the next matching token, or {@code null} at end of input
 * @throws IOException if reading the underlying input fails
 */
public Token getNextToken(boolean returnComments, boolean returnWhiteSpace) throws IOException {
  Token candidate = getNextToken();
  while (candidate != null) {
    final boolean skipAsWhiteSpace = !returnWhiteSpace && candidate.isWhiteSpace();
    final boolean skipAsComment = !returnComments && candidate.isComment();
    if (!skipAsWhiteSpace && !skipAsComment) {
      break;
    }
    candidate = getNextToken();
  }
  return candidate;
}
/**
 * Parses and returns a valid 'leftside' of an expression. If the left side starts with a prefix,
 * it consumes other expressions with a lower priority than itself. If the left side does not have
 * a prefix it must be an expr0.
 *
 * @param commaIsEndMarker used when the leftside is part of and argument list of expressions
 * @param maxPriority operators with a higher priority than this will effectivly end the
 *     expression
 * @return a wrapper of: 1. term correctly structured and 2. the priority of its root operator
 * @throws InvalidTermException
 * @throws IOException if reading from the tokenizer fails
 */
private IdentifiedTerm parseLeftSide(boolean commaIsEndMarker, int maxPriority)
    throws InvalidTermException, IOException {
  // 1. prefix expression
  Token f = tokenizer.readToken();
  if (f.isOperator(commaIsEndMarker)) {
    // priorities of this atom as a prefix operator in fx / fy position (-1 if undefined)
    int FX = opManager.opPrio(f.seq, "fx");
    int FY = opManager.opPrio(f.seq, "fy");
    if (f.seq.equals("-")) {
      // special case: unary minus directly followed by a number is a negative literal
      Token t = tokenizer.readToken();
      if (t.isNumber())
        /*Michele Castagna 06/2011*/
        // return new IdentifiedTerm(0, Parser.createNumber("-" + t.seq));
        return identifyTerm(0, Parser.createNumber("-" + t.seq), tokenStart);
      /**/
      else tokenizer.unreadToken(t);
    }
    // check that no operator has a priority higher than permitted
    if (FY > maxPriority) FY = -1;
    if (FX > maxPriority) FX = -1;
    // FX has priority over FY
    boolean haveAttemptedFX = false;
    if (FX >= FY && FX >= OperatorManager.OP_LOW) {
      IdentifiedTerm found = exprA(FX - 1, commaIsEndMarker); // op(fx, n) exprA(n - 1)
      if (found != null)
        /*Castagna 06/2011*/
        // return new IdentifiedTerm(FX, new Struct(f.seq, found.result));
        return identifyTerm(FX, new Struct(f.seq, found.result), tokenStart);
      /**/
      else haveAttemptedFX = true;
    }
    // FY has priority over FX, or FX has failed
    if (FY >= OperatorManager.OP_LOW) {
      IdentifiedTerm found = exprA(FY, commaIsEndMarker); // op(fy,n) exprA(1200) or op(fy,n) exprA(n)
      if (found != null)
        /*Castagna 06/2011*/
        // return new IdentifiedTerm(FY, new Struct(f.seq, found.result));
        return identifyTerm(FY, new Struct(f.seq, found.result), tokenStart);
      /**/
    }
    // FY has priority over FX, but FY failed
    if (!haveAttemptedFX && FX >= OperatorManager.OP_LOW) {
      // retry fx as a fallback (only if it was not already attempted above)
      IdentifiedTerm found = exprA(FX - 1, commaIsEndMarker); // op(fx, n) exprA(n - 1)
      if (found != null)
        /*Castagna 06/2011*/
        // return new IdentifiedTerm(FX, new Struct(f.seq, found.result));
        return identifyTerm(FX, new Struct(f.seq, found.result), tokenStart);
      /**/
    }
  }
  // not a prefix operator expression: push the token back and parse a plain expr0
  tokenizer.unreadToken(f);
  // 2. expr0
  return new IdentifiedTerm(0, expr0());
}
/**
 * Parses a content particle (JavaCC-generated) - one of three alternatives:
 * #PCDATA ({@code Cp1}), a NAME with an optional modifier ({@code Cp2}), or a
 * parenthesized sequence/choice with an optional modifier ({@code Cp3}).
 *
 * @return the parsed {@code Cp} AST node (one of its three subclasses)
 * @throws ParseException if the lookahead token starts none of the alternatives
 */
public final Cp Cp() throws ParseException {
  Token t;
  ASTStringNode name;
  Modifier modifier = null;
  SeqOrChoice seqOrChoice;
  Modifier modifier1 = null;
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
    case PCDATA:
      jj_consume_token(PCDATA);
      {
        // generated idiom: "if (true)" keeps the trailing throw reachable for javac
        if (true) return new Cp1(firstToken.next, token);
      }
      break;
    case NAME:
      t = jj_consume_token(NAME);
      name = new ASTStringNode(t.toString(), new WToken(token));
      // optional modifier: * + or ?
      switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
        case STAR:
        case PLUS:
        case Q:
          modifier = Modifier();
          break;
        default:
          jj_la1[7] = jj_gen;
          ;
      }
      {
        if (true) return new Cp2(name, modifier, firstToken.next, token);
      }
      break;
    case OB:
      seqOrChoice = SeqOrChoice();
      // optional modifier: * + or ?
      switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
        case STAR:
        case PLUS:
        case Q:
          modifier1 = Modifier();
          break;
        default:
          jj_la1[8] = jj_gen;
          ;
      }
      {
        if (true) return new Cp3(seqOrChoice, modifier1, firstToken.next, token);
      }
      break;
    default:
      jj_la1[9] = jj_gen;
      // no alternative matched: consume an impossible token kind to raise ParseException
      jj_consume_token(-1);
      throw new ParseException();
  }
  throw new Error("Missing return statement in function");
}
/**
 * Parses an attribute value (JavaCC-generated): a single QUOTEDSTR token.
 *
 * @return the parsed {@code AttribValue} AST node
 * @throws ParseException if the next token is not a quoted string
 */
public final AttribValue AttribValue() throws ParseException {
  Token t;
  ASTStringNode quotedstr;
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  t = jj_consume_token(QUOTEDSTR);
  quotedstr = new ASTStringNode(t.toString(), new WToken(token));
  {
    // generated idiom: "if (true)" keeps the trailing throw reachable for javac
    if (true) return new AttribValue(quotedstr, firstToken.next, token);
  }
  throw new Error("Missing return statement in function");
}
/** Provides a command line interface to the lexer */ public static void main(String[] args) throws IOException { // check for legal argument list Lexer in; if (args.length == 0) { in = new Lexer(); } else in = new Lexer(args[0]); do { Token t = in.readToken(); if (t == null) break; System.out.println("Token " + t + " in " + t.getClass()); } while (true); }
// Reads one UTF-8 encoded codepoint from the input: a single byte for ASCII,
// otherwise the lead byte plus its continuation bytes, decoded via Token.cp.
// Returns the codepoint, or the result of invalid() for malformed input.
@Override
int read(final TextInput ti) throws IOException {
  int ch = ti.readByte();
  // single-byte (ASCII) character, or end of input (readByte presumably returns
  // a negative value at EOF - confirm against TextInput)
  if (ch < 0x80) return ch;
  // 0x80-0xBF is a continuation byte and must not start a sequence
  if (ch < 0xC0) return invalid();
  cache[0] = (byte) ch;
  // total sequence length derived from the lead byte
  final int cl = Token.cl((byte) ch);
  for (int c = 1; c < cl; ++c) {
    ch = ti.readByte();
    // NOTE(review): this only rejects bytes < 0x80 (including EOF); bytes in
    // 0xC0-0xFF are not valid continuation bytes either but pass this check -
    // confirm whether Token.cp tolerates them
    if (ch < 0x80) return invalid();
    cache[c] = (byte) ch;
  }
  // decode the buffered sequence into a single codepoint
  return Token.cp(cache, 0);
}
/**
 * Adds every character n-gram (for each configured gram size) of each token's
 * text as a binary feature on that token, prefixed with {@code prefix}. When
 * {@code distinguishBorders} is set, border characters are attached to the text
 * before extracting n-grams.
 *
 * @param carrier instance whose data is a {@link TokenSequence}
 * @return the same instance, with features added in place
 */
public Instance pipe(Instance carrier) {
  final TokenSequence sequence = (TokenSequence) carrier.getData();
  for (int pos = 0; pos < sequence.size(); pos++) {
    final Token tok = sequence.get(pos);
    String text = tok.getText();
    if (distinguishBorders) {
      text = startBorderChar + text + endBorderChar;
    }
    final int len = text.length();
    for (int g = 0; g < gramSizes.length; g++) {
      final int size = gramSizes[g];
      // last valid start index of an n-gram of this size
      final int lastStart = len - size;
      for (int start = 0; start <= lastStart; start++) {
        tok.setFeatureValue((prefix + text.substring(start, start + size)).intern(), 1.0);
      }
    }
  }
  return carrier;
}
/**
 * Parses an element declaration (JavaCC-generated): token kind 31 (presumably
 * the "&lt;!ELEMENT" literal - confirm against the .jj grammar), a NAME, a
 * content specification, and a closing PCB token.
 *
 * @return the parsed {@code ElementDecl} AST node
 * @throws ParseException if the token stream does not match the production
 */
public final ElementDecl ElementDecl() throws ParseException {
  Token t;
  ASTStringNode name;
  ContentSpec contentSpec;
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  jj_consume_token(31);
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  contentSpec = ContentSpec();
  jj_consume_token(PCB);
  {
    // generated idiom: "if (true)" keeps the trailing throw reachable for javac
    if (true) return new ElementDecl(name, contentSpec, firstToken.next, token);
  }
  throw new Error("Missing return statement in function");
}
/**
 * Parses a single attribute definition (JavaCC-generated): a NAME followed by
 * an attribute type and a default declaration.
 *
 * @return the parsed {@code AttribDef} AST node
 * @throws ParseException if the token stream does not match the production
 */
public final AttribDef AttribDef() throws ParseException {
  Token t;
  ASTStringNode name;
  AttribType attribType;
  DefaultDecl defaultDecl;
  // remember the token preceding this production for AST position info
  Token firstToken = token;
  t = jj_consume_token(NAME);
  name = new ASTStringNode(t.toString(), new WToken(token));
  attribType = AttribType();
  defaultDecl = DefaultDecl();
  {
    // generated idiom: "if (true)" keeps the trailing throw reachable for javac
    if (true) return new AttribDef(name, attribType, defaultDecl, firstToken.next, token);
  }
  throw new Error("Missing return statement in function");
}
/**
 * Searches a string in a file.
 *
 * @param path file path
 * @param search codepoints of search string
 * @return success flag
 */
private static boolean filterContent(final String path, final int[] search) {
  final int cl = search.length;
  // an empty search string matches every file
  if (cl == 0) return true;
  try (final TextInput ti = new TextInput(new IOFile(path))) {
    // rollback buffer: codepoints consumed during a partial match that may
    // themselves restart a match after a mismatch
    final IntList il = new IntList(cl - 1);
    int c = 0;
    while (true) {
      // first retry matching against buffered codepoints from a failed attempt
      if (!il.isEmpty()) {
        if (il.remove(0) == search[c++]) continue;
        c = 0;
      }
      while (true) {
        final int cp = ti.read();
        // stop at end of file; also gives up on codepoints that are invalid in
        // XML (NOTE(review): treats such files as binary/non-matching)
        if (cp == -1 || !XMLToken.valid(cp)) return false;
        // compare case-insensitively via lower-casing
        final int lc = Token.lc(cp);
        // while inside a partial match, remember codepoints for possible rollback
        if (c > 0) il.add(lc);
        if (lc == search[c]) {
          if (++c == cl) return true;
        } else {
          // mismatch: restart, replaying the buffered codepoints first
          c = 0;
          break;
        }
      }
    }
  } catch (final IOException ex) {
    // file may not be accessible
    Util.debug(ex);
    return false;
  }
}
public Instance pipe(Instance carrier) { TokenSequence ts = (TokenSequence) carrier.getData(); for (int i = 0; i < ts.size(); i++) { Token t = ts.get(i); String s = t.getText(); if (distinguishBorders) s = startBorderChar + s + endBorderChar; int slen = s.length(); for (int j = 0; j < gramSizes.length; j++) { int size = gramSizes[j]; for (int k = 0; k < slen - size; k++) t.setFeatureValue( s.substring(k, k + size), 1.0); // original was substring(k, size), changed by Fuchun } } return carrier; }
/** Reinitializes the parser with a new token manager, resetting all lookahead state. */
public void ReInit(ManifestParserTokenManager tm) {
  token_source = tm;
  token = new Token();
  // prime the lookahead chain with the first real token
  jj_nt = token_source.getNextToken();
  token.next = jj_nt;
  jj_gen = 0;
  // clear the generation markers used for error reporting
  for (int slot = 0; slot < 4; ++slot) {
    jj_la1[slot] = -1;
  }
}
/** Reinitializes the parser on a new character stream, resetting all lookahead state. */
public void ReInit(CharStream stream) {
  token_source.ReInit(stream);
  token = new Token();
  // prime the lookahead chain with the first real token
  jj_nt = token_source.getNextToken();
  token.next = jj_nt;
  jj_gen = 0;
  // clear the generation markers used for error reporting
  for (int slot = 0; slot < 4; ++slot) {
    jj_la1[slot] = -1;
  }
}
/** Creates a parser reading from the given character stream. */
public ManifestParser(CharStream stream) {
  token_source = new ManifestParserTokenManager(stream);
  token = new Token();
  // prime the lookahead chain with the first real token
  jj_nt = token_source.getNextToken();
  token.next = jj_nt;
  jj_gen = 0;
  // clear the generation markers used for error reporting
  for (int slot = 0; slot < 4; ++slot) {
    jj_la1[slot] = -1;
  }
}
/**
 * Test concurrent reader and writer (GH-458).
 *
 * <p><b>Test case:</b>
 *
 * <ol>
 *   <li/>start a long running reader;
 *   <li/>try to start a writer: it should time out;
 *   <li/>stop the reader;
 *   <li/>start the writer again: it should succeed.
 * </ol>
 *
 * @throws Exception error during request execution
 */
@Test
@Ignore("There is no way to stop a query on the server!")
public void testReaderWriter() throws Exception {
  // long-running read query (scans a huge range) to hold a read lock
  final String readerQuery = "?query=(1%20to%20100000000000000)%5b.=1%5d";
  final String writerQuery = "/test.xml";
  final byte[] content = Token.token("<a/>");
  final Get readerAction = new Get(readerQuery);
  final Put writerAction = new Put(writerQuery, content);
  final ExecutorService exec = Executors.newFixedThreadPool(2);
  // start reader
  exec.submit(readerAction);
  Performance.sleep(TIMEOUT); // delay in order to be sure that the reader has started
  // start writer
  Future<HTTPResponse> writer = exec.submit(writerAction);
  try {
    final HTTPResponse result = writer.get(TIMEOUT, TimeUnit.MILLISECONDS);
    // if the writer completed successfully, isolation was violated
    if (result.status.isSuccess()) fail("Database modified while a reader is running");
    // writer finished but with an unexpected status: surface it as a test error
    throw new Exception(result.toString());
  } catch (final TimeoutException e) {
    // writer is blocked by the reader: stop it
    writerAction.stop = true;
  }
  // stop reader
  readerAction.stop = true;
  // start the writer again
  writer = exec.submit(writerAction);
  // NOTE(review): the executor service is never shut down - acceptable in a
  // short-lived test, but exec.shutdown() would be cleaner
  assertEquals(HTTPCode.CREATED, writer.get().status);
}
/**
 * Advances to and returns the next token, pulling it from the token manager
 * when it has not been fetched yet, and resets the cached lookahead kind.
 */
public final Token getNextToken() {
  Token successor = token.next;
  if (successor == null) {
    successor = token_source.getNextToken();
    token.next = successor;
  }
  token = successor;
  jj_ntk = -1;
  jj_gen++;
  return token;
}
/**
 * Static service to get a term from its string representation, providing a specific operator
 * manager.
 *
 * @param st the textual representation of the term
 * @param op the operator manager used while parsing
 * @return the parsed term, fully resolved
 * @throws InvalidTermException if the text is empty, is not a single term, or cannot be read
 */
public static Term parseSingleTerm(String st, OperatorManager op) throws InvalidTermException {
  try {
    Parser p = new Parser(op, st);
    Token t = p.tokenizer.readToken();
    if (t.isEOF()) throw new InvalidTermException("Term starts with EOF");
    // push the token back so expr() sees the complete input
    p.tokenizer.unreadToken(t);
    Term term = p.expr(false);
    if (term == null) throw new InvalidTermException("Term is null");
    // the whole input must be consumed by exactly one term
    if (!p.tokenizer.readToken().isEOF())
      throw new InvalidTermException("The entire string could not be read as one term");
    term.resolveTerm();
    return term;
  } catch (IOException ex) {
    // fix: corrected the typo "occured" in the error message
    throw new InvalidTermException("An I/O error occurred");
  }
}
/** Reinitialise: installs a new token manager and resets all lookahead and retry state. */
public void ReInit(ISEParserTokenManager tm) {
  token_source = tm;
  token = new Token();
  // prime the lookahead chain with the first real token
  jj_nt = token_source.getNextToken();
  token.next = jj_nt;
  jj_gen = 0;
  // clear the generation markers used for error reporting
  for (int slot = 0; slot < 3; ++slot) {
    jj_la1[slot] = -1;
  }
  // reset the semantic-lookahead call records
  for (int slot = 0; slot < jj_2_rtns.length; ++slot) {
    jj_2_rtns[slot] = new JJCalls();
  }
}
/**
 * Test client with different user.
 *
 * @throws IOException I/O exception
 */
@Test
public void user() throws IOException {
  final String dropUser = "-cdrop user " + NAME;
  // make sure no leftover user exists before the check
  run("-cexit", dropUser);
  final String[] query = {"-U" + NAME, "-P" + NAME, "-q5"};
  final String[] create = {"-ccreate user " + NAME + ' ' + Token.md5(NAME)};
  // querying "5" as the freshly created user must yield "5"
  equals("5", query, create);
  // clean up the created user again
  run("-cexit", dropUser);
}