private void setupTokenizer() {
  st.resetSyntax();
  st.wordChars('a', 'z');
  st.wordChars('A', 'Z');
  st.wordChars('0', '9');
  st.wordChars(':', ':');
  st.wordChars('.', '.');
  st.wordChars('_', '_');
  st.wordChars('-', '-');
  st.wordChars('/', '/');
  st.wordChars('\\', '\\');
  st.wordChars('$', '$');
  st.wordChars('{', '{'); // need {} for property subst
  st.wordChars('}', '}');
  st.wordChars('*', '*');
  st.wordChars('+', '+');
  st.wordChars('~', '~');
  // XXX check ASCII table and add all other characters except special
  // special: #="(),
  st.whitespaceChars(0, ' ');
  st.commentChar('#');
  st.eolIsSignificant(true);
  st.quoteChar('\"');
}
/**
 * Configure the lexical analyzer.
 *
 * @param reader the input stream reader
 * @return an s-expression lexer
 */
private StreamTokenizer createLexer(Reader reader) {
  StreamTokenizer tokenizer = new StreamTokenizer(reader);
  tokenizer.resetSyntax();
  tokenizer.eolIsSignificant(false);
  tokenizer.whitespaceChars(0, ' ');
  tokenizer.wordChars('!', '!');
  tokenizer.wordChars('*', 'z');
  return tokenizer;
}
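// Usage sketch (not part of the original class; assumes it runs inside the same class,
// in a method that may throw IOException, with a hypothetical input string): with this
// configuration '(' and ')' stay ordinary characters and come back as single-character
// tokens, while symbols and numerals such as "define" and "42" arrive as TT_WORD tokens.
StreamTokenizer lexer = createLexer(new StringReader("(define x 42)"));
for (int t = lexer.nextToken(); t != StreamTokenizer.TT_EOF; t = lexer.nextToken()) {
  if (t == StreamTokenizer.TT_WORD) {
    System.out.println("word: " + lexer.sval);
  } else {
    System.out.println("char: " + (char) t); // '(' and ')'
  }
}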
public CSVReader(BufferedReader input, char customizedSeparator) {
  this.separator = customizedSeparator;
  parser = new StreamTokenizer(input);
  parser.ordinaryChars(0, 255);
  parser.wordChars(0, 255);
  parser.ordinaryChar('\"');
  parser.ordinaryChar(customizedSeparator);
  // Need to set EOL significance after setting ordinary and word
  // chars, and need to explicitly set \n and \r as whitespace chars
  // for EOL detection to work
  parser.eolIsSignificant(true);
  parser.whitespaceChars('\n', '\n');
  parser.whitespaceChars('\r', '\r');
  atEOF = false;
}
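// Standalone sketch of the same tokenizer setup (hypothetical driver, not part of the
// original class), using ',' as the separator: every cell arrives as a TT_WORD token,
// the separator and '"' come back as ordinary single-character tokens, and the end of
// each record is reported as TT_EOL thanks to eolIsSignificant(true) plus the \n/\r
// whitespace settings above.
StreamTokenizer p = new StreamTokenizer(new BufferedReader(new StringReader("a,b\nc,d\n")));
p.ordinaryChars(0, 255);
p.wordChars(0, 255);
p.ordinaryChar('\"');
p.ordinaryChar(',');
p.eolIsSignificant(true);
p.whitespaceChars('\n', '\n');
p.whitespaceChars('\r', '\r');
for (int t = p.nextToken(); t != StreamTokenizer.TT_EOF; t = p.nextToken()) {
  if (t == StreamTokenizer.TT_WORD) System.out.println("cell: " + p.sval);
  else if (t == StreamTokenizer.TT_EOL) System.out.println("-- end of record --");
  else System.out.println("separator: " + (char) t);
}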
public static Map<String, String> getStyles(String str) throws IOException {
  HashMap<String, String> styles = new HashMap<String, String>();
  if (str == null) return styles;
  StreamTokenizer tt = new StreamTokenizer(new StringReader(str));
  tt.resetSyntax();
  tt.wordChars('!', '9');
  tt.wordChars('<', '~');
  tt.wordChars(128 + 32, 255);
  tt.whitespaceChars(0, ' ');
  while (tt.nextToken() != StreamTokenizer.TT_EOF) {
    if (tt.ttype != ';') {
      String key, value;
      if (tt.ttype != StreamTokenizer.TT_WORD) {
        throw new IOException(
            "Key token expected in " + str + " " + Integer.toHexString(tt.ttype));
      }
      key = tt.sval;
      if (tt.nextToken() != ':') {
        throw new IOException("Colon expected after " + key + " in " + str);
      }
      if (tt.nextToken() != StreamTokenizer.TT_WORD) {
        throw new IOException(
            "Value token expected after " + key + " in " + str + " " + tt.ttype);
      }
      value = tt.sval;
      while (tt.nextToken() == StreamTokenizer.TT_WORD) {
        value += ' ' + tt.sval;
      }
      tt.pushBack();
      styles.put(key, value);
    }
  }
  return styles;
}
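// Usage sketch (hypothetical input): parsing a typical SVG/CSS style attribute value.
// ':' and ';' are left as ordinary characters by the word-char ranges above, so they
// act as key/value and declaration separators.
Map<String, String> styles = getStyles("fill:none;stroke:black;stroke-width:2");
// styles now maps "fill" -> "none", "stroke" -> "black", "stroke-width" -> "2"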
/**
 * Returns the string array associated with a key, assuming it is defined. It is recommended to
 * check that it is defined first with {@link #hasValue(String)}.
 *
 * @throws RuntimeException if the key is not defined.
 * @see #hasValue(String)
 */
public String[] getList(/*@KeyFor("this.map")*/ String key) {
  try {
    if (!hasValue(key)) {
      throw new RuntimeException(String.format("Key '%s' is not defined", key));
    }
    final String sValue = getValue(key);
    StreamTokenizer tok = new StreamTokenizer(new StringReader(sValue));
    tok.quoteChar('"');
    tok.whitespaceChars(' ', ' ');
    ArrayList<String> lValues = new ArrayList<String>();
    // Advance the tokenizer on every iteration, even when the token is skipped,
    // so that a non-quoted token cannot stall the loop.
    for (int tokInfo = tok.nextToken();
        tokInfo != StreamTokenizer.TT_EOF;
        tokInfo = tok.nextToken()) {
      if (tok.ttype != '"') {
        continue; // only quoted list elements are collected
      }
      assert tok.sval != null : "@AssumeAssertion(nullness)"; // tok.ttype == '"' guarantees not null
      lValues.add(tok.sval.trim());
    }
    return lValues.toArray(new String[] {});
  } catch (IOException ex) {
    throw new RuntimeException(String.format("Parsing for key '%s' failed", key), ex);
  }
}
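// Usage sketch (hypothetical receiver "config" and key "dirs"): if the stored value is
// the string  "/tmp" "/home/me/data"  , each quoted element becomes one array entry.
String[] dirs = config.getList("dirs"); // {"/tmp", "/home/me/data"}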
public static List<BezierPath> fromPathData(String str) throws IOException {
  LinkedList<BezierPath> paths = new LinkedList<BezierPath>();
  BezierPath path = null;
  Point2D.Double p = new Point2D.Double();
  Point2D.Double c1 = new Point2D.Double();
  Point2D.Double c2 = new Point2D.Double();
  StreamTokenizer tt = new StreamTokenizer(new StringReader(str));
  tt.resetSyntax();
  tt.parseNumbers();
  tt.whitespaceChars(0, ' ');
  tt.whitespaceChars(',', ',');
  char nextCommand = 'M';
  char command = 'M';
  while (tt.nextToken() != StreamTokenizer.TT_EOF) {
    if (tt.ttype > 0) {
      command = (char) tt.ttype;
    } else {
      command = nextCommand;
      tt.pushBack();
    }
    BezierPath.Node node;
    switch (command) {
      // moveto
      case 'M':
        if (path != null) {
          paths.add(path);
        }
        path = new BezierPath();
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.moveTo(p.x, p.y);
        nextCommand = 'L';
        break;
      case 'm':
        if (path != null) {
          paths.add(path);
        }
        path = new BezierPath();
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.moveTo(p.x, p.y);
        nextCommand = 'l';
        break;
      // close path
      case 'Z':
      case 'z':
        p.x = path.get(0).x[0];
        p.y = path.get(0).y[0];
        path.setClosed(true);
        break;
      // lineto
      case 'L':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.lineTo(p.x, p.y);
        nextCommand = 'L';
        break;
      case 'l':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.lineTo(p.x, p.y);
        nextCommand = 'l';
        break;
      case 'H':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        path.lineTo(p.x, p.y);
        nextCommand = 'H';
        break;
      case 'h':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        path.lineTo(p.x, p.y);
        nextCommand = 'h';
        break;
      case 'V':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.lineTo(p.x, p.y);
        nextCommand = 'V';
        break;
      case 'v':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.lineTo(p.x, p.y);
        nextCommand = 'v';
        break;
      // curveto
      case 'C':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.y = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.y = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
        nextCommand = 'C';
        break;
      case 'c':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.x = p.x + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.y = p.y + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.x = p.x + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.y = p.y + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
        nextCommand = 'c';
        break;
      case 'S':
        node = path.get(path.size() - 1);
        c1.x = node.x[0] * 2d - node.x[1];
        c1.y = node.y[0] * 2d - node.y[1];
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.y = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
        nextCommand = 'S';
        break;
      case 's':
        node = path.get(path.size() - 1);
        c1.x = node.x[0] * 2d - node.x[1];
        c1.y = node.y[0] * 2d - node.y[1];
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.x = p.x + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c2.y = p.y + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
        nextCommand = 's';
        break;
      // quadto
      case 'Q':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.y = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.quadTo(c1.x, c1.y, p.x, p.y);
        nextCommand = 'Q';
        break;
      case 'q':
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.x = p.x + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        c1.y = p.y + tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.quadTo(c1.x, c1.y, p.x, p.y);
        nextCommand = 'q';
        break;
      case 'T':
        node = path.get(path.size() - 1);
        c1.x = node.x[0] * 2d - node.x[1];
        c1.y = node.y[0] * 2d - node.y[1];
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x = tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y = tt.nval;
        path.quadTo(c1.x, c1.y, p.x, p.y);
        nextCommand = 'T';
        break;
      case 't':
        node = path.get(path.size() - 1);
        c1.x = node.x[0] * 2d - node.x[1];
        c1.y = node.y[0] * 2d - node.y[1];
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.x += tt.nval;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
        p.y += tt.nval;
        path.quadTo(c1.x, c1.y, p.x, p.y);
        nextCommand = 't'; // implicit follow-on commands repeat the relative smooth quadto
        break;
      default:
        throw new IOException("Illegal command: " + command);
    }
  }
  if (path != null) {
    paths.add(path);
  }
  return paths;
}
public static AffineTransform getTransform(String str) throws IOException {
  AffineTransform t = new AffineTransform();
  if (str != null) {
    StreamTokenizer tt = new StreamTokenizer(new StringReader(str));
    tt.resetSyntax();
    tt.wordChars('a', 'z');
    tt.wordChars('A', 'Z');
    tt.wordChars(128 + 32, 255);
    tt.whitespaceChars(0, ' ');
    tt.whitespaceChars(',', ',');
    tt.parseNumbers();
    while (tt.nextToken() != StreamTokenizer.TT_EOF) {
      if (tt.ttype != StreamTokenizer.TT_WORD) {
        throw new IOException("Illegal transform " + str);
      }
      String type = tt.sval;
      if (tt.nextToken() != '(') {
        throw new IOException("'(' not found in transform " + str);
      }
      if (type.equals("matrix")) {
        double[] m = new double[6];
        for (int i = 0; i < 6; i++) {
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException(
                "Matrix value " + i + " not found in transform " + str
                    + " token:" + tt.ttype + " " + tt.sval);
          }
          // Recombine numbers written in scientific notation: parseNumbers() stops at the
          // exponent, which then arrives as a word token such as "E-3".
          if (tt.nextToken() == StreamTokenizer.TT_WORD && tt.sval.startsWith("E")) {
            double mantissa = tt.nval;
            tt.nval = Double.valueOf(mantissa + tt.sval);
          } else {
            tt.pushBack();
          }
          m[i] = tt.nval;
        }
        t.concatenate(new AffineTransform(m));
      } else if (type.equals("translate")) {
        double tx, ty;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
          throw new IOException("X-translation value not found in transform " + str);
        }
        tx = tt.nval;
        if (tt.nextToken() == StreamTokenizer.TT_NUMBER) {
          ty = tt.nval;
        } else {
          tt.pushBack();
          ty = 0;
        }
        t.translate(tx, ty);
      } else if (type.equals("scale")) {
        double sx, sy;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
          throw new IOException("X-scale value not found in transform " + str);
        }
        sx = tt.nval;
        if (tt.nextToken() == StreamTokenizer.TT_NUMBER) {
          sy = tt.nval;
        } else {
          tt.pushBack();
          sy = sx;
        }
        t.scale(sx, sy);
      } else if (type.equals("rotate")) {
        double angle, cx, cy;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
          throw new IOException("Angle value not found in transform " + str);
        }
        angle = tt.nval;
        if (tt.nextToken() == StreamTokenizer.TT_NUMBER) {
          cx = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException("Y-center value not found in transform " + str);
          }
          cy = tt.nval;
        } else {
          tt.pushBack();
          cx = cy = 0;
        }
        // Only the angle is converted to radians; the rotation center stays in
        // user-space coordinates.
        t.rotate(angle * Math.PI / 180d, cx, cy);
      } else if (type.equals("skewX")) {
        double angle;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
          throw new IOException("Skew angle not found in transform " + str);
        }
        angle = tt.nval;
        t.concatenate(new AffineTransform(1, 0, Math.tan(angle * Math.PI / 180), 1, 0, 0));
      } else if (type.equals("skewY")) {
        double angle;
        if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
          throw new IOException("Skew angle not found in transform " + str);
        }
        angle = tt.nval;
        t.concatenate(new AffineTransform(1, Math.tan(angle * Math.PI / 180), 0, 1, 0, 0));
      } else {
        throw new IOException("Unknown transform " + type + " in " + str);
      }
      if (tt.nextToken() != ')') {
        throw new IOException("')' not found in transform " + str);
      }
    }
  }
  return t;
}
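// Usage sketch (hypothetical input): a chain of SVG transform functions is folded into
// a single AffineTransform, applied left to right.
AffineTransform tx = getTransform("translate(10,20) rotate(45) scale(2)");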
/**
 * Return an interned VarInfoAux that represents a given string. Elements are separated by commas,
 * in the form:
 *
 * <p>x = a, "a key" = "a value"
 *
 * <p>Parsing allows for quoted elements. Whitespace to the left and right of keys and values does
 * not matter, but whitespace in between does.
 */
public static /*@Interned*/ VarInfoAux parse(String inString) throws IOException {
  Reader inStringReader = new StringReader(inString);
  StreamTokenizer tok = new StreamTokenizer(inStringReader);
  tok.resetSyntax();
  tok.wordChars(0, Integer.MAX_VALUE);
  tok.quoteChar('\"');
  tok.whitespaceChars(' ', ' ');
  tok.ordinaryChar('[');
  tok.ordinaryChar(']');
  tok.ordinaryChars(',', ',');
  tok.ordinaryChars('=', '=');
  Map</*@Interned*/ String, /*@Interned*/ String> map = theDefault.map;

  String key = "";
  String value = "";
  boolean seenEqual = false;
  boolean insideVector = false;
  for (int tokInfo = tok.nextToken();
      tokInfo != StreamTokenizer.TT_EOF;
      tokInfo = tok.nextToken()) {
    @SuppressWarnings("interning") // initialization-checking pattern
    boolean mapUnchanged = (map == theDefault.map);
    if (mapUnchanged) {
      // We use default values if none are specified. We initialize
      // here rather than above to save time when there are no tokens.
      map = new HashMap</*@Interned*/ String, /*@Interned*/ String>(theDefault.map);
    }

    /*@Interned*/ String token;
    if (tok.ttype == StreamTokenizer.TT_WORD || tok.ttype == '\"') {
      assert tok.sval != null
          : "@AssumeAssertion(nullness): representation invariant of StreamTokenizer";
      token = tok.sval.trim().intern();
    } else {
      token = ((char) tok.ttype + "").intern();
    }

    debug.fine("Token info: " + tokInfo + " " + token);

    if (token == "[") { // interned
      if (!seenEqual) throw new IOException("Aux option did not contain an '='");
      if (insideVector) throw new IOException("Vectors cannot be nested in an aux option");
      if (value.length() > 0) throw new IOException("Cannot mix scalar and vector values");
      insideVector = true;
      value = "";
    } else if (token == "]") { // interned
      if (!insideVector) throw new IOException("']' without preceding '['");
      insideVector = false;
    } else if (token == ",") { // interned
      if (!seenEqual) throw new IOException("Aux option did not contain an '='");
      if (insideVector) throw new IOException("',' cannot be used inside a vector");
      map.put(key.intern(), value.intern());
      key = "";
      value = "";
      seenEqual = false;
    } else if (token == "=") { // interned
      if (seenEqual) throw new IOException("Aux option contained more than one '='");
      if (insideVector) throw new IOException("'=' cannot be used inside a vector");
      seenEqual = true;
    } else {
      if (!seenEqual) {
        key = (key + " " + token).trim();
      } else if (insideVector) {
        value = value + " \"" + token.trim() + "\"";
      } else {
        value = (value + " " + token).trim();
      }
    }
  }
  if (seenEqual) {
    map.put(key.intern(), value.intern());
  }

  // Interning
  VarInfoAux result = new VarInfoAux(map).intern();
  assert interningMap != null
      : "@AssumeAssertion(nullness): application invariant: postcondition of intern(), which was just called";
  if (debug.isLoggable(Level.FINE)) {
    debug.fine("New parse " + result);
    debug.fine("Intern table size: " + new Integer(interningMap.size()));
  }
  return result;
}
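// Usage sketch, reusing the format from the Javadoc above: plain and quoted keys/values
// parse into the same map ({"x" -> "a", "a key" -> "a value"}).
VarInfoAux aux = VarInfoAux.parse("x = a, \"a key\" = \"a value\"");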