public void loadTree() {
    System.out.println("Loading tree");
    StreamTokenizer stream = null;
    try {
        FileInputStream f = new FileInputStream(tree);
        Reader input = new BufferedReader(new InputStreamReader(f));
        stream = new StreamTokenizer(input);
        // Start from a clean syntax table: printable ASCII forms words,
        // control characters and space separate tokens, and number parsing
        // is re-enabled so the TT_NUMBER branch below can actually fire.
        stream.resetSyntax();
        stream.wordChars(33, 126);
        stream.whitespaceChars(0, 32);
        stream.parseNumbers();
    } catch (Exception e) {
        System.out.println("Error opening " + tree);
        System.exit(1);
    }

    list = new ArrayList();
    try {
        // Read the file to the end, collecting each token in order.
        while (stream.nextToken() != StreamTokenizer.TT_EOF) {
            // a word was read
            if (stream.ttype == StreamTokenizer.TT_WORD) {
                list.add(stream.sval);
            }
            // a number was read
            if (stream.ttype == StreamTokenizer.TT_NUMBER) {
                list.add(Double.valueOf(stream.nval));
            }
        }
    } catch (Exception e) {
        System.out.println("\nError reading " + tree + ". Exiting...");
        System.exit(1);
    }
}
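/*
 * Illustration only (not part of the original class): a minimal, standalone
 * sketch of how the tokenizer configuration used in loadTree() splits input
 * into word and number tokens. The class name StreamTokenizerDemo and the
 * sample input string are assumptions for demonstration purposes.
 */
import java.io.Reader;
import java.io.StreamTokenizer;
import java.io.StringReader;

class StreamTokenizerDemo {
    public static void main(String[] args) throws Exception {
        Reader input = new StringReader("nodeA 0.25 nodeB 1.5");
        StreamTokenizer stream = new StreamTokenizer(input);
        stream.resetSyntax();
        stream.wordChars(33, 126);      // printable ASCII characters form words
        stream.whitespaceChars(0, 32);  // whitespace separates tokens
        stream.parseNumbers();          // numbers are reported as TT_NUMBER

        while (stream.nextToken() != StreamTokenizer.TT_EOF) {
            if (stream.ttype == StreamTokenizer.TT_WORD) {
                System.out.println("word:   " + stream.sval);
            } else if (stream.ttype == StreamTokenizer.TT_NUMBER) {
                System.out.println("number: " + stream.nval);
            }
        }
        // Expected output: word nodeA, number 0.25, word nodeB, number 1.5
    }
}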
public String[] parseTokens(String line) throws IOException {
    List tokens = new ArrayList();
    StreamTokenizer st = new StreamTokenizer(new StringReader(line));
    st.parseNumbers();
    st.wordChars('_', '_'); // a word can be THIS_IS_A_WORD

    int token = st.nextToken();
    while (token != StreamTokenizer.TT_EOF) {
        String element = null;
        switch (token) {
            case StreamTokenizer.TT_NUMBER:
                element = String.valueOf(st.nval);
                break;
            case StreamTokenizer.TT_WORD:
                element = st.sval;
                break;
            case '"':
            case '\'':
                // Quoted strings are reported with the quote character as the type.
                element = st.sval;
                break;
            case StreamTokenizer.TT_EOL:
            case StreamTokenizer.TT_EOF:
                break;
            default:
                // Any other ordinary character becomes a one-character token.
                element = String.valueOf((char) st.ttype);
                break;
        }
        if (element != null) {
            tokens.add(element);
        }
        token = st.nextToken();
    }

    String[] result = new String[tokens.size()];
    for (int index = 0; index < tokens.size(); index++) {
        result[index] = (String) tokens.get(index);
    }
    return result;
}
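/*
 * Usage sketch (the class name LineParser is hypothetical; the enclosing
 * class is not shown in this snippet). It illustrates the kind of array
 * parseTokens returns for a typical input line.
 */
// String[] parts = new LineParser().parseTokens("ALPHA_1 = 42 + \"two words\"");
// parts is ["ALPHA_1", "=", "42.0", "+", "two words"]:
//   words and numbers become their own entries (numbers go through
//   String.valueOf(double), hence "42.0"), quoted text is returned without
//   its quotes, and any other single character becomes a one-character string.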