Example 1
 /**
  * Initializes the stream tokenizer.
  *
  * @param tokenizer the tokenizer to initialize
  */
 private void initTokenizer(StreamTokenizer tokenizer) {
   tokenizer.resetSyntax();
   tokenizer.whitespaceChars(0, (' ' - 1));
   tokenizer.wordChars(' ', '\u00FF');
   tokenizer.whitespaceChars(m_FieldSeparator.charAt(0), m_FieldSeparator.charAt(0));
   tokenizer.commentChar('%');
   tokenizer.quoteChar('"');
   tokenizer.quoteChar('\'');
   tokenizer.eolIsSignificant(true);
 }
Example 2
 /** Creates a StreamTokenizer for reading ARFF files. */
 private StreamTokenizer createTokenizer(Reader in) {
   StreamTokenizer tokenizer = new StreamTokenizer(in);
   tokenizer.resetSyntax();
   tokenizer.whitespaceChars(0, ' ');
   tokenizer.wordChars(' ' + 1, '\u00FF');
   tokenizer.whitespaceChars(',', ',');
   tokenizer.commentChar('%');
   tokenizer.quoteChar('"');
   tokenizer.quoteChar('\'');
   tokenizer.ordinaryChar('{');
   tokenizer.ordinaryChar('}');
   tokenizer.eolIsSignificant(true);
   return tokenizer;
 }
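A tokenizer configured like the two methods above is normally driven with a nextToken() loop. The following standalone sketch (a hypothetical driver, not part of either original class) shows which token types such a configuration produces:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class ArffTokenDemo {
  public static void main(String[] args) throws IOException {
    StreamTokenizer t = new StreamTokenizer(
        new StringReader("@attribute outlook {sunny, 'partly cloudy'} % comment\n"));
    t.resetSyntax();
    t.whitespaceChars(0, ' ');
    t.wordChars(' ' + 1, '\u00FF');
    t.whitespaceChars(',', ',');
    t.commentChar('%');
    t.quoteChar('"');
    t.quoteChar('\'');
    t.ordinaryChar('{');
    t.ordinaryChar('}');
    t.eolIsSignificant(true);

    while (t.nextToken() != StreamTokenizer.TT_EOF) {
      switch (t.ttype) {
        case StreamTokenizer.TT_WORD:
          System.out.println("word:   " + t.sval);          // unquoted token
          break;
        case StreamTokenizer.TT_EOL:
          System.out.println("<end of line>");
          break;
        case '"':
        case '\'':
          System.out.println("quoted: " + t.sval);          // quotes already stripped
          break;
        default:
          System.out.println("char:   " + (char) t.ttype);  // e.g. '{' and '}'
      }
    }
  }
}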
Example 3
  private void setupTokenizer() {
    st.resetSyntax();
    st.wordChars('a', 'z');
    st.wordChars('A', 'Z');
    st.wordChars('0', '9');
    st.wordChars(':', ':');
    st.wordChars('.', '.');
    st.wordChars('_', '_');
    st.wordChars('-', '-');
    st.wordChars('/', '/');
    st.wordChars('\\', '\\');
    st.wordChars('$', '$');
    st.wordChars('{', '{'); // need {} for property subst
    st.wordChars('}', '}');
    st.wordChars('*', '*');
    st.wordChars('+', '+');
    st.wordChars('~', '~');
    // XXX check ASCII table and add all other characters except special

    // special: #="(),
    st.whitespaceChars(0, ' ');
    st.commentChar('#');
    st.eolIsSignificant(true);
    st.quoteChar('\"');
  }
Example 4
  public static String StreamTokenizer(StringReader reader) throws IOException {

    StringBuilder buffer = new StringBuilder();

    StreamTokenizer tokenizer = new StreamTokenizer(reader);
    tokenizer.lowerCaseMode(true);
    tokenizer.eolIsSignificant(false);
    tokenizer.whitespaceChars('.', '.');

    while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
      switch (tokenizer.ttype) {
        case StreamTokenizer.TT_WORD:
          buffer.append(tokenizer.sval + " ");
          break;
        case StreamTokenizer.TT_NUMBER:
          buffer.append(tokenizer.nval + " ");
          break;
        case StreamTokenizer.TT_EOL:
          break;
        default:
          break;
      }
    }

    return buffer.toString();
  }
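A hedged usage sketch for the method above (its enclosing class is not shown, so this assumes a main method added to the same class; the input string is made up). With the default syntax table still active, letters become lower-cased TT_WORD tokens, digits become TT_NUMBER tokens, and the periods are swallowed as whitespace:

  // Hypothetical main method placed in the same class as the method above.
  public static void main(String[] args) throws IOException {
    String flattened = StreamTokenizer(new StringReader("Hello World. Version 2 released."));
    System.out.println(flattened); // prints: hello world version 2.0 released
  }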
Example 5
  private static int[][] deserialize(String str) throws IOException {
    StreamTokenizer tok = new StreamTokenizer(new StringReader(str));
    tok.resetSyntax();
    tok.wordChars('0', '9');
    tok.whitespaceChars(' ', ' ');
    tok.parseNumbers();

    tok.nextToken();

    int rows = (int) tok.nval;
    int[][] out = new int[rows][];

    for (int i = 0; i < rows; i++) {
      tok.nextToken();

      int length = (int) tok.nval;
      int[] row = new int[length];
      out[i] = row;

      for (int j = 0; j < length; j++) {
        tok.nextToken();
        row[j] = (int) tok.nval;
      }
    }

    return out;
  }
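The wire format implied by the code above: the first number is the row count, and each row is encoded as its length followed by its elements, all separated by spaces. A hedged usage sketch (assuming a main method in the same class):

  public static void main(String[] args) throws IOException {
    // 2 rows; row 0 has 3 elements (1 2 3), row 1 has 2 elements (4 5).
    int[][] m = deserialize("2  3 1 2 3  2 4 5");
    System.out.println(java.util.Arrays.deepToString(m)); // [[1, 2, 3], [4, 5]]
  }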
Example 6
  /**
   * Initializes the stream tokenizer.
   *
   * @param tokenizer the tokenizer to initialize
   */
  private void initTokenizer(StreamTokenizer tokenizer) {
    tokenizer.resetSyntax();
    tokenizer.whitespaceChars(0, (' ' - 1));
    tokenizer.wordChars(' ', '\u00FF');
    tokenizer.whitespaceChars(m_FieldSeparator.charAt(0), m_FieldSeparator.charAt(0));
    // tokenizer.commentChar('%');

    String[] parts = m_Enclosures.split(",");
    for (String e : parts) {
      if (e.length() > 1 || e.length() == 0) {
        throw new IllegalArgumentException("Enclosures can only be single characters");
      }
      tokenizer.quoteChar(e.charAt(0));
    }

    tokenizer.eolIsSignificant(true);
  }
Example 7
 /**
  * Sets the syntax of the StreamTokenizer, i.e. the whitespace, comment, and
  * delimiter chars.
  */
 protected void setSyntax(StreamTokenizer tk) {
   tk.resetSyntax();
   tk.eolIsSignificant(false);
   tk.slashStarComments(true);
   tk.slashSlashComments(true);
   tk.whitespaceChars(0, ' ');
   tk.wordChars(' ' + 1, '\u00ff');
   tk.ordinaryChar('[');
   tk.ordinaryChar(']');
   tk.ordinaryChar('{');
   tk.ordinaryChar('}');
   tk.ordinaryChar('-');
   tk.ordinaryChar('>');
   tk.ordinaryChar('/');
   tk.ordinaryChar('*');
   tk.quoteChar('"');
   tk.whitespaceChars(';', ';');
   tk.ordinaryChar('=');
 }
Example 8
  /**
   * Configure the lexical analyzer.
   *
   * @param reader the input stream reader
   * @return an s-expression lexer
   */
  private StreamTokenizer createLexer(Reader reader) {
    StreamTokenizer tokenizer = new StreamTokenizer(reader);

    tokenizer.resetSyntax();
    tokenizer.eolIsSignificant(false);
    tokenizer.whitespaceChars(0, ' ');
    tokenizer.wordChars('!', '!');
    tokenizer.wordChars('*', 'z');

    return tokenizer;
  }
Example 9
    /** Creates a new input stream for the given reader */
    public StringInputStream(Reader r) {
      st = new StreamTokenizer(r);

      st.resetSyntax();
      st.eolIsSignificant(false);

      // Parse numbers as words
      st.wordChars('0', '9');
      st.wordChars('-', '.');

      // Characters as words
      st.wordChars('\u0000', '\u00FF');

      // Skip everything after '%' on the same line
      st.commentChar('%');

      // Skip whitespace and newlines
      st.whitespaceChars(' ', ' ');
      st.whitespaceChars('\u0009', '\u000e');
    }
Example 10
  protected void initialize(Element arrayE) throws Exception {
    String arrayS = arrayE.getTagName();

    // get the type of the array
    if (arrayS.equals("Array")) {
      String type = arrayE.getAttribute("type");
      if (type.equals("int")) {
        m_type = ArrayType.INT;
      } else if (type.equals("real")) {
        m_type = ArrayType.REAL;
      } else if (type.equals("string")) {
        m_type = ArrayType.STRING;
      }
    } else {
      for (ArrayType a : ArrayType.values()) {
        if (a.toString().equals(arrayS)) {
          m_type = a;
          break;
        }
      }
    }
    // now read the values
    String contents = arrayE.getChildNodes().item(0).getNodeValue();
    StringReader sr = new StringReader(contents);
    StreamTokenizer st = new StreamTokenizer(sr);
    st.resetSyntax();
    st.whitespaceChars(0, ' ');
    st.wordChars(' ' + 1, '\u00FF');
    st.whitespaceChars(' ', ' ');
    st.quoteChar('"');
    st.quoteChar('\'');
    // m_Tokenizer.eolIsSignificant(true);

    st.nextToken();
    while (st.ttype != StreamTokenizer.TT_EOF && st.ttype != StreamTokenizer.TT_EOL) {
      m_values.add(st.sval);
      st.nextToken();
    }
  }
Example 11
 private static void loadCmdFile(String name, ListBuffer<String> args) throws IOException {
   Reader r = new BufferedReader(new FileReader(name));
   StreamTokenizer st = new StreamTokenizer(r);
   st.resetSyntax();
   st.wordChars(' ', 255);
   st.whitespaceChars(0, ' ');
   st.commentChar('#');
   st.quoteChar('"');
   st.quoteChar('\'');
   while (st.nextToken() != StreamTokenizer.TT_EOF) {
     args.append(st.sval);
   }
   r.close();
 }
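ListBuffer is a javac-internal type, so the method above only compiles inside the JDK tools source tree. A minimal standalone sketch of the same idea (an assumed helper class, using ArrayList and try-with-resources) looks like this:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.List;

// Standalone sketch of an @-file reader, with ArrayList in place of
// javac's internal ListBuffer.
public class CmdFileReader {
  public static List<String> load(String name) throws IOException {
    List<String> args = new ArrayList<>();
    try (Reader r = new BufferedReader(new FileReader(name))) {
      StreamTokenizer st = new StreamTokenizer(r);
      st.resetSyntax();
      st.wordChars(' ', 255);      // every printable character is part of a word
      st.whitespaceChars(0, ' ');  // control chars and space separate words
      st.commentChar('#');         // '#' starts a comment that runs to end of line
      st.quoteChar('"');           // "quoted strings with spaces" stay one argument
      st.quoteChar('\'');
      while (st.nextToken() != StreamTokenizer.TT_EOF) {
        args.add(st.sval);         // words and quoted strings both arrive in sval
      }
    }
    return args;
  }
}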
Example 12
  public CSVReader(BufferedReader input, char customizedSeparator) {
    this.separator = customizedSeparator;

    parser = new StreamTokenizer(input);
    parser.ordinaryChars(0, 255);
    parser.wordChars(0, 255);
    parser.ordinaryChar('\"');
    parser.ordinaryChar(customizedSeparator);

    // Need to do set EOL significance after setting ordinary and word
    // chars, and need to explicitly set \n and \r as whitespace chars
    // for EOL detection to work
    parser.eolIsSignificant(true);
    parser.whitespaceChars('\n', '\n');
    parser.whitespaceChars('\r', '\r');
    atEOF = false;
  }
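With this configuration, field splitting is done by hand in the read loop: everything is a word character except the quote, the separator, and the line terminators. A standalone sketch of such a loop for a single record (a simplified illustration, not the rest of the original CSVReader):

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

public class CsvLineDemo {
  public static void main(String[] args) throws IOException {
    char sep = ',';
    StreamTokenizer parser = new StreamTokenizer(new StringReader("a,b c,,d\n"));
    parser.ordinaryChars(0, 255);
    parser.wordChars(0, 255);
    parser.ordinaryChar('"');
    parser.ordinaryChar(sep);
    parser.eolIsSignificant(true);
    parser.whitespaceChars('\n', '\n');
    parser.whitespaceChars('\r', '\r');

    List<String> fields = new ArrayList<>();
    StringBuilder field = new StringBuilder();
    int tok;
    while ((tok = parser.nextToken()) != StreamTokenizer.TT_EOF
        && tok != StreamTokenizer.TT_EOL) {
      if (tok == sep) {               // the separator closes the current field
        fields.add(field.toString());
        field.setLength(0);
      } else if (tok == StreamTokenizer.TT_WORD) {
        field.append(parser.sval);    // everything else was declared a word char
      }
    }
    fields.add(field.toString());
    System.out.println(fields);       // [a, b c, , d]
  }
}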
Example 13
  /**
   * Read a matrix from a stream. The format is the same as the print method, so printed matrices can
   * be read back in (provided they were printed using US Locale). Elements are separated by
   * whitespace, all the elements for each row appear on a single line, the last row is followed by
   * a blank line.
   *
   * @param input the input stream.
   */
  public static Matrix read(BufferedReader input) throws java.io.IOException {
    StreamTokenizer tokenizer = new StreamTokenizer(input);

    // Although StreamTokenizer will parse numbers, it doesn't recognize
    // scientific notation (E or D); however, Double.valueOf does.
    // The strategy here is to disable StreamTokenizer's number parsing.
    // We'll only get whitespace delimited words, EOL's and EOF's.
    // These words should all be numbers, for Double.valueOf to parse.

    tokenizer.resetSyntax();
    tokenizer.wordChars(0, 255);
    tokenizer.whitespaceChars(0, ' ');
    tokenizer.eolIsSignificant(true);
    java.util.Vector v = new java.util.Vector();

    // Ignore initial empty lines
    while (tokenizer.nextToken() == StreamTokenizer.TT_EOL) ;
    if (tokenizer.ttype == StreamTokenizer.TT_EOF)
      throw new java.io.IOException("Unexpected EOF on matrix read.");
    do {
      v.addElement(Double.valueOf(tokenizer.sval)); // Read & store 1st row.
    } while (tokenizer.nextToken() == StreamTokenizer.TT_WORD);

    int n = v.size(); // Now we've got the number of columns!
    double[] row = new double[n];
    for (int j = 0; j < n; j++) { // extract the elements of the 1st row.
      row[j] = ((Double) v.elementAt(j)).doubleValue();
    }
    v.removeAllElements();
    v.addElement(row); // Start storing rows instead of columns.
    while (tokenizer.nextToken() == StreamTokenizer.TT_WORD) {
      // While non-empty lines
      v.addElement(row = new double[n]);
      int j = 0;
      do {
        if (j >= n) throw new java.io.IOException("Row " + v.size() + " is too long.");
        row[j++] = Double.valueOf(tokenizer.sval).doubleValue();
      } while (tokenizer.nextToken() == StreamTokenizer.TT_WORD);
      if (j < n) throw new java.io.IOException("Row " + v.size() + " is too short.");
    }
    int m = v.size(); // Now we've got the number of rows.
    double[][] A = new double[m][];
    v.copyInto(A); // copy the rows out of the vector
    return new Matrix(A);
  }
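A hedged round-trip illustration of the format described in the javadoc (whitespace-separated elements, one row per line, terminated by a blank line); it assumes the surrounding JAMA-style Matrix class is in scope:

    String text = "1 2 3\n4 5 6.5e-1\n\n";
    Matrix m = Matrix.read(new java.io.BufferedReader(new java.io.StringReader(text)));
    // m is now a 2x3 matrix; the scientific-notation element is parsed by Double.valueOf.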
Example 14
  public static Map<String, String> getStyles(String str) throws IOException {
    HashMap<String, String> styles = new HashMap<String, String>();
    if (str == null) return styles;

    StreamTokenizer tt = new StreamTokenizer(new StringReader(str));
    tt.resetSyntax();
    tt.wordChars('!', '9');
    tt.wordChars('<', '~');
    tt.wordChars(128 + 32, 255);
    tt.whitespaceChars(0, ' ');

    while (tt.nextToken() != StreamTokenizer.TT_EOF) {
      if (tt.ttype != ';') {
        String key, value;
        if (tt.ttype != StreamTokenizer.TT_WORD) {
          throw new IOException(
              "Key token expected in " + str + " " + Integer.toHexString(tt.ttype));
        }
        key = tt.sval;
        if (tt.nextToken() != ':') {
          throw new IOException("Colon expected after " + key + " in " + str);
        }
        if (tt.nextToken() != StreamTokenizer.TT_WORD) {
          throw new IOException(
              "Value token expected after " + key + " in " + str + " " + tt.ttype);
        }
        value = tt.sval;
        while (tt.nextToken() == StreamTokenizer.TT_WORD) {
          value += ' ' + tt.sval;
        }
        tt.pushBack();
        styles.put(key, value);
      }
    }

    return styles;
  }
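A hedged usage fragment (the style string is made up): ':' and ';' fall outside the word-character ranges configured above, so they act as delimiters between keys and values, and multi-word values are re-joined with spaces:

    // Hypothetical usage, assuming the method above is in scope.
    Map<String, String> css = getStyles("fill:none;stroke:#c0c0c0;stroke-width:1.5");
    System.out.println(css.get("stroke"));       // #c0c0c0
    System.out.println(css.get("stroke-width")); // 1.5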
Example 15
  StreamTokenizer createScanner(Reader r) {
    StreamTokenizer s = new StreamTokenizer(r);

    // disable number parsing, since it doesn't work in the context of string expansion
    // and we also would have to preserve the number type (int or double)
    s.ordinaryChars('0', '9');
    s.wordChars('0', '9');
    // s.wordChars('"', '"');

    // those are used to expand events
    s.wordChars('[', '[');
    s.wordChars(']', ']');
    s.wordChars('|', '|');
    s.wordChars('-', '-');
    s.wordChars('<', '<');
    s.wordChars('>', '>');

    // those can be part of component names
    s.wordChars('_', '_');
    s.wordChars('#', '#');
    s.wordChars('*', '*');
    s.wordChars('@', '@');
    s.wordChars('$', '$');
    s.wordChars(':', ':');
    s.wordChars('~', '~');

    s.quoteChar('"');

    s.slashSlashComments(true);
    s.slashStarComments(true);

    s.whitespaceChars(',', ',');
    s.whitespaceChars(';', ';');

    return s;
  }
Example 16
  /**
   * Returns the string array associated with a key, assuming it is defined. It is recommended to
   * check that it is defined first with {@link #hasValue(String)}.
   *
   * @throws RuntimeException if the key is not defined.
   * @see #hasValue(String)
   */
  public String[] getList(/*@KeyFor("this.map")*/ String key) {
    try {
      if (!hasValue(key)) {
        throw new RuntimeException(String.format("Key '%s' is not defined", key));
      }
      final String sValue = getValue(key);
      StreamTokenizer tok = new StreamTokenizer(new StringReader(sValue));
      tok.quoteChar('"');
      tok.whitespaceChars(' ', ' ');
      ArrayList<String> lValues = new ArrayList<String>();

      int tokInfo = tok.nextToken();
      while (tokInfo != StreamTokenizer.TT_EOF) {
        // Only quoted tokens contribute values; always advance the tokenizer,
        // otherwise an unquoted token would make this loop spin forever.
        if (tok.ttype == '"') {
          assert tok.sval != null
              : "@AssumeAssertion(nullness)"; // tok.ttype == '"' guarantees not null
          lValues.add(tok.sval.trim());
        }
        tokInfo = tok.nextToken();
      }
      return lValues.toArray(new String[] {});
    } catch (IOException ex) {
      throw new RuntimeException(String.format("Parsing for key '%s' failed", key), ex);
    }
  }
Example 17
 /** @tests java.io.StreamTokenizer#whitespaceChars(int, int) */
 public void test_whitespaceCharsII() throws IOException {
   setTest("azbc iof z 893");
   st.whitespaceChars('a', 'z');
   assertTrue("OrdinaryChar failed.", st.nextToken() == StreamTokenizer.TT_NUMBER);
 }
Example 18
  public static List<BezierPath> fromPathData(String str) throws IOException {
    LinkedList<BezierPath> paths = new LinkedList<BezierPath>();

    BezierPath path = null;
    Point2D.Double p = new Point2D.Double();
    Point2D.Double c1 = new Point2D.Double();
    Point2D.Double c2 = new Point2D.Double();
    StreamTokenizer tt = new StreamTokenizer(new StringReader(str));
    tt.resetSyntax();
    tt.parseNumbers();
    tt.whitespaceChars(0, ' ');
    tt.whitespaceChars(',', ',');

    char nextCommand = 'M';
    char command = 'M';
    while (tt.nextToken() != StreamTokenizer.TT_EOF) {
      if (tt.ttype > 0) {
        command = (char) tt.ttype;
      } else {
        command = nextCommand;
        tt.pushBack();
      }

      BezierPath.Node node;
      switch (command) {
          // moveto
        case 'M':
          if (path != null) {
            paths.add(path);
          }
          path = new BezierPath();

          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.moveTo(p.x, p.y);
          nextCommand = 'L';
          break;
        case 'm':
          if (path != null) {
            paths.add(path);
          }
          path = new BezierPath();

          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.moveTo(p.x, p.y);
          nextCommand = 'l';

          // close path
          break;
        case 'Z':
        case 'z':
          p.x = path.get(0).x[0];
          p.y = path.get(0).y[0];
          path.setClosed(true);

          // lineto
          break;
        case 'L':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.lineTo(p.x, p.y);
          nextCommand = 'L';

          break;
        case 'l':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.lineTo(p.x, p.y);
          nextCommand = 'l';

          break;
        case 'H':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          path.lineTo(p.x, p.y);
          nextCommand = 'H';

          break;
        case 'h':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          path.lineTo(p.x, p.y);
          nextCommand = 'h';

          break;
        case 'V':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.lineTo(p.x, p.y);
          nextCommand = 'V';

          break;
        case 'v':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.lineTo(p.x, p.y);
          nextCommand = 'v';

          // curveto
          break;
        case 'C':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.y = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.y = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
          nextCommand = 'C';

          break;
        case 'c':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.x = p.x + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.y = p.y + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.x = p.x + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.y = p.y + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
          nextCommand = 'c';

          break;
        case 'S':
          node = path.get(path.size() - 1);
          c1.x = node.x[0] * 2d - node.x[1];
          c1.y = node.y[0] * 2d - node.y[1];
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.y = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
          nextCommand = 'S';

          break;
        case 's':
          node = path.get(path.size() - 1);
          c1.x = node.x[0] * 2d - node.x[1];
          c1.y = node.y[0] * 2d - node.y[1];
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.x = p.x + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c2.y = p.y + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.curveTo(c1.x, c1.y, c2.x, c2.y, p.x, p.y);
          nextCommand = 's';

          // quadto
          break;
        case 'Q':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.y = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.quadTo(c1.x, c1.y, p.x, p.y);
          nextCommand = 'Q';

          break;
        case 'q':
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.x = p.x + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          c1.y = p.y + tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.quadTo(c1.x, c1.y, p.x, p.y);
          nextCommand = 'q';

          break;
        case 'T':
          node = path.get(path.size() - 1);
          c1.x = node.x[0] * 2d - node.x[1];
          c1.y = node.y[0] * 2d - node.y[1];
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x = tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y = tt.nval;
          path.quadTo(c1.x, c1.y, p.x, p.y);
          nextCommand = 'T';

          break;
        case 't':
          node = path.get(path.size() - 1);
          c1.x = node.x[0] * 2d - node.x[1];
          c1.y = node.y[0] * 2d - node.y[1];
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.x += tt.nval;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) throw new IOException("Number expected");
          p.y += tt.nval;
          path.quadTo(c1.x, c1.y, p.x, p.y);
          nextCommand = 't';

          break;
        default:
          throw new IOException("Illegal command: " + command);
      }
    }
    if (path != null) {
      paths.add(path);
    }
    return paths;
  }
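A hedged usage fragment (the path string is made up; BezierPath is the JHotDraw geometry class this method returns):

    // Hypothetical usage, assuming the method above is in scope.
    List<BezierPath> paths = fromPathData("M 100,50 L 200,50 C 250,50 250,150 200,150 Z");
    // -> a single closed path: a move, a line segment, and one cubic curve.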
Example 19
  public static AffineTransform getTransform(String str) throws IOException {
    AffineTransform t = new AffineTransform();

    if (str != null) {

      StreamTokenizer tt = new StreamTokenizer(new StringReader(str));
      tt.resetSyntax();
      tt.wordChars('a', 'z');
      tt.wordChars('A', 'Z');
      tt.wordChars(128 + 32, 255);
      tt.whitespaceChars(0, ' ');
      tt.whitespaceChars(',', ',');
      tt.parseNumbers();

      while (tt.nextToken() != StreamTokenizer.TT_EOF) {
        if (tt.ttype != StreamTokenizer.TT_WORD) {
          throw new IOException("Illegal transform " + str);
        }
        String type = tt.sval;
        if (tt.nextToken() != '(') {
          throw new IOException("'(' not found in transform " + str);
        }
        if (type.equals("matrix")) {
          double[] m = new double[6];
          for (int i = 0; i < 6; i++) {
            if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
              throw new IOException(
                  "Matrix value "
                      + i
                      + " not found in transform "
                      + str
                      + " token:"
                      + tt.ttype
                      + " "
                      + tt.sval);
            }
            if (tt.nextToken() == StreamTokenizer.TT_WORD && tt.sval.startsWith("E")) {
              // StreamTokenizer splits e.g. "1.5E3" into the number 1.5 and the word "E3";
              // re-attach the exponent and re-parse the full value.
              tt.nval = Double.valueOf(tt.nval + tt.sval);
            } else {
              tt.pushBack();
            }
            m[i] = tt.nval;
          }
          t.concatenate(new AffineTransform(m));

        } else if (type.equals("translate")) {
          double tx, ty;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException("X-translation value not found in transform " + str);
          }
          tx = tt.nval;
          if (tt.nextToken() == StreamTokenizer.TT_NUMBER) {
            ty = tt.nval;
          } else {
            tt.pushBack();
            ty = 0;
          }
          t.translate(tx, ty);

        } else if (type.equals("scale")) {
          double sx, sy;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException("X-scale value not found in transform " + str);
          }
          sx = tt.nval;
          if (tt.nextToken() == StreamTokenizer.TT_NUMBER) {
            sy = tt.nval;
          } else {
            tt.pushBack();
            sy = sx;
          }
          t.scale(sx, sy);

        } else if (type.equals("rotate")) {
          double angle, cx, cy;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException("Angle value not found in transform " + str);
          }
          angle = tt.nval;
          if (tt.nextToken() == StreamTokenizer.TT_NUMBER) {
            cx = tt.nval;
            if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
              throw new IOException("Y-center value not found in transform " + str);
            }
            cy = tt.nval;
          } else {
            tt.pushBack();
            cx = cy = 0;
          }
          // The rotation center stays in user-space coordinates; only the angle is in radians.
          t.rotate(angle * Math.PI / 180d, cx, cy);

        } else if (type.equals("skewX")) {
          double angle;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException("Skew angle not found in transform " + str);
          }
          angle = tt.nval;
          t.concatenate(new AffineTransform(1, 0, Math.tan(angle * Math.PI / 180), 1, 0, 0));

        } else if (type.equals("skewY")) {
          double angle;
          if (tt.nextToken() != StreamTokenizer.TT_NUMBER) {
            throw new IOException("Skew angle not found in transform " + str);
          }
          angle = tt.nval;
          t.concatenate(new AffineTransform(1, Math.tan(angle * Math.PI / 180), 0, 1, 0, 0));

        } else {
          throw new IOException("Unknown transform " + type + " in " + str);
        }
        if (tt.nextToken() != ')') {
          throw new IOException("')' not found in transform " + str);
        }
      }
    }
    return t;
  }
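A hedged usage fragment (the transform string is made up for illustration):

    // Hypothetical usage, assuming the method above is in scope.
    AffineTransform t = getTransform("translate(10,20) rotate(45) scale(2)");
    // -> translate by (10,20), then rotate 45 degrees about the origin, then scale uniformly by 2.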
Example 20
  /**
   * Parses the given string as MIME type. The mediatype, mediasubtype and all parameter/value
   * combinations are extracted, comments are dropped.
   *
   * @param mimeType the string to parse
   * @throws IllegalArgumentException if not conformant.
   */
  private void parseMimeType(String mimeType) {
    int MEDIA = 1;
    int MEDIASUB = 2;
    int PARAM_NAME = 3;
    int PARAM_VALUE = 4;
    int COMMENT_START = 5;

    int state = 0;
    int lastState = 0; // keeps track of state before comment
    int tok;

    try {
      String paramName = null;
      StreamTokenizer in = new StreamTokenizer(new StringReader(mimeType));
      in.resetSyntax();
      // Allowed characters are anything except:
      // SPACE, CTLs (= Unicode characters U+0000 - U+001F and U+007F)
      // and tspecials ( ) < > @ , ; : \ " / [ ] ? =
      in.whitespaceChars(0x00, 0x20);
      in.whitespaceChars(0x7F, 0x7F);
      in.wordChars('A', 'Z');
      in.wordChars('a', 'z');
      in.wordChars('0', '9');
      in.wordChars(0xA0, 0xFF);
      in.wordChars(0x21, 0x21);
      in.wordChars(0x23, 0x27);
      in.wordChars(0x2A, 0x2B);
      in.wordChars(0x2D, 0x2E);
      in.wordChars(0x5E, 0x60);
      in.wordChars(0x7B, 0x7E);
      in.quoteChar('"');

      while ((tok = in.nextToken()) != StreamTokenizer.TT_EOF) {
        switch (tok) {
          case StreamTokenizer.TT_WORD:
            if (state == 0) {
              mediaType = in.sval.toLowerCase();
              state = MEDIA;
              break;
            }
            if (state == MEDIA) {
              mediaSubtype = in.sval.toLowerCase();
              state = MEDIASUB;
              break;
            }
            // begin of parameters is either after mediasub or a parameter value
            if (state == MEDIASUB || state == PARAM_VALUE) {
              paramName = in.sval.toLowerCase();
              state = PARAM_NAME;
              break;
            }
            // a value always follows a parameter name
            if (state == PARAM_NAME) {
              String paramValue = in.sval;
              // if a charset param the value needs to be stored lowercase
              if (paramName.equals("charset")) paramValue = paramValue.toLowerCase();

              state = PARAM_VALUE;
              params.put(paramName, paramValue);
              break;
            }
            if (state == COMMENT_START) {
              // ignore;
              break;
            }
            break;
          case '/':
            // may only occur after the mediatype
            if (state != MEDIA) throw new IllegalArgumentException();

            break;
          case '=':
            // may only occur after a parameter
            if (state != PARAM_NAME) throw new IllegalArgumentException();

            break;
          case ';':
            // differentiates mime type and parameters/value combinations
            if (state != MEDIASUB && state != PARAM_VALUE) throw new IllegalArgumentException();

            break;
          case '(': // begin comment
            lastState = state;
            state = COMMENT_START;
            break;
          case ')': // end comment
            state = lastState;
            break;
            // a (possibly quoted) value always follows a parameter name
          case '"':
            if (state == PARAM_NAME) {
              String paramValue = in.sval;
              // if a charset param the value needs to be stored lowercase
              if (paramName.equals("charset")) paramValue = paramValue.toLowerCase();

              state = PARAM_VALUE;
              params.put(paramName, paramValue);
              break;
            }

            // only values may be quoted
            throw new IllegalArgumentException();
          default:
            // if any other char is observed its not allowed
            throw new IllegalArgumentException();
        }
      }
    } catch (IOException e) {
      // should not happen as mimetype str cannot be null
      throw new InternalError("IOException during parsing String " + mimeType);
    }
  }
Example 21
  /**
   * Parse a policy file, incorporating the permission definitions described therein.
   *
   * @param url The URL of the policy file to read.
   * @throws IOException if an I/O error occurs, or if the policy file cannot be parsed.
   */
  private void parse(final URL url) throws IOException {
    logger.log(Component.POLICY, "reading policy file from {0}", url);
    final StreamTokenizer in = new StreamTokenizer(new InputStreamReader(url.openStream()));
    in.resetSyntax();
    in.slashSlashComments(true);
    in.slashStarComments(true);
    in.wordChars('A', 'Z');
    in.wordChars('a', 'z');
    in.wordChars('0', '9');
    in.wordChars('.', '.');
    in.wordChars('_', '_');
    in.wordChars('$', '$');
    in.whitespaceChars(' ', ' ');
    in.whitespaceChars('\t', '\t');
    in.whitespaceChars('\f', '\f');
    in.whitespaceChars('\n', '\n');
    in.whitespaceChars('\r', '\r');
    in.quoteChar('\'');
    in.quoteChar('"');

    int tok;
    int state = STATE_BEGIN;
    List keystores = new LinkedList();
    URL currentBase = null;
    List currentCerts = new LinkedList();
    Permissions currentPerms = new Permissions();
    while ((tok = in.nextToken()) != StreamTokenizer.TT_EOF) {
      switch (tok) {
        case '{':
          if (state != STATE_GRANT) error(url, in, "spurious '{'");
          state = STATE_PERMS;
          tok = in.nextToken();
          break;
        case '}':
          if (state != STATE_PERMS) error(url, in, "spurious '}'");
          state = STATE_BEGIN;
          currentPerms.setReadOnly();
          Certificate[] c = null;
          if (!currentCerts.isEmpty())
            c = (Certificate[]) currentCerts.toArray(new Certificate[currentCerts.size()]);
          cs2pc.put(new CodeSource(currentBase, c), currentPerms);
          currentCerts.clear();
          currentPerms = new Permissions();
          currentBase = null;
          tok = in.nextToken();
          if (tok != ';') in.pushBack();
          continue;
      }
      if (tok != StreamTokenizer.TT_WORD) {
        error(url, in, "expecting word token");
      }

      // keystore "<keystore-path>" [',' "<keystore-type>"] ';'
      if (in.sval.equalsIgnoreCase("keystore")) {
        String alg = KeyStore.getDefaultType();
        tok = in.nextToken();
        if (tok != '"' && tok != '\'') error(url, in, "expecting key store URL");
        String store = in.sval;
        tok = in.nextToken();
        if (tok == ',') {
          tok = in.nextToken();
          if (tok != '"' && tok != '\'') error(url, in, "expecting key store type");
          alg = in.sval;
          tok = in.nextToken();
        }
        if (tok != ';') error(url, in, "expecting semicolon");
        try {
          KeyStore keystore = KeyStore.getInstance(alg);
          keystore.load(new URL(url, store).openStream(), null);
          keystores.add(keystore);
        } catch (Exception x) {
          error(url, in, x.toString());
        }
      } else if (in.sval.equalsIgnoreCase("grant")) {
        if (state != STATE_BEGIN) error(url, in, "extraneous grant keyword");
        state = STATE_GRANT;
      } else if (in.sval.equalsIgnoreCase("signedBy")) {
        if (state != STATE_GRANT && state != STATE_PERMS) error(url, in, "spurious 'signedBy'");
        if (keystores.isEmpty()) error(url, in, "'signedBy' with no keystores");
        tok = in.nextToken();
        if (tok != '"' && tok != '\'') error(url, in, "expecting signedBy name");
        StringTokenizer st = new StringTokenizer(in.sval, ",");
        while (st.hasMoreTokens()) {
          String alias = st.nextToken();
          for (Iterator it = keystores.iterator(); it.hasNext(); ) {
            KeyStore keystore = (KeyStore) it.next();
            try {
              if (keystore.isCertificateEntry(alias))
                currentCerts.add(keystore.getCertificate(alias));
            } catch (KeyStoreException kse) {
              error(url, in, kse.toString());
            }
          }
        }
        tok = in.nextToken();
        if (tok != ',') {
          if (state != STATE_GRANT) error(url, in, "spurious ','");
          in.pushBack();
        }
      } else if (in.sval.equalsIgnoreCase("codeBase")) {
        if (state != STATE_GRANT) error(url, in, "spurious 'codeBase'");
        tok = in.nextToken();
        if (tok != '"' && tok != '\'') error(url, in, "expecting code base URL");
        String base = expand(in.sval);
        if (File.separatorChar != '/') base = base.replace(File.separatorChar, '/');
        try {
          currentBase = new URL(base);
        } catch (MalformedURLException mue) {
          error(url, in, mue.toString());
        }
        tok = in.nextToken();
        if (tok != ',') in.pushBack();
      } else if (in.sval.equalsIgnoreCase("principal")) {
        if (state != STATE_GRANT) error(url, in, "spurious 'principal'");
        tok = in.nextToken();
        if (tok == StreamTokenizer.TT_WORD) {
          // Capture the principal class name before the next token overwrites sval.
          String pclassName = in.sval;
          tok = in.nextToken();
          if (tok != '"' && tok != '\'') error(url, in, "expecting principal name");
          String name = in.sval;
          Principal p = null;
          try {
            Class pclass = Class.forName(pclassName);
            Constructor c = pclass.getConstructor(new Class[] {String.class});
            p = (Principal) c.newInstance(new Object[] {name});
          } catch (Exception x) {
            error(url, in, x.toString());
          }
          for (Iterator it = keystores.iterator(); it.hasNext(); ) {
            KeyStore ks = (KeyStore) it.next();
            try {
              for (Enumeration e = ks.aliases(); e.hasMoreElements(); ) {
                String alias = (String) e.nextElement();
                if (ks.isCertificateEntry(alias)) {
                  Certificate cert = ks.getCertificate(alias);
                  if (!(cert instanceof X509Certificate)) continue;
                  if (p.equals(((X509Certificate) cert).getSubjectDN())
                      || p.equals(((X509Certificate) cert).getSubjectX500Principal()))
                    currentCerts.add(cert);
                }
              }
            } catch (KeyStoreException kse) {
              error(url, in, kse.toString());
            }
          }
        } else if (tok == '"' || tok == '\'') {
          String alias = in.sval;
          for (Iterator it = keystores.iterator(); it.hasNext(); ) {
            KeyStore ks = (KeyStore) it.next();
            try {
              if (ks.isCertificateEntry(alias)) currentCerts.add(ks.getCertificate(alias));
            } catch (KeyStoreException kse) {
              error(url, in, kse.toString());
            }
          }
        } else error(url, in, "expecting principal");
        tok = in.nextToken();
        if (tok != ',') in.pushBack();
      } else if (in.sval.equalsIgnoreCase("permission")) {
        if (state != STATE_PERMS) error(url, in, "spurious 'permission'");
        tok = in.nextToken();
        if (tok != StreamTokenizer.TT_WORD) error(url, in, "expecting permission class name");
        String className = in.sval;
        Class clazz = null;
        try {
          clazz = Class.forName(className);
        } catch (ClassNotFoundException cnfe) {
        }
        tok = in.nextToken();
        if (tok == ';') {
          if (clazz == null) {
            currentPerms.add(
                new UnresolvedPermission(
                    className,
                    null,
                    null,
                    (Certificate[]) currentCerts.toArray(new Certificate[currentCerts.size()])));
            continue;
          }
          try {
            currentPerms.add((Permission) clazz.newInstance());
          } catch (Exception x) {
            error(url, in, x.toString());
          }
          continue;
        }
        if (tok != '"' && tok != '\'') error(url, in, "expecting permission target");
        String target = expand(in.sval);
        tok = in.nextToken();
        if (tok == ';') {
          if (clazz == null) {
            currentPerms.add(
                new UnresolvedPermission(
                    className,
                    target,
                    null,
                    (Certificate[]) currentCerts.toArray(new Certificate[currentCerts.size()])));
            continue;
          }
          try {
            Constructor c = clazz.getConstructor(new Class[] {String.class});
            currentPerms.add((Permission) c.newInstance(new Object[] {target}));
          } catch (Exception x) {
            error(url, in, x.toString());
          }
          continue;
        }
        if (tok != ',') error(url, in, "expecting ','");
        tok = in.nextToken();
        if (tok == StreamTokenizer.TT_WORD) {
          if (!in.sval.equalsIgnoreCase("signedBy")) error(url, in, "expecting 'signedBy'");
          try {
            Constructor c = clazz.getConstructor(new Class[] {String.class});
            currentPerms.add((Permission) c.newInstance(new Object[] {target}));
          } catch (Exception x) {
            error(url, in, x.toString());
          }
          in.pushBack();
          continue;
        }
        if (tok != '"' && tok != '\'') error(url, in, "expecting permission action");
        String action = in.sval;
        if (clazz == null) {
          currentPerms.add(
              new UnresolvedPermission(
                  className,
                  target,
                  action,
                  (Certificate[]) currentCerts.toArray(new Certificate[currentCerts.size()])));
          continue;
        } else {
          try {
            Constructor c = clazz.getConstructor(new Class[] {String.class, String.class});
            currentPerms.add((Permission) c.newInstance(new Object[] {target, action}));
          } catch (Exception x) {
            error(url, in, x.toString());
          }
        }
        tok = in.nextToken();
        if (tok != ';' && tok != ',') error(url, in, "expecting ';' or ','");
      }
    }
  }
Example 22
  /**
   * Return an interned VarInfoAux that represents a given string. Elements are separated by commas,
   * in the form:
   *
   * <p>x = a, "a key" = "a value"
   *
   * <p>Parsing allows for quoted elements. White space to the left and right of keys and values
   * does not matter, but white space within them does.
   */
  public static /*@Interned*/ VarInfoAux parse(String inString) throws IOException {
    Reader inStringReader = new StringReader(inString);
    StreamTokenizer tok = new StreamTokenizer(inStringReader);
    tok.resetSyntax();
    tok.wordChars(0, Integer.MAX_VALUE);
    tok.quoteChar('\"');
    tok.whitespaceChars(' ', ' ');
    tok.ordinaryChar('[');
    tok.ordinaryChar(']');
    tok.ordinaryChars(',', ',');
    tok.ordinaryChars('=', '=');
    Map</*@Interned*/ String, /*@Interned*/ String> map = theDefault.map;

    String key = "";
    String value = "";
    boolean seenEqual = false;
    boolean insideVector = false;
    for (int tokInfo = tok.nextToken();
        tokInfo != StreamTokenizer.TT_EOF;
        tokInfo = tok.nextToken()) {
      @SuppressWarnings("interning") // initialization-checking pattern
      boolean mapUnchanged = (map == theDefault.map);
      if (mapUnchanged) {
        // We use default values if none are specified.  We initialize
        // here rather than above to save time when there are no tokens.

        map = new HashMap</*@Interned*/ String, /*@Interned*/ String>(theDefault.map);
      }

      /*@Interned*/ String token;
      if (tok.ttype == StreamTokenizer.TT_WORD || tok.ttype == '\"') {
        assert tok.sval != null
            : "@AssumeAssertion(nullness): representation invariant of StreamTokenizer";
        token = tok.sval.trim().intern();
      } else {
        token = ((char) tok.ttype + "").intern();
      }

      debug.fine("Token info: " + tokInfo + " " + token);

      if (token == "[") { // interned
        if (!seenEqual) throw new IOException("Aux option did not contain an '='");
        if (insideVector) throw new IOException("Vectors cannot be nested in an aux option");
        if (value.length() > 0) throw new IOException("Cannot mix scalar and vector values");

        insideVector = true;
        value = "";
      } else if (token == "]") { // interned
        if (!insideVector) throw new IOException("']' without preceding '['");
        insideVector = false;
      } else if (token == ",") { // interned
        if (!seenEqual) throw new IOException("Aux option did not contain an '='");
        if (insideVector) throw new IOException("',' cannot be used inside a vector");
        map.put(key.intern(), value.intern());
        key = "";
        value = "";
        seenEqual = false;
      } else if (token == "=") { // interned
        if (seenEqual) throw new IOException("Aux option contained more than one '='");
        if (insideVector) throw new IOException("'=' cannot be used inside a vector");
        seenEqual = true;
      } else {
        if (!seenEqual) {
          key = (key + " " + token).trim();
        } else if (insideVector) {
          value = value + " \"" + token.trim() + "\"";
        } else {
          value = (value + " " + token).trim();
        }
      }
    }

    if (seenEqual) {
      map.put(key.intern(), value.intern());
    }

    // Interning
    VarInfoAux result = new VarInfoAux(map).intern();
    assert interningMap != null
        : "@AssumeAssertion(nullness):  application invariant:  postcondition of intern(), which was just called";
    if (debug.isLoggable(Level.FINE)) {
      debug.fine("New parse " + result);
      debug.fine("Intern table size: " + new Integer(interningMap.size()));
    }
    return result;
  }
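A hedged usage fragment, reusing the key/value syntax shown in the javadoc above:

    VarInfoAux aux = VarInfoAux.parse("x = a, \"a key\" = \"a value\"");
    // aux now maps "x" -> "a" and "a key" -> "a value", on top of the defaults.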