Example #1
 /**
  * Advances to the next newline or to the end of input; the newline is not consumed. If the
  * current token already is a newline, no action is performed. Returns the end offset of the
  * last token before the newline.
  *
  * @param origin the origin to pass to the {@link OffsetLimitReachedException}, if it has to be
  *     thrown.
  * @since 5.0
  */
 public final int consumeLine(int origin) throws OffsetLimitReachedException {
   Token t = fToken;
   Token lt = null;
   while (true) {
     switch (t.getType()) {
       case IToken.tCOMPLETION:
         if (lt != null) {
           fLastToken = lt;
         }
         fToken = t;
         throw new OffsetLimitReachedException(origin, t);
       case IToken.tEND_OF_INPUT:
         if (fSupportContentAssist) {
           t.setType(IToken.tCOMPLETION);
           throw new OffsetLimitReachedException(origin, t);
         }
         // $FALL-THROUGH$
       case Lexer.tNEWLINE:
         fToken = t;
         if (lt != null) {
           fLastToken = lt;
         }
         return getLastEndOffset();
     }
     lt = t;
     t = fetchToken();
   }
 }
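  // A minimal usage sketch, not taken from the original source: a helper for a caller that
  // only needs the rest of the current directive line skipped. skipRestOfDirective() is a
  // made-up name; 'origin' would typically be a constant such as ORIGIN_PREPROCESSOR_DIRECTIVE.
  private static int skipRestOfDirective(Lexer lexer, int origin) throws OffsetLimitReachedException {
    // Advance up to, but not including, the newline (or the end of input).
    int endOffset = lexer.consumeLine(origin);
    // The newline is still the current token, so line boundaries stay visible to the caller;
    // the returned offset marks where the skipped directive ends.
    return endOffset;
  }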
  /**
   * Handles the case when there is a "#pragma once" before an include guard.
   *
   * <p>This can be in the form of:
   *
   * <pre>
   * #pragma once
   * </pre>
   *
   * or:
   *
   * <pre>
   * #if (anything)
   * #pragma once
   * #endif
   * </pre>
   */
  private static Token skipPragmaOnce(Lexer l, CharArrayIntMap ppKeywords)
      throws OffsetLimitReachedException {
    boolean foundPragma = false;
    boolean quit = false;
    boolean foundIf = false;

    // Skip blank lines to reach the first token.
    Token t = skipAll(l, Lexer.tNEWLINE);
    l.saveState(); // Save the state in case we don't find a "#pragma once".

    while (!quit) {
      switch (t.getType()) {
        case IToken.tPOUND:
          t = l.nextToken(); // Just get the next token.
          break;
        case IToken.tIDENTIFIER:
          switch (ppKeywords.get(t.getCharImage())) {
            case IPreprocessorDirective.ppPragma:
              t = l.nextToken(); // Get the next token (expecting "once").
              if (CharArrayUtils.equals(t.getCharImage(), ONCE)) {
                foundPragma = true;
                t = skipAll(l, Lexer.tNEWLINE);
                if (!foundIf) { // Just quit if we are not in an '#if' block.
                  quit = true;
                }
              }
              break;
            case IPreprocessorDirective.ppIf:
              if (foundIf) {
                quit = true;
                break;
              }
              foundIf = true;
              t = l.nextDirective(); // Go to the next directive.
              break;
            case IPreprocessorDirective.ppEndif:
              if (foundIf) t = skipAll(l, Lexer.tNEWLINE);
              quit = true;
              break;
            default:
              quit = true;
              break;
          }
          break;
        default:
          quit = true;
          break;
      }
    }

    if (!foundPragma) {
      l.restoreState();
      return l.currentToken();
    }
    return t;
  }
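  // Illustration only, not part of the original source: the two prologues that skipPragmaOnce()
  // recognizes, written out as string constants such as a test might feed to a Lexer.
  // EXAMPLE_H_ is a made-up guard name; any condition is accepted in the #if of the guarded form.
  private static final String PRAGMA_ONCE_DIRECT =
      "#pragma once\n";
  private static final String PRAGMA_ONCE_GUARDED =
      "#if !defined(EXAMPLE_H_)\n"
          + "#pragma once\n"
          + "#endif\n";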
Example #3
 /** @return the next token from the scanner or <code>null</code> on end-of-input. */
 protected IToken nextToken() {
   Token token;
   try {
     token = fLexer.nextToken();
     if (token.getType() == IToken.tEND_OF_INPUT) {
       token = null;
     }
   } catch (OffsetLimitReachedException exc) {
     token = null;
   }
   return token;
 }
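  // A minimal usage sketch, not taken from the original source: draining the scanner with the
  // null end-of-input convention documented above. countRemainingTokens() is a made-up name.
  protected int countRemainingTokens() {
    int count = 0;
    for (IToken t = nextToken(); t != null; t = nextToken()) {
      count++; // end of input (or a completion request) terminates the loop
    }
    return count;
  }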
 private static char[] findNotDefined(Lexer l) throws OffsetLimitReachedException {
   Token t;
   if (skipAll(l, IToken.tLPAREN).getType() == IToken.tNOT
       && checkToken(skipAll(l, IToken.tLPAREN), Keywords.cDEFINED)) {
     t = l.nextToken(); // Only a single parenthesis is allowed.
     if (t.getType() == IToken.tLPAREN) t = l.nextToken();
     if (t.getType() == IToken.tIDENTIFIER) {
       char[] guard = t.getCharImage();
       if (skipAll(l, IToken.tRPAREN).getType() == Lexer.tNEWLINE) return guard;
     }
   }
   return null;
 }
Example #5
 /**
  * Advances to the next pound token that starts a preprocessor directive.
  *
  * @return the pound token of the directive, or the end-of-input token.
  * @throws OffsetLimitReachedException when completion is requested in a literal or a
  *     header-name.
  */
 public Token nextDirective() throws OffsetLimitReachedException {
   Token t0;
   Token t1 = fToken;
   for (; ; ) {
     t0 = t1;
     t1 = fetchToken();
     final int tt1 = t1.getType();
     if (tt1 == IToken.tEND_OF_INPUT) break;
     if (tt1 == IToken.tPOUND) {
       final int tt0 = t0.getType();
       if (tt0 == tNEWLINE || tt0 == tBEFORE_INPUT) break;
     }
   }
   fLastToken = t0;
   return fToken = t1;
 }
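  // A minimal usage sketch, not taken from the original source: visiting every directive in the
  // input. nextDirective() only stops at a '#' that is the first token on its line, so '#'
  // tokens in the middle of a line are skipped. visitDirectives() is a made-up name.
  private static void visitDirectives(Lexer lexer, CharArrayIntMap ppKeywords)
      throws OffsetLimitReachedException {
    Token pound = lexer.nextDirective();
    while (pound.getType() != IToken.tEND_OF_INPUT) {
      Token name = lexer.nextToken(); // the directive keyword, e.g. "ifndef" or "endif"
      int kind = ppKeywords.get(name.getCharImage());
      // ... dispatch on 'kind' here, as currentIfSpansFile() below does.
      pound = lexer.nextDirective();
    }
  }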
 private static boolean currentIfSpansFile(Lexer l, CharArrayIntMap ppKeywords) {
   // Check if the #ifndef spans the entire file.
   try {
     int nesting = 1;
     while (nesting > 0) {
       Token t = l.nextDirective();
       if (t.getType() == IToken.tEND_OF_INPUT) return true;
       switch (ppKeywords.get(l.nextToken().getCharImage())) {
         case IPreprocessorDirective.ppIf:
         case IPreprocessorDirective.ppIfdef:
         case IPreprocessorDirective.ppIfndef:
           nesting++;
           break;
         case IPreprocessorDirective.ppEndif:
           nesting--;
           break;
       }
     }
     l.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
     return skipAll(l, Lexer.tNEWLINE).getType() == IToken.tEND_OF_INPUT;
    } catch (OffsetLimitReachedException e) {
      // Scanning was interrupted by a completion request; assume the guard spans the file.
    }
   return true;
 }
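  // Worked illustration for currentIfSpansFile(), using an assumed example header (not taken
  // from the original source). The nesting counter starts at 1 because the caller has already
  // consumed the guarding #ifndef:
  //
  //   #ifndef EXAMPLE_H_     (consumed by the caller)      nesting = 1
  //   #if FEATURE                                          nesting = 2
  //   #endif                                               nesting = 1
  //   #endif                                               nesting = 0, the loop ends
  //   ... only blank lines until end of input ...          -> the method returns true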
 private static char[] findIncludeGuard(Lexer l, CharArrayIntMap ppKeywords) {
   try {
     if (skipPragmaOnce(l, ppKeywords).getType() == IToken.tPOUND) {
       Token t = l.nextToken();
       if (t.getType() == IToken.tIDENTIFIER) {
         char[] guard = null;
         switch (ppKeywords.get(t.getCharImage())) {
           case IPreprocessorDirective.ppIfndef:
             // #ifndef GUARD
             t = l.nextToken();
             if (t.getType() == IToken.tIDENTIFIER) {
               guard = t.getCharImage();
             }
             break;
           case IPreprocessorDirective.ppIf:
             // #if !defined GUARD
             // #if ((!((defined (GUARD)))))
             guard = findNotDefined(l);
             break;
         }
         if (guard != null) {
           // #define GUARD
           l.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
           if (skipAll(l, Lexer.tNEWLINE).getType() == IToken.tPOUND
               && checkToken(l.nextToken(), Keywords.cDEFINE)
               && checkToken(l.nextToken(), guard)) {
             l.consumeLine(ORIGIN_PREPROCESSOR_DIRECTIVE);
             return guard;
           }
         }
       }
     }
    } catch (OffsetLimitReachedException e) {
      // Scanning was interrupted by a completion request; report no include guard.
    }
   return null;
 }
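  // A hedged sketch of a plausible entry point, assumed rather than taken from the original
  // source: how the Lexer is obtained is left out, and detectIncludeGuard() is a made-up name.
  // A candidate macro only counts as the file's include guard when the guarded region reaches
  // the end of the input.
  private static char[] detectIncludeGuard(Lexer l, CharArrayIntMap ppKeywords) {
    char[] guard = findIncludeGuard(l, ppKeywords);
    if (guard != null && currentIfSpansFile(l, ppKeywords)) {
      return guard;
    }
    return null;
  }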
Example #8
 public boolean currentTokenIsFirstOnLine() {
   final int type = fLastToken.getType();
   return type == tNEWLINE || type == tBEFORE_INPUT;
 }
Example #9
 /** Returns the end offset of the token before the current one. */
 @Override
 public int getLastEndOffset() {
   return fLastToken.getEndOffset();
 }
 private static Token skipAll(Lexer l, int kind) throws OffsetLimitReachedException {
    // Skip all leading tokens of the given kind and return the first token of a different kind.
   Token t = l.nextToken();
   while (t.getType() == kind) t = l.nextToken();
   return t;
 }
 private static boolean checkToken(Token t, char[] image) throws OffsetLimitReachedException {
   return CharArrayUtils.equals(t.getCharImage(), image);
 }