Code Example #1
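tokenEndsAtEndOfLine reports whether a token is the last token on its line by inspecting the character immediately after it, using the lexer's CharStream when one is attached and falling back to the document snapshot otherwise.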
  protected boolean tokenEndsAtEndOfLine(TokenSourceWithStateV4<TState> lexer, Token token) {
    CharStream charStream = lexer.getInputStream();
    if (charStream != null) {
      int nextCharIndex = token.getStopIndex() + 1;
      if (nextCharIndex >= charStream.size()) {
        return true;
      }

      int c =
          charStream
              .getText(Interval.of(token.getStopIndex() + 1, token.getStopIndex() + 1))
              .charAt(0);
      return c == '\r' || c == '\n';
    }

    if (token.getStopIndex() + 1 >= snapshot.length()) {
      return true;
    }

    // read the single character that follows the token (charAt avoids relying on
    // subSequence bound semantics for a one-character lookup)
    char c = snapshot.charAt(token.getStopIndex() + 1);
    return c == '\r' || c == '\n';

    /*int line = snapshot.findLineNumber(token.getStopIndex() + 1);
    int lineStart = snapshot.findLineFromOffset(line).getStart().getOffset();
    int nextLineStart = snapshot.findLineFromOffset(line + 1).getStart().getOffset();
    int lineEnd = nextLineStart - 1;
    if (lineEnd > 0 && lineEnd > lineStart) {
        char c = snapshot.charAt(lineEnd - 1);
        if (c == '\r' || c == '\n') {
            lineEnd--;
        }
    }

    return lineEnd <= token.getStopIndex() + 1 && nextLineStart >= token.getStopIndex() + 1;*/
  }
Code Example #2
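adjustParseSpan widens the requested region: the start is moved back to the first dirty line, then to the beginning of the nearest line whose preceding line state is not part of a multi-line token, and that saved state (or the start state at line 0) becomes the lexer's initial state.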
  protected ParseRequest<TState> adjustParseSpan(OffsetRegion span) {
    int start = span.getStart();
    int end = span.getEnd();

    if (firstDirtyLine != null) {
      int firstDirtyLineOffset =
          snapshot.findLineFromLineNumber(firstDirtyLine).getStart().getOffset();
      start = Math.min(start, firstDirtyLineOffset);
    }

    TState state = null;
    int startLine = snapshot.findLineNumber(start);
    while (startLine > 0) {
      TState lineState = lineStates.get(startLine - 1);
      if (!lineState.getIsMultiLineToken()) {
        state = lineState;
        break;
      }

      startLine--;
    }

    if (startLine == 0) {
      state = getStartState();
    }

    start = snapshot.findLineFromLineNumber(startLine).getStart().getOffset();
    int length = end - start;
    ParseRequest<TState> request = new ParseRequest<>(new OffsetRegion(start, length), state);
    return request;
  }
Code Example #3
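isMultiLineToken treats a token as multi-line when its start offset and the offset just past its stop fall on different snapshot lines.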
  protected boolean isMultiLineToken(TokenSourceWithStateV4<TState> lexer, Token token) {
    /*if (lexer != null && lexer.getLine() > token.getLine()) {
        return true;
    }*/

    int startLine = snapshot.findLineNumber(token.getStartIndex());
    int stopLine = snapshot.findLineNumber(token.getStopIndex() + 1);
    return startLine != stopLine;
  }
Code Example #4
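translateTo returns a tagger snapshot for a newer version of the same document, rejecting snapshots from a different document or from an earlier version.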
  public final @NonNull AbstractTokensTaskTaggerSnapshot<TState> translateTo(
      @NonNull DocumentSnapshot targetSnapshot) {
    Parameters.notNull("targetSnapshot", targetSnapshot);
    if (targetSnapshot.equals(snapshot)) {
      return this;
    }

    if (!targetSnapshot.getVersionedDocument().equals(snapshot.getVersionedDocument())) {
      throw new IllegalArgumentException("The target snapshot is not from the same document.");
    }

    if (targetSnapshot.getVersion().getVersionNumber() < snapshot.getVersion().getVersionNumber()) {
      throw new UnsupportedOperationException(
          "The target snapshot is from an earlier version of the document.");
    }

    return translateToImpl(targetSnapshot);
  }
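As a usage note, the following hypothetical helper (not part of the original source; it assumes the GrammarTokensTaskTaggerSnapshot subclass from Code Example #12 and two snapshots of the same versioned document supplied by the editor infrastructure) sketches how the constructor, initialize(), and translateTo() are combined:

  // Hypothetical helper, not part of the original source.
  static AbstractTokensTaskTaggerSnapshot<?> exampleLifecycle(
      DocumentSnapshot olderSnapshot, DocumentSnapshot newerSnapshot) {
    GrammarTokensTaskTaggerSnapshot tagger = new GrammarTokensTaskTaggerSnapshot(olderSnapshot);
    // Seed the per-line states and request an initial rehighlight (Code Example #6).
    tagger.initialize();
    // After edits produce newerSnapshot of the same VersionedDocument, carry the
    // line states forward instead of re-lexing from scratch (Code Examples #4 and #9).
    return tagger.translateTo(newerSnapshot);
  }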
Code Example #5
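getTags validates that the requested regions come from this tagger's snapshot and delegates to getHighlights over the span covering the first and last region.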
  @Override
  public Iterable<TaggedPositionRegion<TokenTag<Token>>> getTags(
      NormalizedSnapshotPositionRegionCollection regions) {
    Parameters.notNull("regions", regions);
    if (regions.isEmpty()) {
      return Collections.emptyList();
    }

    DocumentSnapshot requestedSnapshot = regions.get(0).getSnapshot();
    if (!requestedSnapshot.equals(snapshot)) {
      throw new UnsupportedOperationException(
          "This tagger snapshot only supports requests from the same document.");
    }

    int startOffset = regions.get(0).getStart().getOffset();
    int endOffset = regions.get(regions.size() - 1).getEnd().getOffset();

    return getHighlights(startOffset, endOffset);
  }
Code Example #6
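initialize pads the per-line state list to the snapshot's line count with dirty copies of the start state and forces a rehighlight of every line.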
  public void initialize() {
    int lineCount = snapshot.getLineCount();
    if (lineStates.size() != lineCount) {
      lineStates.ensureCapacity(lineCount);
      TState dirtyState = getStartState().createDirtyState();
      for (int i = lineStates.size(); i < lineCount; i++) {
        lineStates.add(dirtyState);
      }

      forceRehighlightLines(0, lineCount - 1);
    }
  }
Code Example #7
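forceRehighlightLines validates the requested line range and widens the tracked firstDirtyLine/lastDirtyLine range to cover it.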
  public void forceRehighlightLines(int startLine, int endLineInclusive) {
    if (startLine > endLineInclusive) {
      throw new IllegalArgumentException();
    } else if (endLineInclusive < 0 || endLineInclusive >= snapshot.getLineCount()) {
      throw new IllegalArgumentException();
    }

    firstDirtyLine = firstDirtyLine != null ? Math.min(firstDirtyLine, startLine) : startLine;
    lastDirtyLine =
        lastDirtyLine != null ? Math.max(lastDirtyLine, endLineInclusive) : endLineInclusive;

    //        int start = snapshot.findLineFromOffset(startLine).getStart().getOffset();
    //        int end = (endLineInclusive == lineStates.size() - 1)
    //                ? snapshot.length()
    //                : snapshot.findLineFromOffset(endLineInclusive + 1).getStart().getOffset();
    //        if (FIX_HIGHLIGHTER_UPDATE_BUG) {
    //            fireHighlightsChange(start, snapshot.length());
    //        } else {
    //            fireHighlightsChange(start, end);
    //        }
  }
Code Example #8
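updateHighlights gathers the highlight regions for every category reported by the SemanticAnalyzerListener into an intermediate list, then fills a fresh OffsetsBag and installs it in the target container.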
  @Override
  protected void updateHighlights(
      OffsetsBag targetContainer,
      DocumentSnapshot sourceSnapshot,
      DocumentSnapshot currentSnapshot,
      SemanticAnalyzerListener listener) {
    List<Tuple2<OffsetRegion, AttributeSet>> intermediateContainer = new ArrayList<>();

    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getTemplateDeclarations(),
        templateDeclarationAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getTemplateUses(),
        templateUseAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getRegionDeclarations(),
        regionDeclarationAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getRegionUses(),
        regionUseAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getDictionaryDeclarations(),
        dictionaryDeclarationAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getDictionaryUses(),
        dictionaryUseAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getParameterDeclarations(),
        parameterDeclarationAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getParameterUses(),
        parameterUseAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getAttributeUses(),
        attributeUseAttributes);
    addHighlights(
        intermediateContainer,
        sourceSnapshot,
        currentSnapshot,
        listener.getOptions(),
        expressionOptionAttributes);

    OffsetsBag container = new OffsetsBag(currentSnapshot.getVersionedDocument().getDocument());
    fillHighlights(container, intermediateContainer);
    targetContainer.setHighlights(container);
  }
Code Example #9
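This copy constructor derives a tagger snapshot for a newer document snapshot by replaying every change between the reference version and the target version, splicing the per-line lexer states as lines are inserted or removed and marking the affected lines dirty.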
  protected AbstractTokensTaskTaggerSnapshot(
      @NonNull AbstractTokensTaskTaggerSnapshot<TState> reference,
      @NonNull DocumentSnapshot snapshot) {
    Parameters.notNull("reference", reference);
    Parameters.notNull("snapshot", snapshot);

    if (!snapshot.getVersionedDocument().equals(reference.snapshot.getVersionedDocument())) {
      throw new IllegalArgumentException("The target snapshot is not from the same document.");
    }

    if (snapshot.getVersion().getVersionNumber()
        <= reference.snapshot.getVersion().getVersionNumber()) {
      throw new UnsupportedOperationException(
          "The target snapshot must be a future version of the reference document.");
    }

    this.snapshot = snapshot;
    this.lineStates.addAll(reference.lineStates);
    this.firstDirtyLine = reference.firstDirtyLine;
    this.lastDirtyLine = reference.lastDirtyLine;
    Integer firstChangedLine = null;
    Integer lastChangedLine = null;

    for (DocumentVersion version = reference.snapshot.getVersion();
        version != null && version.getVersionNumber() < snapshot.getVersion().getVersionNumber();
        version = version.getNext()) {
      DocumentSnapshot source = version.getSnapshot();
      DocumentVersion targetVersion = version.getNext();
      assert targetVersion != null;
      DocumentSnapshot target = targetVersion.getSnapshot();
      NormalizedDocumentChangeCollection changes = version.getChanges();
      assert changes != null;
      for (int i = changes.size() - 1; i >= 0; i--) {
        DocumentChange change = changes.get(i);
        int lineCountDelta = change.getLineCountDelta();
        int oldOffset = change.getOldOffset();
        int oldLength = change.getOldLength();
        int newOffset = change.getNewOffset();
        int newLength = change.getNewLength();

        /* processChange */
        int oldStartLine = source.findLineNumber(oldOffset);
        int oldEndLine =
            oldLength == 0 ? oldStartLine : source.findLineNumber(oldOffset + oldLength - 1);
        if (lineCountDelta < 0) {
          lineStates.subList(oldStartLine, oldStartLine + Math.abs(lineCountDelta)).clear();
        } else if (lineCountDelta > 0) {
          TState endLineState = lineStates.get(oldStartLine);
          ArrayList<TState> insertedElements = new ArrayList<>();
          for (int j = 0; j < lineCountDelta; j++) {
            insertedElements.add(endLineState);
          }
          lineStates.addAll(oldStartLine, insertedElements);
        }

        if (lastDirtyLine != null && lastDirtyLine > oldStartLine) {
          lastDirtyLine += lineCountDelta;
        }

        if (lastChangedLine != null && lastChangedLine > oldStartLine) {
          lastChangedLine += lineCountDelta;
        }

        // mark every line affected by this change as dirty
        for (int j = oldStartLine; j <= oldEndLine + lineCountDelta; j++) {
          TState state = lineStates.get(j);
          lineStates.set(j, state.createDirtyState());
        }

        firstChangedLine =
            firstChangedLine != null ? Math.min(firstChangedLine, oldStartLine) : oldStartLine;
        lastChangedLine =
            lastChangedLine != null ? Math.max(lastChangedLine, oldEndLine) : oldEndLine;

        /* processAfterChange */
        if (firstChangedLine != null && lastChangedLine != null) {
          int startLine = firstChangedLine;
          int endLineInclusive = Math.min(lastChangedLine, source.getLineCount() - 1);

          firstChangedLine = null;
          lastChangedLine = null;

          /* forceRehighlightLines(startRehighlightLine, endRehighlightLine); */
          firstDirtyLine = firstDirtyLine != null ? Math.min(firstDirtyLine, startLine) : startLine;
          lastDirtyLine =
              lastDirtyLine != null ? Math.max(lastDirtyLine, endLineInclusive) : endLineInclusive;

          //                    int start = currentSnapshot.findLineFromLineNumber(startLine).getStart().getOffset();
          //                    int end = (endLineInclusive == lineStates.size() - 1)
          //                            ? currentSnapshot.length()
          //                            : currentSnapshot.findLineFromLineNumber(endLineInclusive + 1).getStart().getOffset();
          //                    if (FIX_HIGHLIGHTER_UPDATE_BUG) {
          //                        fireHighlightsChange(start, document.getLength());
          //                    } else {
          //                        fireHighlightsChange(start, end);
          //                    }
        }
      }
    }

    // clamp the dirty range to the new snapshot, guarding against the case where
    // no change left a dirty range and these are still null
    if (firstDirtyLine != null) {
      firstDirtyLine = Math.min(firstDirtyLine, snapshot.getLineCount() - 1);
    }

    if (lastDirtyLine != null) {
      lastDirtyLine = Math.min(lastDirtyLine, snapshot.getLineCount() - 1);
    }
  }
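For intuition, here is a small self-contained sketch, not from the original source, of the line-state splice performed above; a plain String stands in for TState and the change values are invented:

import java.util.ArrayList;
import java.util.List;

class LineSpliceSketch {
  public static void main(String[] args) {
    List<String> lineStates = new ArrayList<>(List.of("s0", "s1", "s2", "s3", "s4"));
    int oldStartLine = 1;
    int lineCountDelta = -2; // the edit deleted two lines starting at line 1

    if (lineCountDelta < 0) {
      // removing lines drops their saved states
      lineStates.subList(oldStartLine, oldStartLine + Math.abs(lineCountDelta)).clear();
    } else if (lineCountDelta > 0) {
      // inserting lines duplicates the state of the line the edit started on
      String endLineState = lineStates.get(oldStartLine);
      List<String> inserted = new ArrayList<>();
      for (int j = 0; j < lineCountDelta; j++) {
        inserted.add(endLineState);
      }
      lineStates.addAll(oldStartLine, inserted);
    }

    System.out.println(lineStates); // prints [s0, s3, s4]
  }
}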
Code Example #10
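tokenSkippedLines reports whether the current token starts more than one line after the line on which the previous token ended.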
  protected boolean tokenSkippedLines(int endLinePrevious, Token token) {
    int startLineCurrent = snapshot.findLineNumber(token.getStartIndex());
    return startLineCurrent > endLinePrevious + 1;
  }
Code Example #11
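getHighlights is the incremental lexing core: it adjusts the parse span, runs the lexer from the recovered start state, records per-line states as tokens stream by, collects the tags for tokens inside the requested span, and extends the rehighlight range whenever a line state changes or a multi-line token grows.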
  public List<TaggedPositionRegion<TokenTag<Token>>> getHighlights(int startOffset, int endOffset) {
    List<TaggedPositionRegion<TokenTag<Token>>> tags = new ArrayList<>();
    boolean updateOffsets = true;

    if (endOffset == Integer.MAX_VALUE) {
      endOffset = snapshot.length();
    }

    OffsetRegion span = OffsetRegion.fromBounds(startOffset, endOffset);

    if (failedTimeout) {
      return tags;
    }

    boolean spanExtended = false;

    int extendMultiLineSpanToLine = 0;
    OffsetRegion extendedSpan = span;

    synchronized (lock) {
      OffsetRegion requestedSpan = span;

      ParseRequest<TState> request = adjustParseSpan(span);
      TState startState = request.getState();
      span = request.getRegion();

      CharStream input;
      try {
        input = createInputStream(span);
      } catch (BadLocationException ex) {
        LOGGER.log(Level.WARNING, ex.getMessage(), ex);
        return tags;
      }

      TokenSourceWithStateV4<TState> lexer = createLexer(input, startState);
      lexer.setTokenFactory(new DocumentSnapshotTokenFactory(getEffectiveTokenSource(lexer)));

      Token previousToken = null;
      boolean previousTokenEndsLine = false;

      /* this is held outside the loop because only tokens which end at the end of a line
       * impact its value.
       */
      boolean lineStateChanged = false;

      while (true) {
        // TODO: perform this under a read lock
        Token token = lexer.nextToken();

        // The latter is true for EOF token with span.getEnd() at the end of the document
        boolean inBounds =
            token.getStartIndex() < span.getEnd() || token.getStopIndex() < span.getEnd();

        if (updateOffsets) {
          int startLineCurrent;
          if (token.getType() == Token.EOF) startLineCurrent = snapshot.getLineCount();
          else startLineCurrent = snapshot.findLineNumber(token.getStartIndex());

          // endLinePrevious is the line number the previous token ended on
          int endLinePrevious;
          if (previousToken != null)
            endLinePrevious = snapshot.findLineNumber(previousToken.getStopIndex() + 1);
          else endLinePrevious = snapshot.findLineNumber(span.getStart()) - 1;

          if (startLineCurrent > endLinePrevious + 1
              || (startLineCurrent == endLinePrevious + 1 && !previousTokenEndsLine)) {
            int firstMultilineLine = endLinePrevious;
            if (previousToken == null || previousTokenEndsLine) firstMultilineLine++;

            for (int i = firstMultilineLine; i < startLineCurrent; i++) {
              if (!lineStates.get(i).getIsMultiLineToken() || lineStateChanged)
                extendMultiLineSpanToLine = i + 1;

              if (inBounds) setLineState(i, lineStates.get(i).createMultiLineState());
            }
          }
        }

        if (token.getType() == Token.EOF) break;

        if (updateOffsets && isMultiLineToken(lexer, token)) {
          int startLine = snapshot.findLineNumber(token.getStartIndex());
          int stopLine = snapshot.findLineNumber(token.getStopIndex() + 1);
          for (int i = startLine; i < stopLine; i++) {
            if (!lineStates.get(i).getIsMultiLineToken()) extendMultiLineSpanToLine = i + 1;

            if (inBounds) setLineState(i, lineStates.get(i).createMultiLineState());
          }
        }

        boolean tokenEndsLine = tokenEndsAtEndOfLine(lexer, token);
        if (updateOffsets && tokenEndsLine) {
          TState stateAtEndOfLine = lexer.getCurrentState();
          int line = snapshot.findLineNumber(token.getStopIndex() + 1);
          lineStateChanged =
              lineStates.get(line).getIsMultiLineToken()
                  || !lineStates.get(line).equals(stateAtEndOfLine);

          // even if the state didn't change, we call SetLineState to make sure the
          // _first/_lastChangedLine values get updated.
          // have to check bounds for this one or the editor might not get an update (if the token
          // ends a line)
          if (updateOffsets && inBounds) setLineState(line, stateAtEndOfLine);

          if (lineStateChanged) {
            if (line < snapshot.getLineCount() - 1) {
              /* update the span's end position or the line state change won't be reflected
               * in the editor
               */
              int endPosition =
                  line < snapshot.getLineCount() - 2
                      ? snapshot.findLineFromLineNumber(line + 2).getStart().getOffset()
                      : snapshot.length();
              if (endPosition > extendedSpan.getEnd()) {
                spanExtended = true;
                extendedSpan = OffsetRegion.fromBounds(extendedSpan.getStart(), endPosition);
              }
            }
          }
        }

        if (token.getStartIndex() >= span.getEnd()) {
          break;
        }

        previousToken = token;
        previousTokenEndsLine = tokenEndsLine;

        if (token.getStopIndex() < requestedSpan.getStart()) {
          continue;
        }

        Collection<TaggedPositionRegion<TokenTag<Token>>> tokenClassificationSpans =
            getTagsForToken(token);
        if (tokenClassificationSpans != null) {
          tags.addAll(tokenClassificationSpans);
        }

        if (!inBounds) {
          break;
        }
      }
    }

    if (updateOffsets && extendMultiLineSpanToLine > 0) {
      int endPosition =
          extendMultiLineSpanToLine < snapshot.getLineCount() - 1
              ? snapshot
                  .findLineFromLineNumber(extendMultiLineSpanToLine + 1)
                  .getStart()
                  .getOffset()
              : snapshot.length();
      if (endPosition > extendedSpan.getEnd()) {
        spanExtended = true;
        extendedSpan = OffsetRegion.fromBounds(extendedSpan.getStart(), endPosition);
      }
    }

    if (updateOffsets && spanExtended) {
      /* Subtract 1 from each of these because the spans include the line break on their last
       * line, forcing it to appear as the first position on the following line.
       */
      assert extendedSpan.getEnd() > span.getEnd();
      int firstLine = snapshot.findLineNumber(span.getEnd());
      int lastLine = snapshot.findLineNumber(extendedSpan.getEnd()) - 1;
      // when considering the last line of a document, span and extendedSpan may end on the same
      // line
      forceRehighlightLines(firstLine, Math.max(firstLine, lastLine));
    }

    return tags;
  }
Code Example #12
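parse supplies the LEXER_TOKENS data for a snapshot: it maintains a per-document cache of tagger snapshots, translates the most recent earlier tagger forward when one exists, and otherwise builds and initializes a new GrammarTokensTaskTaggerSnapshot.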
  @Override
  public void parse(
      ParserTaskManager taskManager,
      ParseContext context,
      DocumentSnapshot snapshot,
      Collection<? extends ParserDataDefinition<?>> requestedData,
      ParserResultHandler results)
      throws InterruptedException, ExecutionException {

    if (requestedData.contains(GrammarParserDataDefinitions.LEXER_TOKENS)) {
      DocumentCacheMap documentCache;

      synchronized (lock) {
        documentCache =
            (DocumentCacheMap) snapshot.getVersionedDocument().getProperty(DOCUMENT_CACHE_KEY);

        if (documentCache == null) {
          documentCache = new DocumentCacheMap();
          snapshot.getVersionedDocument().putProperty(DOCUMENT_CACHE_KEY, documentCache);
        }
      }

      ParserData<Tagger<TokenTag<Token>>> result;
      synchronized (documentCache) {
        if (GrammarEditorKit.isLegacyMode(snapshot)) {
          result =
              new BaseParserData<>(
                  context, GrammarParserDataDefinitions.LEXER_TOKENS, snapshot, null);
          documentCache.clear();
          results.addResult(result);
          return;
        }

        result = documentCache.get(snapshot);

        if (result == null) {
          int requestedVersion = snapshot.getVersion().getVersionNumber();
          ParserData<Tagger<TokenTag<Token>>> previousResult = null;
          int previousVersion = -1;
          List<ParserData<Tagger<TokenTag<Token>>>> values;
          synchronized (documentCache) {
            values = new ArrayList<>(documentCache.values());
          }

          for (ParserData<Tagger<TokenTag<Token>>> data : values) {
            int dataVersion = data.getSnapshot().getVersion().getVersionNumber();
            if (dataVersion > previousVersion && dataVersion < requestedVersion) {
              previousResult = data;
              previousVersion = dataVersion;
            }
          }

          if (previousResult != null) {
            GrammarTokensTaskTaggerSnapshot previousTagger =
                (GrammarTokensTaskTaggerSnapshot) previousResult.getData();
            result =
                new BaseParserData<>(
                    context,
                    GrammarParserDataDefinitions.LEXER_TOKENS,
                    snapshot,
                    previousTagger.translateTo(snapshot));
          } else {
            GrammarTokensTaskTaggerSnapshot tagger = new GrammarTokensTaskTaggerSnapshot(snapshot);
            tagger.initialize();
            result =
                new BaseParserData<>(
                    context, GrammarParserDataDefinitions.LEXER_TOKENS, snapshot, tagger);
          }

          synchronized (documentCache) {
            ParserData<Tagger<TokenTag<Token>>> updatedResult = documentCache.get(snapshot);
            if (updatedResult != null) {
              result = updatedResult;
            } else {
              documentCache.put(snapshot, result);
            }
          }
        }
      }

      results.addResult(result);
    }
  }