diff --git a/src/tokenized-buffer.coffee b/src/tokenized-buffer.coffee
index d9f1bcba7..082c923dd 100644
--- a/src/tokenized-buffer.coffee
+++ b/src/tokenized-buffer.coffee
@@ -153,7 +153,7 @@ class TokenizedBuffer extends Model
       row = startRow
       loop
         previousStack = @stackForRow(row)
-        @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1))
+        @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1), @parentScopesForRow(row))
         if --rowsRemaining is 0
           filledRegion = false
           endRow = row
@@ -213,7 +213,7 @@ class TokenizedBuffer extends Model
 
     @updateInvalidRows(start, end, delta)
     previousEndStack = @stackForRow(end) # used in spill detection below
-    newTokenizedLines = @buildTokenizedLinesForRows(start, end + delta, @stackForRow(start - 1))
+    newTokenizedLines = @buildTokenizedLinesForRows(start, end + delta, @stackForRow(start - 1), @parentScopesForRow(start))
     _.spliceWithArray(@tokenizedLines, start, end - start + 1, newTokenizedLines)
 
     start = @retokenizeWhitespaceRowsIfIndentLevelChanged(start - 1, -1)
@@ -234,7 +234,7 @@ class TokenizedBuffer extends Model
     line = @tokenizedLines[row]
     if line?.isOnlyWhitespace() and @indentLevelForRow(row) isnt line.indentLevel
       while line?.isOnlyWhitespace()
-        @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1))
+        @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1), @parentScopesForRow(row))
         row += increment
         line = @tokenizedLines[row]
 
@@ -276,16 +276,18 @@ class TokenizedBuffer extends Model
       @tokenizedLineForRow(row).isComment() and
       @tokenizedLineForRow(nextRow).isComment()
 
-  buildTokenizedLinesForRows: (startRow, endRow, startingStack) ->
+  buildTokenizedLinesForRows: (startRow, endRow, startingStack, startingParentScopes) ->
     ruleStack = startingStack
+    parentScopes = startingParentScopes
     stopTokenizingAt = startRow + @chunkSize
     tokenizedLines = for row in [startRow..endRow]
       if (ruleStack or row is 0) and row < stopTokenizingAt
-        screenLine = @buildTokenizedLineForRow(row, ruleStack)
-        ruleStack = screenLine.ruleStack
+        tokenizedLine = @buildTokenizedLineForRow(row, ruleStack, parentScopes)
+        ruleStack = tokenizedLine.ruleStack
+        parentScopes = @scopesFromContent(parentScopes, tokenizedLine.content)
       else
-        screenLine = @buildPlaceholderTokenizedLineForRow(row)
-      screenLine
+        tokenizedLine = @buildPlaceholderTokenizedLineForRow(row, parentScopes)
+      tokenizedLine
 
     if endRow >= stopTokenizingAt
       @invalidateRow(stopTokenizingAt)
@@ -298,21 +300,22 @@ class TokenizedBuffer extends Model
 
   buildPlaceholderTokenizedLineForRow: (row) ->
     line = @buffer.lineForRow(row)
-    tokens = [new Token(value: line, scopes: [@grammar.scopeName])]
+    parentScopes = [@grammar.idForScope(@grammar.scopeName)]
+    content = [line]
     tabLength = @getTabLength()
     indentLevel = @indentLevelForRow(row)
     lineEnding = @buffer.lineEndingForRow(row)
-    new TokenizedLine({tokens, tabLength, indentLevel, @invisibles, lineEnding})
+    new TokenizedLine({parentScopes, content, tabLength, indentLevel, @invisibles, lineEnding})
 
-  buildTokenizedLineForRow: (row, ruleStack) ->
-    @buildTokenizedLineForRowWithText(row, @buffer.lineForRow(row), ruleStack)
+  buildTokenizedLineForRow: (row, ruleStack, parentScopes) ->
+    @buildTokenizedLineForRowWithText(row, @buffer.lineForRow(row), ruleStack, parentScopes)
 
-  buildTokenizedLineForRowWithText: (row, line, ruleStack = @stackForRow(row - 1)) ->
+  buildTokenizedLineForRowWithText: (row, line, ruleStack = @stackForRow(row - 1), parentScopes = @parentScopesForRow(row)) ->
     lineEnding = @buffer.lineEndingForRow(row)
     tabLength = @getTabLength()
     indentLevel = @indentLevelForRow(row)
-    {tokens, ruleStack} = @grammar.tokenizeLine(line, ruleStack, row is 0)
-    new TokenizedLine({tokens, ruleStack, tabLength, lineEnding, indentLevel, @invisibles})
+    {content, ruleStack} = @grammar.tokenizeLine(line, ruleStack, row is 0)
+    new TokenizedLine({parentScopes, content, ruleStack, tabLength, lineEnding, indentLevel, @invisibles})
 
   tokenizedLineForRow: (bufferRow) ->
     @tokenizedLines[bufferRow]
@@ -320,6 +323,24 @@ class TokenizedBuffer extends Model
   stackForRow: (bufferRow) ->
     @tokenizedLines[bufferRow]?.ruleStack
 
+  parentScopesForRow: (bufferRow) ->
+    if bufferRow > 0
+      precedingLine = @tokenizedLines[bufferRow - 1]
+      @scopesFromContent(precedingLine.parentScopes, precedingLine.content)
+    else
+      []
+
+  scopesFromContent: (startingScopes, content) ->
+    scopes = startingScopes.slice()
+    for symbol in content when typeof symbol is 'number'
+      if symbol > 0
+        scopes.push(symbol)
+      else
+        popped = scopes.pop()
+        unless -popped is symbol
+          throw new Error("Encountered an invalid scope end id. Popped #{popped}, expected to pop #{-symbol}.")
+    scopes
+
   indentLevelForRow: (bufferRow) ->
     line = @buffer.lineForRow(bufferRow)
     indentLevel = 0
diff --git a/src/tokenized-line.coffee b/src/tokenized-line.coffee
index b81d972a0..942d72c1a 100644
--- a/src/tokenized-line.coffee
+++ b/src/tokenized-line.coffee
@@ -1,5 +1,6 @@
 _ = require 'underscore-plus'
 {isPairedCharacter} = require './text-utils'
+Token = require './token'
 
 NonWhitespaceRegex = /\S/
 LeadingWhitespaceRegex = /^\s*/
@@ -14,9 +15,9 @@ class TokenizedLine
   firstNonWhitespaceIndex: 0
   foldable: false
 
-  constructor: ({tokens, @lineEnding, @ruleStack, @startBufferColumn, @fold, @tabLength, @indentLevel, @invisibles}) ->
+  constructor: ({@parentScopes, @content, @lineEnding, @ruleStack, @startBufferColumn, @fold, @tabLength, @indentLevel, @invisibles}) ->
     @startBufferColumn ?= 0
-    @tokens = @breakOutAtomicTokens(tokens)
+    # @tokens = @breakOutAtomicTokens(tokens)
     @text = @buildText()
     @bufferDelta = @buildBufferDelta()
     @softWrapIndentationTokens = @getSoftWrapIndentationTokens()
@@ -28,6 +29,10 @@ class TokenizedLine
     @substituteInvisibleCharacters()
     @buildEndOfLineInvisibles() if @lineEnding?
 
+  Object.defineProperty @prototype, 'tokens', get: ->
+    tokens = atom.grammars.decodeContent(@parentScopes.concat(@content))
+    tokens.map (properties) -> new Token(properties)
+
   buildText: ->
     text = ""
     text += token.value for token in @tokens
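
For context, here is a minimal sketch (not part of the diff) of the `content` encoding the change relies on: `tokenizeLine` is assumed to return a flat array in which strings carry token text, a positive number opens the scope with that id, and the matching negative number closes it. `scopesFromContent` folds a line's content into the stack of scopes still open at the start of the next row. The scope ids and the sample line below are invented for illustration only.

# Standalone CoffeeScript sketch of the scope-folding logic used by scopesFromContent above.
# The ids (3, 7) and the sample content array are hypothetical.
scopesFromContent = (startingScopes, content) ->
  scopes = startingScopes.slice()
  for symbol in content when typeof symbol is 'number'
    if symbol > 0
      scopes.push(symbol)     # positive id: a scope opened on this line
    else
      popped = scopes.pop()   # negative id: the most recently opened scope closed
      unless -popped is symbol
        throw new Error("Scope end #{symbol} does not match open scope #{popped}")
  scopes

# Suppose 3 encodes 'source.js' and 7 encodes 'string.quoted.single.js':
content = [3, "console.log(", 7, "'hi'", -7, ")"]
console.log scopesFromContent([], content)   # => [3]: the string closed, source.js stays open for the next row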