Mirror of https://github.com/atom/atom.git (synced 2026-02-10 14:45:11 -05:00)
Build Token objects in TextMateGrammar. Tokenization-related renames.
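In rough terms, callers move from getLineTokens/stack to tokenizeLine/ruleStack, and the returned tokens become Token objects instead of plain hashes. A hedged before/after sketch of the caller-facing change (the grammar variable, the line, and the tabLength value are hypothetical stand-ins; only the method names and return shapes come from the hunks below):

# before this commit: plain token hashes, "stack" terminology
{tokens, stack} = grammar.getLineTokens(line, stack)

# after this commit: Token objects, "ruleStack" terminology, options hash
{tokens, ruleStack} = grammar.tokenizeLine(line, {ruleStack, tabLength: 2})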
@@ -168,6 +168,5 @@ class LanguageMode
     if desiredIndentLevel < currentIndentLevel
       @editSession.setIndentationForBufferRow(bufferRow, desiredIndentLevel)
 
-  getLineTokens: (line, stack) ->
-    {tokens, stack} = @grammar.getLineTokens(line, stack)
+  tokenizeLine: (line, stack) ->
+    {tokens, stack} = @grammar.tokenizeLine(line, stack)
@@ -2,14 +2,14 @@ _ = require 'underscore'
 
 module.exports =
 class ScreenLine
-  constructor: ({@tokens, @stack, @bufferRows, @startBufferColumn, @fold, @foldable}) ->
+  constructor: ({@tokens, @ruleStack, @bufferRows, @startBufferColumn, @fold, @foldable}) ->
     @bufferRows ?= 1
     @startBufferColumn ?= 0
     @foldable ?= false
     @text = _.pluck(@tokens, 'value').join('')
 
   copy: ->
-    new ScreenLine({@tokens, @stack, @bufferRows, @startBufferColumn, @fold, @foldable})
+    new ScreenLine({@tokens, @ruleStack, @bufferRows, @startBufferColumn, @fold, @foldable})
 
   clipScreenColumn: (column, options={}) ->
     { skipAtomicTokens } = options
@@ -73,13 +73,13 @@ class ScreenLine
       tokens: leftTokens
       bufferRows: 0
       startBufferColumn: @startBufferColumn
-      stack: @stack
+      ruleStack: @ruleStack
       foldable: @foldable
     )
     rightFragment = new ScreenLine(
       tokens: rightTokens
       startBufferColumn: @startBufferColumn + column
-      stack: @stack
+      ruleStack: @ruleStack
     )
     [leftFragment, rightFragment]
@@ -1,6 +1,7 @@
 _ = require 'underscore'
 fs = require 'fs'
 plist = require 'plist'
+Token = require 'token'
 
 module.exports =
 class TextMateGrammar
@@ -27,7 +28,8 @@ class TextMateGrammar
     for name, data of repository
       @repository[name] = new Rule(this, data)
 
-  getLineTokens: (line, ruleStack=[@initialRule]) ->
+  tokenizeLine: (line, {ruleStack, tabLength}={}) ->
+    ruleStack ?= [@initialRule]
     ruleStack = new Array(ruleStack...) # clone ruleStack
     tokens = []
     position = 0
@@ -36,28 +38,31 @@ class TextMateGrammar
       scopes = scopesFromStack(ruleStack)
 
       if line.length == 0
-        tokens = [{value: "", scopes: scopes}]
-        return { tokens, scopes }
+        tokens = [new Token(value: "", scopes: scopes)]
+        return { tokens, ruleStack }
 
       break if position == line.length
 
       if match = _.last(ruleStack).getNextTokens(ruleStack, line, position)
         { nextTokens, tokensStartPosition, tokensEndPosition } = match
         if position < tokensStartPosition # unmatched text before next tokens
-          tokens.push
+          tokens.push(new Token(
             value: line[position...tokensStartPosition]
             scopes: scopes
+          ))
 
         tokens.push(nextTokens...)
         position = tokensEndPosition
 
       else # push filler token for unmatched text at end of line
-        tokens.push
+        tokens.push(new Token(
          value: line[position...line.length]
          scopes: scopes
+        ))
        break
 
-    { tokens, stack: ruleStack }
+    tokens = _.flatten(tokens.map (token) -> token.breakOutTabCharacters(tabLength))
+    { tokens, ruleStack }
 
   ruleForInclude: (name) ->
     if name[0] == "#"
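After this hunk, every token produced by TextMateGrammar is a Token instance carrying value and scopes, and tab characters are split into their own tokens via breakOutTabCharacters before the result is returned. A hedged illustration (the grammar, the input line, and the example scope names are hypothetical; only the {tokens, ruleStack} shape and the Token fields come from the hunk above):

{tokens, ruleStack} = grammar.tokenizeLine("\tif foo", {tabLength: 2})
# tokens is an array of Token objects rather than plain hashes, e.g.
#   token.value  -> "if"
#   token.scopes -> ["source.coffee", "keyword.control.coffee"]
# and the leading tab is broken out into its own token by
# token.breakOutTabCharacters(tabLength) inside tokenizeLine.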
@@ -158,7 +163,6 @@ class Pattern
   getIncludedPatterns: (included) ->
     if @include
       rule = @grammar.ruleForInclude(@include)
-      # console.log "Could not find rule for include #{@include} in #{@grammar.name} grammar" unless rule
       rule?.getIncludedPatterns(included) ? []
     else
       [this]
@@ -175,7 +179,7 @@ class Pattern
     if zeroLengthMatch
       tokens = []
     else
-      tokens = [{ value: line[start...end], scopes: scopes }]
+      tokens = [new Token(value: line[start...end], scopes: scopes)]
     if @pushRule
       stack.push(@pushRule.getRuleToPush(line, captureIndices))
     else if @popRule
@@ -201,18 +205,20 @@ class Pattern
         continue
 
       if childCaptureStart > previousChildCaptureEnd
-        tokens.push
+        tokens.push(new Token(
           value: line[previousChildCaptureEnd...childCaptureStart]
           scopes: scopes
+        ))
 
       captureTokens = @getTokensForCaptureIndices(line, captureIndices, scopes)
       tokens.push(captureTokens...)
       previousChildCaptureEnd = childCaptureEnd
 
     if parentCaptureEnd > previousChildCaptureEnd
-      tokens.push
+      tokens.push(new Token(
         value: line[previousChildCaptureEnd...parentCaptureEnd]
         scopes: scopes
+      ))
 
     tokens
@@ -53,24 +53,18 @@ class TokenizedBuffer
     @trigger("change", {oldRange, newRange})
 
   buildScreenLinesForRows: (startRow, endRow, startingStack) ->
-    stack = startingStack
+    ruleStack = startingStack
     for row in [startRow..endRow]
-      screenLine = @buildScreenLineForRow(row, stack)
-      stack = screenLine.stack
+      screenLine = @buildScreenLineForRow(row, ruleStack)
+      ruleStack = screenLine.ruleStack
       screenLine
 
-  buildScreenLineForRow: (row, stack) ->
+  buildScreenLineForRow: (row, ruleStack) ->
     line = @buffer.lineForRow(row)
-    {tokens, stack} = @languageMode.getLineTokens(line, stack)
-    tokenObjects = []
-    for tokenProperties in tokens
-      token = new Token(tokenProperties)
-      tokenObjects.push(token.breakOutTabCharacters(@tabLength)...)
-    text = _.pluck(tokenObjects, 'value').join('')
-    new ScreenLine(
-      tokens: tokenObjects
-      stack: stack
-    )
+    val = @languageMode.tokenizeLine(line, {ruleStack, @tabLength})
+    console.log val, line unless val.ruleStack
+    new ScreenLine(val)
 
   lineForScreenRow: (row) ->
     @screenLines[row]
@@ -79,7 +73,7 @@ class TokenizedBuffer
     @screenLines[startRow..endRow]
 
   stackForRow: (row) ->
-    @screenLines[row]?.stack
+    @screenLines[row]?.ruleStack
 
   scopesForPosition: (position) ->
     position = Point.fromObject(position)
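The TokenizedBuffer hunks above thread the rule stack from one row into the next: each ScreenLine keeps the ruleStack its row ended with, and that stack seeds tokenization of the following row. A minimal sketch of that flow (hedged; buffer, grammar, and the tabLength value are hypothetical stand-ins for the @buffer, @languageMode, and @tabLength fields TokenizedBuffer actually uses, and ScreenLine is the class from the diff above):

ruleStack = null
screenLines = for row in [0..buffer.getLastRow()]
  line = buffer.lineForRow(row)
  # the stack left over from the previous row seeds this row's tokenization
  {tokens, ruleStack} = grammar.tokenizeLine(line, {ruleStack, tabLength: 2})
  new ScreenLine({tokens, ruleStack})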