Only parse the first 100 tokens of a line

This fixes the UI thread lockup when there is a gigantic line in a
file (like minified js). I took a stab at making line tokenization
async on the atom/async-single-line-tokenization branch, but it was
still too slow.

Closes #150
This commit is contained in:
probablycorey
2013-03-28 17:14:45 -07:00
parent f51102a230
commit aab50d3c2c
2 changed files with 18 additions and 0 deletions

View File

@@ -298,3 +298,13 @@ describe "TextMateGrammar", ->
grammar = syntax.selectGrammar("style.scss")
{tokens} = grammar.tokenizeLine("@mixin x() { -moz-selector: whatever; }")
expect(tokens[9]).toEqual value: "-moz-selector", scopes: ["source.css.scss", "meta.property-list.scss", "meta.property-name.scss"]
# Regression spec for the maxTokensPerLine cap: tokenizing a pathological line
# (e.g. minified JS) must bail out early instead of locking up the UI thread.
describe "when a line has more tokens than `maxTokensPerLine`", ->
it "creates a final token with the remaining text and resets the ruleStack to match the begining of the line", ->
grammar = syntax.selectGrammar("hello.js")
# Lower the cap from its default so the limit is hit with a short input line.
grammar.maxTokensPerLine = 5
originalRuleStack = [grammar.initialRule, grammar.initialRule, grammar.initialRule]
{tokens, ruleStack} = grammar.tokenizeLine("one(two(three(four(five(_param_)))))", originalRuleStack)
# Exactly maxTokensPerLine tokens: 4 real tokens plus one catch-all token
# holding the untokenized remainder of the line.
expect(tokens.length).toBe 5
expect(tokens[4].value).toBe "three(four(five(_param_)))))"
# The returned ruleStack must be the stack passed in (reset, not the
# partially-advanced clone), so the next line starts from a known state.
expect(ruleStack).toEqual originalRuleStack

View File

@@ -26,6 +26,7 @@ class TextMateGrammar
repository: null
initialRule: null
firstLineRegex: null
maxTokensPerLine: 100
constructor: ({ @name, @fileTypes, @scopeName, patterns, repository, @foldingStopMarker, firstLineMatch}) ->
@initialRule = new Rule(this, {@scopeName, patterns})
@@ -38,6 +39,7 @@ class TextMateGrammar
@repository[name] = new Rule(this, data)
tokenizeLine: (line, ruleStack=[@initialRule], firstLine=false) ->
originalRuleStack = ruleStack
ruleStack = new Array(ruleStack...) # clone ruleStack
tokens = []
position = 0
@@ -46,6 +48,12 @@ class TextMateGrammar
previousRuleStackLength = ruleStack.length
previousPosition = position
if tokens.length >= (@maxTokensPerLine - 1)
token = new Token(value: line[position..], scopes: scopes)
tokens.push token
ruleStack = originalRuleStack
break
if line.length == 0
tokens = [new Token(value: "", scopes: scopes)]
return { tokens, ruleStack }