mirror of
https://github.com/atom/atom.git
synced 2026-01-23 13:58:08 -05:00
Only parse the first 100 tokens of a line
This fixes the UI thread lockup when there is a gigantic line in a file (like minified js). I took a stab at making line tokenization async on the atom/async-single-line-tokenization branch, but it was still too slow. Closes #150
This commit is contained in:
@@ -298,3 +298,13 @@ describe "TextMateGrammar", ->
|
||||
grammar = syntax.selectGrammar("style.scss")
|
||||
{tokens} = grammar.tokenizeLine("@mixin x() { -moz-selector: whatever; }")
|
||||
expect(tokens[9]).toEqual value: "-moz-selector", scopes: ["source.css.scss", "meta.property-list.scss", "meta.property-name.scss"]
|
||||
|
||||
describe "when a line has more tokens than `maxTokensPerLine`", ->
|
||||
it "creates a final token with the remaining text and resets the ruleStack to match the begining of the line", ->
|
||||
grammar = syntax.selectGrammar("hello.js")
|
||||
grammar.maxTokensPerLine = 5
|
||||
originalRuleStack = [grammar.initialRule, grammar.initialRule, grammar.initialRule]
|
||||
{tokens, ruleStack} = grammar.tokenizeLine("one(two(three(four(five(_param_)))))", originalRuleStack)
|
||||
expect(tokens.length).toBe 5
|
||||
expect(tokens[4].value).toBe "three(four(five(_param_)))))"
|
||||
expect(ruleStack).toEqual originalRuleStack
|
||||
@@ -26,6 +26,7 @@ class TextMateGrammar
|
||||
repository: null
|
||||
initialRule: null
|
||||
firstLineRegex: null
|
||||
maxTokensPerLine: 100
|
||||
|
||||
constructor: ({ @name, @fileTypes, @scopeName, patterns, repository, @foldingStopMarker, firstLineMatch}) ->
|
||||
@initialRule = new Rule(this, {@scopeName, patterns})
|
||||
@@ -38,6 +39,7 @@ class TextMateGrammar
|
||||
@repository[name] = new Rule(this, data)
|
||||
|
||||
tokenizeLine: (line, ruleStack=[@initialRule], firstLine=false) ->
|
||||
originalRuleStack = ruleStack
|
||||
ruleStack = new Array(ruleStack...) # clone ruleStack
|
||||
tokens = []
|
||||
position = 0
|
||||
@@ -46,6 +48,12 @@ class TextMateGrammar
|
||||
previousRuleStackLength = ruleStack.length
|
||||
previousPosition = position
|
||||
|
||||
if tokens.length >= (@maxTokensPerLine - 1)
|
||||
token = new Token(value: line[position..], scopes: scopes)
|
||||
tokens.push token
|
||||
ruleStack = originalRuleStack
|
||||
break
|
||||
|
||||
if line.length == 0
|
||||
tokens = [new Token(value: "", scopes: scopes)]
|
||||
return { tokens, ruleStack }
|
||||
|
||||
Reference in New Issue
Block a user