Don't terminate tokenization if stack size changes

Previously Python import blocks were not tokenizing correctly since
the loop was prematurely terminating when a match at the end of the line
was reached and no tokens were generated for it.

This approach was incorrect since the tokenizer may have just popped a rule
and another loop iteration could possibly pop more rules.

Now this early termination is only performed if the stack size hasn't changed.
This commit is contained in:
Kevin Sawicki
2013-08-20 11:28:32 -07:00
parent b10a01ddc2
commit 27cee3e19c
2 changed files with 25 additions and 1 deletion

View File

@@ -14,6 +14,7 @@ describe "TextMateGrammar", ->
atom.activatePackage('ruby-tmbundle', sync: true)
atom.activatePackage('html-tmbundle', sync: true)
atom.activatePackage('php-tmbundle', sync: true)
atom.activatePackage('python-tmbundle', sync: true)
grammar = syntax.selectGrammar("hello.coffee")
describe "@loadSync(path)", ->
@@ -661,3 +662,26 @@ describe "TextMateGrammar", ->
expect(tokens[0].value).toBe "'"
expect(tokens[1].value).toBe "\uD835\uDF97"
expect(tokens[2].value).toBe "'"
describe "python", ->
it "parses import blocks correctly", ->
grammar = syntax.selectGrammar("file.py")
lines = grammar.tokenizeLines "import a\nimport b"
line1 = lines[0]
expect(line1.length).toBe 3
expect(line1[0].value).toEqual "import"
expect(line1[0].scopes).toEqual ["source.python", "keyword.control.import.python"]
expect(line1[1].value).toEqual " "
expect(line1[1].scopes).toEqual ["source.python"]
expect(line1[2].value).toEqual "a"
expect(line1[2].scopes).toEqual ["source.python"]
line2 = lines[1]
expect(line2.length).toBe 3
expect(line2[0].value).toEqual "import"
expect(line2[0].scopes).toEqual ["source.python", "keyword.control.import.python"]
expect(line2[1].value).toEqual " "
expect(line2[1].scopes).toEqual ["source.python"]
expect(line2[2].value).toEqual "b"
expect(line2[2].scopes).toEqual ["source.python"]

View File

@@ -146,7 +146,7 @@ class TextMateGrammar
tokens.push(nextTokens...)
position = tokensEndPosition
break if position is line.length and nextTokens.length is 0
break if position is line.length and nextTokens.length is 0 and ruleStack.length is previousRuleStackLength
else # push filler token for unmatched text at end of line
if position < line.length