Merge pull request #19538 from atom/as-ns/tokenized-line-tokens-shim

Shim tokens in TokenizedLines returned from TreeSitterLanguageMode
This commit is contained in:
Nathan Sobo
2019-06-14 15:06:22 -07:00
committed by GitHub
3 changed files with 91 additions and 10 deletions

View File

@@ -2210,6 +2210,47 @@ describe('TreeSitterLanguageMode', () => {
expect(editor.getSelectedText()).toBe('html ` <b>c${def()}e${f}g</b> `');
});
});
// Spec for TreeSitterLanguageMode.tokenizedLineForRow(row): the returned
// TokenizedLine must expose a pre-populated `tokens` array of classic
// {value, scopes} token objects (the "shim" this PR introduces), so callers
// written against the TextMate tokenizer keep working.
describe('.tokenizedLineForRow(row)', () => {
it('returns a shimmed TokenizedLine with tokens', () => {
// Map tree-sitter node selectors to the scope names asserted below.
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
parser: 'tree-sitter-javascript',
scopes: {
program: 'source',
'call_expression > identifier': 'function',
property_identifier: 'property',
'call_expression > member_expression > property_identifier': 'method',
identifier: 'variable'
}
});
// Four rows: code, empty line, whitespace-only line, indented identifier.
buffer.setText('aa.bbb = cc(d.eee());\n\n \n b');
const languageMode = new TreeSitterLanguageMode({ buffer, grammar });
buffer.setLanguageMode(languageMode);
// Row 0: each syntax-highlighted span becomes one token; un-scoped text
// (punctuation runs like '());') falls back to the root 'source' scope.
expect(languageMode.tokenizedLineForRow(0).tokens).toEqual([
{ value: 'aa', scopes: ['source', 'variable'] },
{ value: '.', scopes: ['source'] },
{ value: 'bbb', scopes: ['source', 'property'] },
{ value: ' = ', scopes: ['source'] },
{ value: 'cc', scopes: ['source', 'function'] },
{ value: '(', scopes: ['source'] },
{ value: 'd', scopes: ['source', 'variable'] },
{ value: '.', scopes: ['source'] },
{ value: 'eee', scopes: ['source', 'method'] },
{ value: '());', scopes: ['source'] }
]);
// Row 1 is empty: no tokens at all, not a single empty token.
expect(languageMode.tokenizedLineForRow(1).tokens).toEqual([]);
// Row 2 is whitespace-only: the whitespace is still emitted as a token.
expect(languageMode.tokenizedLineForRow(2).tokens).toEqual([
{ value: ' ', scopes: ['source'] }
]);
// Row 3: leading whitespace and the identifier are separate tokens.
expect(languageMode.tokenizedLineForRow(3).tokens).toEqual([
{ value: ' ', scopes: ['source'] },
{ value: 'b', scopes: ['source', 'variable'] }
]);
});
});
});
function nextHighlightingUpdate(languageMode) {

View File

@@ -10,21 +10,25 @@ class TokenizedLine
# NOTE(review): this hunk is a unified diff rendered WITHOUT its +/- markers,
# so the pre-change and post-change versions of each edited line appear as
# adjacent duplicates. The comments below mark which is which; the fragment
# is not valid CoffeeScript as displayed.
return unless properties?
# OLD line (removed): destructuring without the new `tokens` property.
{@openScopes, @text, @tags, @ruleStack, @tokenIterator, @grammar} = properties
# NEW line (added): also pulls `tokens` out of the constructor properties...
{@openScopes, @text, @tags, @ruleStack, @tokenIterator, @grammar, tokens} = properties
# ...and caches them so the `tokens` getter can short-circuit (the shim path
# used by TreeSitterLanguageMode, which builds tokens up front).
@cachedTokens = tokens
getTokenIterator: -> @tokenIterator.reset(this)
Object.defineProperty @prototype, 'tokens', get: ->
# OLD body (removed): always rebuilt tokens from the token iterator.
iterator = @getTokenIterator()
tokens = []
# NEW body (added): return the cached tokens when present; otherwise fall
# back to the original iterator-driven construction.
if @cachedTokens
@cachedTokens
else
iterator = @getTokenIterator()
tokens = []
while iterator.next()
tokens.push(new Token({
value: iterator.getText()
scopes: iterator.getScopes().slice()
}))
# OLD loop (removed): same construction at the getter's top level.
while iterator.next()
tokens.push(new Token({
value: iterator.getText()
scopes: iterator.getScopes().slice()
}))
tokens
tokens
# Look up the token that covers `bufferColumn` on this line, delegating the
# column-to-index mapping to tokenIndexAtBufferColumn.
tokenAtBufferColumn: (bufferColumn) ->
  index = @tokenIndexAtBufferColumn(bufferColumn)
  @tokens[index]

View File

@@ -488,9 +488,45 @@ class TreeSitterLanguageMode {
*/
// Build a TokenizedLine for `row` whose `tokens` are precomputed by walking
// the tree-sitter highlight iterator across just this row, converting each
// highlighted span into a classic {value, scopes} Token.
// NOTE(review): this hunk is a diff rendered without +/- markers (see the
// duplicated `text:` lines below), and the `new TokenizedLine({...})` call is
// truncated by the view — the closing of the object literal is not visible.
tokenizedLineForRow(row) {
const lineText = this.buffer.lineForRow(row);
const tokens = [];
const iterator = this.buildHighlightIterator();
// Seek the iterator to the start of this row; `scopes` holds the scope ids
// open at that position and is maintained as a stack while we advance.
let start = { row, column: 0 };
const scopes = iterator.seek(start, row);
while (true) {
const end = iterator.getPosition();
// Clamp a boundary that falls on a later row to the end of this row, so
// the final token never extends past the line.
if (end.row > row) {
end.row = row;
end.column = lineText.length;
}
// Emit a token for the span since the previous boundary (skip zero-width
// spans). Scope ids are translated to scope-name strings for the shim.
if (end.column > start.column) {
tokens.push(
new Token({
value: lineText.substring(start.column, end.column),
scopes: scopes.map(s => this.grammar.scopeNameForScopeId(s))
})
);
}
if (end.column < lineText.length) {
// Update the scope stack at this boundary: pop the scopes closing here,
// then push the ones opening, and advance to the next boundary.
const closeScopeCount = iterator.getCloseScopeIds().length;
for (let i = 0; i < closeScopeCount; i++) {
scopes.pop();
}
scopes.push(...iterator.getOpenScopeIds());
start = end;
iterator.moveToSuccessor();
} else {
break;
}
}
return new TokenizedLine({
openScopes: [],
// OLD line (removed by this diff): recomputed the line text...
text: this.buffer.lineForRow(row),
// NEW line (added): ...reuses the `lineText` fetched above.
text: lineText,
tokens,
tags: [],
ruleStack: [],
lineEnding: this.buffer.lineEndingForRow(row),