Merge pull request #16299 from atom/mb-tree-sitter-parsers
Allow Tree-sitter parsers to be used for syntax highlighting and code folding
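In short, this change adds a new 'tree-sitter' grammar type (TreeSitterGrammar), a TreeSitterLanguageMode that drives highlighting, folding, and syntax-aware selection from the parse tree, a SyntaxScopeMap that maps syntax-tree node selectors to scope names, and an opt-in core.useTreeSitterParsers setting. A minimal usage sketch, assuming the same parser module and grammar path that the specs in this diff use (atom.grammars is the global registry):

    const TextBuffer = require('text-buffer')
    const TreeSitterGrammar = require('./src/tree-sitter-grammar')
    const TreeSitterLanguageMode = require('./src/tree-sitter-language-mode')

    const jsGrammarPath = require.resolve('language-javascript/grammars/tree-sitter-javascript.cson')
    const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
      parser: 'tree-sitter-javascript',
      // Map syntax-tree node selectors to highlighting scopes.
      scopes: {'program': 'source', 'call_expression > identifier': 'function'},
      // Declare which nodes are foldable and where each fold starts and ends.
      folds: [{start: {type: '{', index: 0}, end: {type: '}', index: -1}}]
    })

    const buffer = new TextBuffer()
    buffer.setText('f({ a: 1 })')
    buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))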
@@ -133,6 +133,8 @@
|
||||
'cmd-ctrl-left': 'editor:move-selection-left'
|
||||
'cmd-ctrl-right': 'editor:move-selection-right'
|
||||
'cmd-shift-V': 'editor:paste-without-reformatting'
|
||||
'alt-up': 'editor:select-larger-syntax-node'
|
||||
'alt-down': 'editor:select-smaller-syntax-node'
|
||||
|
||||
# Emacs
|
||||
'alt-f': 'editor:move-to-end-of-word'
|
||||
|
||||
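For reference, the two new macOS bindings above ('alt-up' / 'alt-down') dispatch commands that map to the TextEditor methods added near the end of this diff; a sketch of the equivalent programmatic calls:

    editor.selectLargerSyntaxNode()   // 'editor:select-larger-syntax-node' (alt-up)
    editor.selectSmallerSyntaxNode()  // 'editor:select-smaller-syntax-node' (alt-down)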
13
package.json
@@ -71,6 +71,7 @@
|
||||
"sinon": "1.17.4",
|
||||
"temp": "^0.8.3",
|
||||
"text-buffer": "13.10.1",
|
||||
"tree-sitter": "^0.8.4",
|
||||
"typescript-simple": "1.0.0",
|
||||
"underscore-plus": "^1.6.6",
|
||||
"winreg": "^1.2.1",
|
||||
@@ -136,18 +137,18 @@
|
||||
"welcome": "0.36.6",
|
||||
"whitespace": "0.37.5",
|
||||
"wrap-guide": "0.40.3",
|
||||
"language-c": "0.58.1",
|
||||
"language-c": "0.59.0-3",
|
||||
"language-clojure": "0.22.5",
|
||||
"language-coffee-script": "0.49.3",
|
||||
"language-csharp": "0.14.4",
|
||||
"language-css": "0.42.8",
|
||||
"language-gfm": "0.90.3",
|
||||
"language-git": "0.19.1",
|
||||
"language-go": "0.44.4",
|
||||
"language-go": "0.45.0-4",
|
||||
"language-html": "0.48.5",
|
||||
"language-hyperlink": "0.16.3",
|
||||
"language-java": "0.27.6",
|
||||
"language-javascript": "0.127.7",
|
||||
"language-javascript": "0.128.0-4",
|
||||
"language-json": "0.19.1",
|
||||
"language-less": "0.34.1",
|
||||
"language-make": "0.22.3",
|
||||
@@ -156,17 +157,17 @@
|
||||
"language-perl": "0.38.1",
|
||||
"language-php": "0.43.0",
|
||||
"language-property-list": "0.9.1",
|
||||
"language-python": "0.45.6",
|
||||
"language-python": "0.46.0-2",
|
||||
"language-ruby": "0.71.4",
|
||||
"language-ruby-on-rails": "0.25.3",
|
||||
"language-sass": "0.61.4",
|
||||
"language-shellscript": "0.25.4",
|
||||
"language-shellscript": "0.26.0-3",
|
||||
"language-source": "0.9.0",
|
||||
"language-sql": "0.25.9",
|
||||
"language-text": "0.7.3",
|
||||
"language-todo": "0.29.3",
|
||||
"language-toml": "0.18.1",
|
||||
"language-typescript": "0.2.3",
|
||||
"language-typescript": "0.3.0-3",
|
||||
"language-xml": "0.35.2",
|
||||
"language-yaml": "0.31.1"
|
||||
},
|
||||
|
||||
@@ -58,7 +58,8 @@ module.exports = function (packagedAppPath) {
|
||||
relativePath === path.join('..', 'node_modules', 'spelling-manager', 'node_modules', 'natural', 'lib', 'natural', 'index.js') ||
|
||||
relativePath === path.join('..', 'node_modules', 'tar', 'tar.js') ||
|
||||
relativePath === path.join('..', 'node_modules', 'temp', 'lib', 'temp.js') ||
|
||||
relativePath === path.join('..', 'node_modules', 'tmp', 'lib', 'tmp.js')
|
||||
relativePath === path.join('..', 'node_modules', 'tmp', 'lib', 'tmp.js') ||
|
||||
relativePath === path.join('..', 'node_modules', 'tree-sitter', 'index.js')
|
||||
)
|
||||
}
|
||||
}).then((snapshotScript) => {
|
||||
|
||||
@@ -106,6 +106,15 @@ describe "Config", ->
|
||||
atom.config.set("foo.bar.baz", 1, scopeSelector: ".source.coffee", source: "some-package")
|
||||
expect(atom.config.get("foo.bar.baz", scope: [".source.coffee"])).toBe 100
|
||||
|
||||
describe "when the first component of the scope descriptor matches a legacy scope alias", ->
|
||||
it "falls back to properties defined for the legacy scope if no value is found for the original scope descriptor", ->
|
||||
atom.config.addLegacyScopeAlias('javascript', '.source.js')
|
||||
atom.config.set('foo', 100, scopeSelector: '.source.js')
|
||||
atom.config.set('foo', 200, scopeSelector: 'javascript for_statement')
|
||||
|
||||
expect(atom.config.get('foo', scope: ['javascript', 'for_statement', 'identifier'])).toBe(200)
|
||||
expect(atom.config.get('foo', scope: ['javascript', 'function', 'identifier'])).toBe(100)
|
||||
|
||||
describe ".getAll(keyPath, {scope, sources, excludeSources})", ->
|
||||
it "reads all of the values for a given key-path", ->
|
||||
expect(atom.config.set("foo", 41)).toBe true
|
||||
@@ -130,6 +139,20 @@ describe "Config", ->
|
||||
{scopeSelector: '*', value: 40}
|
||||
]
|
||||
|
||||
describe "when the first component of the scope descriptor matches a legacy scope alias", ->
|
||||
it "includes the values defined for the legacy scope", ->
|
||||
atom.config.addLegacyScopeAlias('javascript', '.source.js')
|
||||
|
||||
expect(atom.config.set('foo', 41)).toBe true
|
||||
expect(atom.config.set('foo', 42, scopeSelector: 'javascript')).toBe true
|
||||
expect(atom.config.set('foo', 43, scopeSelector: '.source.js')).toBe true
|
||||
|
||||
expect(atom.config.getAll('foo', scope: ['javascript'])).toEqual([
|
||||
{scopeSelector: 'javascript', value: 42},
|
||||
{scopeSelector: '.source.js', value: 43},
|
||||
{scopeSelector: '*', value: 41}
|
||||
])
|
||||
|
||||
describe ".set(keyPath, value, {source, scopeSelector})", ->
|
||||
it "allows a key path's value to be written", ->
|
||||
expect(atom.config.set("foo.bar.baz", 42)).toBe true
|
||||
|
||||
1
spec/fixtures/packages/package-with-tree-sitter-grammar/grammars/fake-parser.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
exports.isFakeTreeSitterParser = true
|
||||
14
spec/fixtures/packages/package-with-tree-sitter-grammar/grammars/some-language.cson
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
name: 'Some Language'
|
||||
|
||||
id: 'some-language'
|
||||
|
||||
type: 'tree-sitter'
|
||||
|
||||
parser: './fake-parser'
|
||||
|
||||
fileTypes: [
|
||||
'somelang'
|
||||
]
|
||||
|
||||
scopes:
|
||||
'class > identifier': 'entity.name.type.class'
|
||||
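These two fixture files work together: the grammar's parser field ('./fake-parser') is presumably resolved relative to the grammar file and required, and the resulting module is exposed as grammar.languageModule — the PackageManager spec later in this diff asserts exactly that. A sketch of how the fixture is exercised (inside an async spec, as in that test):

    await atom.packages.activatePackage('package-with-tree-sitter-grammar')
    const grammar = atom.grammars.selectGrammar('test.somelang')
    grammar.name                                   // => 'Some Language'
    grammar.languageModule.isFakeTreeSitterParser  // => true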
@@ -1,10 +1,13 @@
|
||||
const {it, fit, ffit, fffit, beforeEach, afterEach} = require('./async-spec-helpers')
|
||||
|
||||
const dedent = require('dedent')
|
||||
const path = require('path')
|
||||
const fs = require('fs-plus')
|
||||
const temp = require('temp').track()
|
||||
const TextBuffer = require('text-buffer')
|
||||
const GrammarRegistry = require('../src/grammar-registry')
|
||||
const TreeSitterGrammar = require('../src/tree-sitter-grammar')
|
||||
const FirstMate = require('first-mate')
|
||||
|
||||
describe('GrammarRegistry', () => {
|
||||
let grammarRegistry
|
||||
@@ -13,8 +16,8 @@ describe('GrammarRegistry', () => {
|
||||
grammarRegistry = new GrammarRegistry({config: atom.config})
|
||||
})
|
||||
|
||||
describe('.assignLanguageMode(buffer, languageName)', () => {
|
||||
it('assigns to the buffer a language mode with the given language name', async () => {
|
||||
describe('.assignLanguageMode(buffer, languageId)', () => {
|
||||
it('assigns to the buffer a language mode with the given language id', async () => {
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-css/grammars/css.cson'))
|
||||
|
||||
@@ -34,7 +37,7 @@ describe('GrammarRegistry', () => {
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe('source.css')
|
||||
})
|
||||
|
||||
describe('when no languageName is passed', () => {
|
||||
describe('when no languageId is passed', () => {
|
||||
it('makes the buffer use the null grammar', () => {
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-css/grammars/css.cson'))
|
||||
|
||||
@@ -48,6 +51,36 @@ describe('GrammarRegistry', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('.grammarForId(languageId)', () => {
|
||||
it('converts the language id to a text-mate language id when `core.useTreeSitterParsers` is false', () => {
|
||||
atom.config.set('core.useTreeSitterParsers', false)
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/tree-sitter-javascript.cson'))
|
||||
|
||||
const grammar = grammarRegistry.grammarForId('javascript')
|
||||
expect(grammar instanceof FirstMate.Grammar).toBe(true)
|
||||
expect(grammar.scopeName).toBe('source.js')
|
||||
|
||||
grammarRegistry.removeGrammar(grammar)
|
||||
expect(grammarRegistry.grammarForId('javascript')).toBe(undefined)
|
||||
})
|
||||
|
||||
it('converts the language id to a tree-sitter language id when `core.useTreeSitterParsers` is true', () => {
|
||||
atom.config.set('core.useTreeSitterParsers', true)
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/tree-sitter-javascript.cson'))
|
||||
|
||||
const grammar = grammarRegistry.grammarForId('source.js')
|
||||
expect(grammar instanceof TreeSitterGrammar).toBe(true)
|
||||
expect(grammar.id).toBe('javascript')
|
||||
|
||||
grammarRegistry.removeGrammar(grammar)
|
||||
expect(grammarRegistry.grammarForId('source.js') instanceof FirstMate.Grammar).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('.autoAssignLanguageMode(buffer)', () => {
|
||||
it('assigns to the buffer a language mode based on the best available grammar', () => {
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
@@ -78,7 +111,9 @@ describe('GrammarRegistry', () => {
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe('source.c')
|
||||
})
|
||||
|
||||
it('updates the buffer\'s grammar when a more appropriate grammar is added for its path', async () => {
|
||||
it('updates the buffer\'s grammar when a more appropriate text-mate grammar is added for its path', async () => {
|
||||
atom.config.set('core.useTreeSitterParsers', false)
|
||||
|
||||
const buffer = new TextBuffer()
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe(null)
|
||||
|
||||
@@ -87,6 +122,25 @@ describe('GrammarRegistry', () => {
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe('source.js')
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/tree-sitter-javascript.cson'))
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe('source.js')
|
||||
})
|
||||
|
||||
it('updates the buffer\'s grammar when a more appropriate tree-sitter grammar is added for its path', async () => {
|
||||
atom.config.set('core.useTreeSitterParsers', true)
|
||||
|
||||
const buffer = new TextBuffer()
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe(null)
|
||||
|
||||
buffer.setPath('test.js')
|
||||
grammarRegistry.maintainLanguageMode(buffer)
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/tree-sitter-javascript.cson'))
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe('javascript')
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
expect(buffer.getLanguageMode().getLanguageId()).toBe('javascript')
|
||||
})
|
||||
|
||||
it('can be overridden by calling .assignLanguageMode', () => {
|
||||
@@ -226,6 +280,32 @@ describe('GrammarRegistry', () => {
|
||||
expect(atom.grammars.selectGrammar('/hu.git/config').name).toBe('Null Grammar')
|
||||
})
|
||||
|
||||
describe('when the grammar has a contentRegExp field', () => {
|
||||
it('favors grammars whose contentRegExp matches a prefix of the file\'s content', () => {
|
||||
atom.grammars.addGrammar({
|
||||
id: 'javascript-1',
|
||||
fileTypes: ['js']
|
||||
})
|
||||
atom.grammars.addGrammar({
|
||||
id: 'flow-javascript',
|
||||
contentRegExp: new RegExp('//.*@flow'),
|
||||
fileTypes: ['js']
|
||||
})
|
||||
atom.grammars.addGrammar({
|
||||
id: 'javascript-2',
|
||||
fileTypes: ['js']
|
||||
})
|
||||
|
||||
const selectedGrammar = atom.grammars.selectGrammar('test.js', dedent`
|
||||
// Copyright EvilCorp
|
||||
// @flow
|
||||
|
||||
module.exports = function () { return 1 + 1 }
|
||||
`)
|
||||
expect(selectedGrammar.id).toBe('flow-javascript')
|
||||
})
|
||||
})
|
||||
|
||||
it("uses the filePath's shebang line if the grammar cannot be determined by the extension or basename", async () => {
|
||||
await atom.packages.activatePackage('language-javascript')
|
||||
await atom.packages.activatePackage('language-ruby')
|
||||
@@ -335,14 +415,38 @@ describe('GrammarRegistry', () => {
|
||||
await atom.packages.activatePackage('language-javascript')
|
||||
expect(atom.grammars.selectGrammar('foo.rb', '#!/usr/bin/env node').scopeName).toBe('source.ruby')
|
||||
})
|
||||
|
||||
describe('tree-sitter vs text-mate', () => {
|
||||
it('favors a text-mate grammar over a tree-sitter grammar when `core.useTreeSitterParsers` is false', () => {
|
||||
atom.config.set('core.useTreeSitterParsers', false)
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/tree-sitter-javascript.cson'))
|
||||
|
||||
const grammar = grammarRegistry.selectGrammar('test.js')
|
||||
expect(grammar.scopeName).toBe('source.js')
|
||||
expect(grammar instanceof FirstMate.Grammar).toBe(true)
|
||||
})
|
||||
|
||||
it('favors a tree-sitter grammar over a text-mate grammar when `core.useTreeSitterParsers` is true', () => {
|
||||
atom.config.set('core.useTreeSitterParsers', true)
|
||||
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
|
||||
grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/tree-sitter-javascript.cson'))
|
||||
|
||||
const grammar = grammarRegistry.selectGrammar('test.js')
|
||||
expect(grammar.id).toBe('javascript')
|
||||
expect(grammar instanceof TreeSitterGrammar).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('.removeGrammar(grammar)', () => {
|
||||
it("removes the grammar, so it won't be returned by selectGrammar", async () => {
|
||||
await atom.packages.activatePackage('language-javascript')
|
||||
const grammar = atom.grammars.selectGrammar('foo.js')
|
||||
await atom.packages.activatePackage('language-css')
|
||||
const grammar = atom.grammars.selectGrammar('foo.css')
|
||||
atom.grammars.removeGrammar(grammar)
|
||||
expect(atom.grammars.selectGrammar('foo.js').name).not.toBe(grammar.name)
|
||||
expect(atom.grammars.selectGrammar('foo.css').name).not.toBe(grammar.name)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -1030,6 +1030,13 @@ describe('PackageManager', () => {
|
||||
expect(atom.grammars.selectGrammar('a.alot').name).toBe('Alot')
|
||||
expect(atom.grammars.selectGrammar('a.alittle').name).toBe('Alittle')
|
||||
})
|
||||
|
||||
it('loads any tree-sitter grammars defined in the package', async () => {
|
||||
await atom.packages.activatePackage('package-with-tree-sitter-grammar')
|
||||
const grammar = atom.grammars.selectGrammar('test.somelang')
|
||||
expect(grammar.name).toBe('Some Language')
|
||||
expect(grammar.languageModule.isFakeTreeSitterParser).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('scoped-property loading', () => {
|
||||
|
||||
@@ -111,7 +111,8 @@ beforeEach ->
|
||||
new CompositeDisposable(
|
||||
@emitter.on("did-tokenize", callback),
|
||||
@onDidChangeGrammar =>
|
||||
if @buffer.getLanguageMode().tokenizeInBackground.originalValue
|
||||
languageMode = @buffer.getLanguageMode()
|
||||
if languageMode.tokenizeInBackground?.originalValue
|
||||
callback()
|
||||
)
|
||||
|
||||
|
||||
77
spec/syntax-scope-map-spec.js
Normal file
@@ -0,0 +1,77 @@
|
||||
const SyntaxScopeMap = require('../src/syntax-scope-map')
|
||||
|
||||
describe('SyntaxScopeMap', () => {
|
||||
it('can match immediate child selectors', () => {
|
||||
const map = new SyntaxScopeMap({
|
||||
'a > b > c': 'x',
|
||||
'b > c': 'y',
|
||||
'c': 'z'
|
||||
})
|
||||
|
||||
expect(map.get(['a', 'b', 'c'], [0, 0, 0])).toBe('x')
|
||||
expect(map.get(['d', 'b', 'c'], [0, 0, 0])).toBe('y')
|
||||
expect(map.get(['d', 'e', 'c'], [0, 0, 0])).toBe('z')
|
||||
expect(map.get(['e', 'c'], [0, 0, 0])).toBe('z')
|
||||
expect(map.get(['c'], [0, 0, 0])).toBe('z')
|
||||
expect(map.get(['d'], [0, 0, 0])).toBe(undefined)
|
||||
})
|
||||
|
||||
it('can match :nth-child pseudo-selectors on leaves', () => {
|
||||
const map = new SyntaxScopeMap({
|
||||
'a > b': 'w',
|
||||
'a > b:nth-child(1)': 'x',
|
||||
'b': 'y',
|
||||
'b:nth-child(2)': 'z'
|
||||
})
|
||||
|
||||
expect(map.get(['a', 'b'], [0, 0])).toBe('w')
|
||||
expect(map.get(['a', 'b'], [0, 1])).toBe('x')
|
||||
expect(map.get(['a', 'b'], [0, 2])).toBe('w')
|
||||
expect(map.get(['b'], [0])).toBe('y')
|
||||
expect(map.get(['b'], [1])).toBe('y')
|
||||
expect(map.get(['b'], [2])).toBe('z')
|
||||
})
|
||||
|
||||
it('can match :nth-child pseudo-selectors on interior nodes', () => {
|
||||
const map = new SyntaxScopeMap({
|
||||
'b:nth-child(1) > c': 'w',
|
||||
'a > b > c': 'x',
|
||||
'a > b:nth-child(2) > c': 'y'
|
||||
})
|
||||
|
||||
expect(map.get(['b', 'c'], [0, 0])).toBe(undefined)
|
||||
expect(map.get(['b', 'c'], [1, 0])).toBe('w')
|
||||
expect(map.get(['a', 'b', 'c'], [1, 0, 0])).toBe('x')
|
||||
expect(map.get(['a', 'b', 'c'], [1, 2, 0])).toBe('y')
|
||||
})
|
||||
|
||||
it('allows anonymous tokens to be referred to by their string value', () => {
|
||||
const map = new SyntaxScopeMap({
|
||||
'"b"': 'w',
|
||||
'a > "b"': 'x',
|
||||
'a > "b":nth-child(1)': 'y'
|
||||
})
|
||||
|
||||
expect(map.get(['b'], [0], true)).toBe(undefined)
|
||||
expect(map.get(['b'], [0], false)).toBe('w')
|
||||
expect(map.get(['a', 'b'], [0, 0], false)).toBe('x')
|
||||
expect(map.get(['a', 'b'], [0, 1], false)).toBe('y')
|
||||
})
|
||||
|
||||
it('supports the wildcard selector', () => {
|
||||
const map = new SyntaxScopeMap({
|
||||
'*': 'w',
|
||||
'a > *': 'x',
|
||||
'a > *:nth-child(1)': 'y',
|
||||
'a > *:nth-child(1) > b': 'z'
|
||||
})
|
||||
|
||||
expect(map.get(['b'], [0])).toBe('w')
|
||||
expect(map.get(['c'], [0])).toBe('w')
|
||||
expect(map.get(['a', 'b'], [0, 0])).toBe('x')
|
||||
expect(map.get(['a', 'b'], [0, 1])).toBe('y')
|
||||
expect(map.get(['a', 'c'], [0, 1])).toBe('y')
|
||||
expect(map.get(['a', 'c', 'b'], [0, 1, 1])).toBe('z')
|
||||
expect(map.get(['a', 'c', 'b'], [0, 2, 1])).toBe('w')
|
||||
})
|
||||
})
|
||||
560
spec/tree-sitter-language-mode-spec.js
Normal file
@@ -0,0 +1,560 @@
|
||||
const {it, fit, ffit, fffit, beforeEach, afterEach} = require('./async-spec-helpers')
|
||||
|
||||
const dedent = require('dedent')
|
||||
const TextBuffer = require('text-buffer')
|
||||
const {Point} = TextBuffer
|
||||
const TextEditor = require('../src/text-editor')
|
||||
const TreeSitterGrammar = require('../src/tree-sitter-grammar')
|
||||
const TreeSitterLanguageMode = require('../src/tree-sitter-language-mode')
|
||||
|
||||
const cGrammarPath = require.resolve('language-c/grammars/tree-sitter-c.cson')
|
||||
const pythonGrammarPath = require.resolve('language-python/grammars/tree-sitter-python.cson')
|
||||
const jsGrammarPath = require.resolve('language-javascript/grammars/tree-sitter-javascript.cson')
|
||||
|
||||
describe('TreeSitterLanguageMode', () => {
|
||||
let editor, buffer
|
||||
|
||||
beforeEach(async () => {
|
||||
editor = await atom.workspace.open('')
|
||||
buffer = editor.getBuffer()
|
||||
})
|
||||
|
||||
describe('highlighting', () => {
|
||||
it('applies the most specific scope mapping to each node in the syntax tree', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
scopes: {
|
||||
'program': 'source',
|
||||
'call_expression > identifier': 'function',
|
||||
'property_identifier': 'property',
|
||||
'call_expression > member_expression > property_identifier': 'method'
|
||||
}
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText('aa.bbb = cc(d.eee());')
|
||||
expectTokensToEqual(editor, [[
|
||||
{text: 'aa.', scopes: ['source']},
|
||||
{text: 'bbb', scopes: ['source', 'property']},
|
||||
{text: ' = ', scopes: ['source']},
|
||||
{text: 'cc', scopes: ['source', 'function']},
|
||||
{text: '(d.', scopes: ['source']},
|
||||
{text: 'eee', scopes: ['source', 'method']},
|
||||
{text: '());', scopes: ['source']}
|
||||
]])
|
||||
})
|
||||
|
||||
it('can start or end multiple scopes at the same position', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
scopes: {
|
||||
'program': 'source',
|
||||
'call_expression': 'call',
|
||||
'member_expression': 'member',
|
||||
'identifier': 'variable',
|
||||
'"("': 'open-paren',
|
||||
'")"': 'close-paren',
|
||||
}
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText('a = bb.ccc();')
|
||||
expectTokensToEqual(editor, [[
|
||||
{text: 'a', scopes: ['source', 'variable']},
|
||||
{text: ' = ', scopes: ['source']},
|
||||
{text: 'bb', scopes: ['source', 'call', 'member', 'variable']},
|
||||
{text: '.ccc', scopes: ['source', 'call', 'member']},
|
||||
{text: '(', scopes: ['source', 'call', 'open-paren']},
|
||||
{text: ')', scopes: ['source', 'call', 'close-paren']},
|
||||
{text: ';', scopes: ['source']}
|
||||
]])
|
||||
})
|
||||
|
||||
it('can resume highlighting on a line that starts with whitespace', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
scopes: {
|
||||
'call_expression > member_expression > property_identifier': 'function',
|
||||
'property_identifier': 'member',
|
||||
'identifier': 'variable'
|
||||
}
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText('a\n .b();')
|
||||
expectTokensToEqual(editor, [
|
||||
[
|
||||
{text: 'a', scopes: ['variable']},
|
||||
],
|
||||
[
|
||||
{text: ' ', scopes: ['whitespace']},
|
||||
{text: '.', scopes: []},
|
||||
{text: 'b', scopes: ['function']},
|
||||
{text: '();', scopes: []}
|
||||
]
|
||||
])
|
||||
})
|
||||
|
||||
it('correctly skips over tokens with zero size', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, cGrammarPath, {
|
||||
parser: 'tree-sitter-c',
|
||||
scopes: {
|
||||
'primitive_type': 'type',
|
||||
'identifier': 'variable',
|
||||
}
|
||||
})
|
||||
|
||||
const languageMode = new TreeSitterLanguageMode({buffer, grammar})
|
||||
buffer.setLanguageMode(languageMode)
|
||||
buffer.setText('int main() {\n int a\n int b;\n}');
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
expect(
|
||||
languageMode.document.rootNode.descendantForPosition(Point(1, 2), Point(1, 6)).toString()
|
||||
).toBe('(declaration (primitive_type) (identifier) (MISSING))')
|
||||
|
||||
expectTokensToEqual(editor, [
|
||||
[
|
||||
{text: 'int', scopes: ['type']},
|
||||
{text: ' ', scopes: []},
|
||||
{text: 'main', scopes: ['variable']},
|
||||
{text: '() {', scopes: []}
|
||||
],
|
||||
[
|
||||
{text: ' ', scopes: ['whitespace']},
|
||||
{text: 'int', scopes: ['type']},
|
||||
{text: ' ', scopes: []},
|
||||
{text: 'a', scopes: ['variable']}
|
||||
],
|
||||
[
|
||||
{text: ' ', scopes: ['whitespace']},
|
||||
{text: 'int', scopes: ['type']},
|
||||
{text: ' ', scopes: []},
|
||||
{text: 'b', scopes: ['variable']},
|
||||
{text: ';', scopes: []}
|
||||
],
|
||||
[
|
||||
{text: '}', scopes: []}
|
||||
]
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('folding', () => {
|
||||
beforeEach(() => {
|
||||
editor.displayLayer.reset({foldCharacter: '…'})
|
||||
})
|
||||
|
||||
it('can fold nodes that start and end with specified tokens', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
folds: [
|
||||
{
|
||||
start: {type: '{', index: 0},
|
||||
end: {type: '}', index: -1}
|
||||
},
|
||||
{
|
||||
start: {type: '(', index: 0},
|
||||
end: {type: ')', index: -1}
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText(dedent `
|
||||
module.exports =
|
||||
class A {
|
||||
getB (c,
|
||||
d,
|
||||
e) {
|
||||
return this.f(g)
|
||||
}
|
||||
}
|
||||
`)
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
|
||||
expect(editor.isFoldableAtBufferRow(0)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(1)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(2)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(3)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(4)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(5)).toBe(false)
|
||||
|
||||
editor.foldBufferRow(2)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
module.exports =
|
||||
class A {
|
||||
getB (…) {
|
||||
return this.f(g)
|
||||
}
|
||||
}
|
||||
`)
|
||||
|
||||
editor.foldBufferRow(4)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
module.exports =
|
||||
class A {
|
||||
getB (…) {…}
|
||||
}
|
||||
`)
|
||||
})
|
||||
|
||||
it('can fold nodes of specified types', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
folds: [
|
||||
// Start the fold after the first child (the opening tag) and end it at the last child
|
||||
// (the closing tag).
|
||||
{
|
||||
type: 'jsx_element',
|
||||
start: {index: 0},
|
||||
end: {index: -1}
|
||||
},
|
||||
|
||||
// End the fold at the *second* to last child of the self-closing tag: the `/`.
|
||||
{
|
||||
type: 'jsx_self_closing_element',
|
||||
start: {index: 1},
|
||||
end: {index: -2}
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText(dedent `
|
||||
const element1 = <Element
|
||||
className='submit'
|
||||
id='something' />
|
||||
|
||||
const element2 = <Element>
|
||||
<span>hello</span>
|
||||
<span>world</span>
|
||||
</Element>
|
||||
`)
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
|
||||
expect(editor.isFoldableAtBufferRow(0)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(1)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(2)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(3)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(4)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(5)).toBe(false)
|
||||
|
||||
editor.foldBufferRow(0)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
const element1 = <Element…/>
|
||||
|
||||
const element2 = <Element>
|
||||
<span>hello</span>
|
||||
<span>world</span>
|
||||
</Element>
|
||||
`)
|
||||
|
||||
editor.foldBufferRow(4)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
const element1 = <Element…/>
|
||||
|
||||
const element2 = <Element>…
|
||||
</Element>
|
||||
`)
|
||||
})
|
||||
|
||||
it('can fold entire nodes when no start or end parameters are specified', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
folds: [
|
||||
// By default, for a node with no children, folds are started at the *end* of the first
|
||||
// line of a node, and ended at the *beginning* of the last line.
|
||||
{type: 'comment'}
|
||||
]
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText(dedent `
|
||||
/**
|
||||
* Important
|
||||
*/
|
||||
const x = 1 /*
|
||||
Also important
|
||||
*/
|
||||
`)
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
|
||||
expect(editor.isFoldableAtBufferRow(0)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(1)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(2)).toBe(false)
|
||||
expect(editor.isFoldableAtBufferRow(3)).toBe(true)
|
||||
expect(editor.isFoldableAtBufferRow(4)).toBe(false)
|
||||
|
||||
editor.foldBufferRow(0)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
/**… */
|
||||
const x = 1 /*
|
||||
Also important
|
||||
*/
|
||||
`)
|
||||
|
||||
editor.foldBufferRow(3)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
/**… */
|
||||
const x = 1 /*…*/
|
||||
`)
|
||||
})
|
||||
|
||||
it('tries each folding strategy for a given node in the order specified', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, cGrammarPath, {
|
||||
parser: 'tree-sitter-c',
|
||||
folds: [
|
||||
// If the #ifdef has an `#else` clause, then end the fold there.
|
||||
{
|
||||
type: ['preproc_ifdef', 'preproc_elif'],
|
||||
start: {index: 1},
|
||||
end: {type: ['preproc_else', 'preproc_elif']}
|
||||
},
|
||||
|
||||
// Otherwise, end the fold at the last child - the `#endif`.
|
||||
{
|
||||
type: 'preproc_ifdef',
|
||||
start: {index: 1},
|
||||
end: {index: -1}
|
||||
},
|
||||
|
||||
// When folding an `#else` clause, the fold extends to the end of the clause.
|
||||
{
|
||||
type: 'preproc_else',
|
||||
start: {index: 0}
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
|
||||
buffer.setText(dedent `
|
||||
#ifndef FOO_H_
|
||||
#define FOO_H_
|
||||
|
||||
#ifdef _WIN32
|
||||
|
||||
#include <windows.h>
|
||||
const char *path_separator = "\\";
|
||||
|
||||
#elif defined MACOS
|
||||
|
||||
#include <carbon.h>
|
||||
const char *path_separator = "/";
|
||||
|
||||
#else
|
||||
|
||||
#include <dirent.h>
|
||||
const char *path_separator = "/";
|
||||
|
||||
#endif
|
||||
|
||||
#endif
|
||||
`)
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
|
||||
editor.foldBufferRow(3)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
#ifndef FOO_H_
|
||||
#define FOO_H_
|
||||
|
||||
#ifdef _WIN32…
|
||||
#elif defined MACOS
|
||||
|
||||
#include <carbon.h>
|
||||
const char *path_separator = "/";
|
||||
|
||||
#else
|
||||
|
||||
#include <dirent.h>
|
||||
const char *path_separator = "/";
|
||||
|
||||
#endif
|
||||
|
||||
#endif
|
||||
`)
|
||||
|
||||
editor.foldBufferRow(8)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
#ifndef FOO_H_
|
||||
#define FOO_H_
|
||||
|
||||
#ifdef _WIN32…
|
||||
#elif defined MACOS…
|
||||
#else
|
||||
|
||||
#include <dirent.h>
|
||||
const char *path_separator = "/";
|
||||
|
||||
#endif
|
||||
|
||||
#endif
|
||||
`)
|
||||
|
||||
editor.foldBufferRow(0)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
#ifndef FOO_H_…
|
||||
#endif
|
||||
`)
|
||||
|
||||
editor.foldAllAtIndentLevel(1)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
#ifndef FOO_H_
|
||||
#define FOO_H_
|
||||
|
||||
#ifdef _WIN32…
|
||||
#elif defined MACOS…
|
||||
#else…
|
||||
|
||||
#endif
|
||||
|
||||
#endif
|
||||
`)
|
||||
})
|
||||
|
||||
describe('when folding a node that ends with a line break', () => {
|
||||
it('ends the fold at the end of the previous line', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, pythonGrammarPath, {
|
||||
parser: 'tree-sitter-python',
|
||||
folds: [
|
||||
{
|
||||
type: 'function_definition',
|
||||
start: {type: ':'}
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
|
||||
buffer.setText(dedent `
|
||||
def ab():
|
||||
print 'a'
|
||||
print 'b'
|
||||
|
||||
def cd():
|
||||
print 'c'
|
||||
print 'd'
|
||||
`)
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
|
||||
editor.foldBufferRow(0)
|
||||
expect(getDisplayText(editor)).toBe(dedent `
|
||||
def ab():…
|
||||
|
||||
def cd():
|
||||
print 'c'
|
||||
print 'd'
|
||||
`)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('.scopeDescriptorForPosition', () => {
|
||||
it('returns a scope descriptor representing the given position in the syntax tree', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
id: 'javascript',
|
||||
parser: 'tree-sitter-javascript'
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
|
||||
buffer.setText('foo({bar: baz});')
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
expect(editor.scopeDescriptorForBufferPosition({row: 0, column: 6}).getScopesArray()).toEqual([
|
||||
'javascript',
|
||||
'program',
|
||||
'expression_statement',
|
||||
'call_expression',
|
||||
'arguments',
|
||||
'object',
|
||||
'pair',
|
||||
'property_identifier'
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('TextEditor.selectLargerSyntaxNode and .selectSmallerSyntaxNode', () => {
|
||||
it('expands and contract the selection based on the syntax tree', () => {
|
||||
const grammar = new TreeSitterGrammar(atom.grammars, jsGrammarPath, {
|
||||
parser: 'tree-sitter-javascript',
|
||||
scopes: {'program': 'source'}
|
||||
})
|
||||
|
||||
buffer.setLanguageMode(new TreeSitterLanguageMode({buffer, grammar}))
|
||||
buffer.setText(dedent `
|
||||
function a (b, c, d) {
|
||||
eee.f()
|
||||
g()
|
||||
}
|
||||
`)
|
||||
|
||||
editor.screenLineForScreenRow(0)
|
||||
|
||||
editor.setCursorBufferPosition([1, 3])
|
||||
editor.selectLargerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('eee')
|
||||
editor.selectLargerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('eee.f')
|
||||
editor.selectLargerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('eee.f()')
|
||||
editor.selectLargerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('{\n eee.f()\n g()\n}')
|
||||
editor.selectLargerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('function a (b, c, d) {\n eee.f()\n g()\n}')
|
||||
|
||||
editor.selectSmallerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('{\n eee.f()\n g()\n}')
|
||||
editor.selectSmallerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('eee.f()')
|
||||
editor.selectSmallerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('eee.f')
|
||||
editor.selectSmallerSyntaxNode()
|
||||
expect(editor.getSelectedText()).toBe('eee')
|
||||
editor.selectSmallerSyntaxNode()
|
||||
expect(editor.getSelectedBufferRange()).toEqual([[1, 3], [1, 3]])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
function getDisplayText (editor) {
|
||||
return editor.displayLayer.getText()
|
||||
}
|
||||
|
||||
function expectTokensToEqual (editor, expectedTokenLines) {
|
||||
const lastRow = editor.getLastScreenRow()
|
||||
|
||||
// Assert that the correct tokens are returned regardless of which row
|
||||
// the highlighting iterator starts on.
|
||||
for (let startRow = 0; startRow <= lastRow; startRow++) {
|
||||
editor.displayLayer.clearSpatialIndex()
|
||||
editor.displayLayer.getScreenLines(startRow, Infinity)
|
||||
|
||||
const tokenLines = []
|
||||
for (let row = startRow; row <= lastRow; row++) {
|
||||
tokenLines[row] = editor.tokensForScreenRow(row).map(({text, scopes}) => ({
|
||||
text,
|
||||
scopes: scopes.map(scope => scope
|
||||
.split(' ')
|
||||
.map(className => className.slice('syntax--'.length))
|
||||
.join(' '))
|
||||
}))
|
||||
}
|
||||
|
||||
for (let row = startRow; row <= lastRow; row++) {
|
||||
const tokenLine = tokenLines[row]
|
||||
const expectedTokenLine = expectedTokenLines[row]
|
||||
|
||||
expect(tokenLine.length).toEqual(expectedTokenLine.length)
|
||||
for (let i = 0; i < tokenLine.length; i++) {
|
||||
expect(tokenLine[i]).toEqual(expectedTokenLine[i], `Token ${i}, startRow: ${startRow}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -342,6 +342,11 @@ const configSchema = {
|
||||
description: 'Emulated with Atom events'
|
||||
}
|
||||
]
|
||||
},
|
||||
useTreeSitterParsers: {
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Use the new Tree-sitter parsing system for supported languages'
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
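The setting added above defaults to false, so Tree-sitter grammars are opt-in; the grammar-registry specs in this diff toggle it explicitly:

    atom.config.set('core.useTreeSitterParsers', true)   // prefer Tree-sitter grammars when both kinds are loaded
    atom.config.set('core.useTreeSitterParsers', false)  // prefer TextMate (first-mate) grammars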
@@ -423,6 +423,7 @@ class Config
|
||||
@configFileHasErrors = false
|
||||
@transactDepth = 0
|
||||
@pendingOperations = []
|
||||
@legacyScopeAliases = {}
|
||||
|
||||
@requestLoad = _.debounce =>
|
||||
@loadUserConfig()
|
||||
@@ -599,11 +600,22 @@ class Config
|
||||
# * `value` The value for the key-path
|
||||
getAll: (keyPath, options) ->
|
||||
{scope} = options if options?
|
||||
result = []
|
||||
|
||||
if scope?
|
||||
scopeDescriptor = ScopeDescriptor.fromObject(scope)
|
||||
result = result.concat @scopedSettingsStore.getAll(scopeDescriptor.getScopeChain(), keyPath, options)
|
||||
result = @scopedSettingsStore.getAll(
|
||||
scopeDescriptor.getScopeChain(),
|
||||
keyPath,
|
||||
options
|
||||
)
|
||||
if legacyScopeDescriptor = @getLegacyScopeDescriptor(scopeDescriptor)
|
||||
result.push(@scopedSettingsStore.getAll(
|
||||
legacyScopeDescriptor.getScopeChain(),
|
||||
keyPath,
|
||||
options
|
||||
)...)
|
||||
else
|
||||
result = []
|
||||
|
||||
if globalValue = @getRawValue(keyPath, options)
|
||||
result.push(scopeSelector: '*', value: globalValue)
|
||||
@@ -762,6 +774,12 @@ class Config
|
||||
finally
|
||||
@endTransaction()
|
||||
|
||||
addLegacyScopeAlias: (languageId, legacyScopeName) ->
|
||||
@legacyScopeAliases[languageId] = legacyScopeName
|
||||
|
||||
removeLegacyScopeAlias: (languageId) ->
|
||||
delete @legacyScopeAliases[languageId]
|
||||
|
||||
###
|
||||
Section: Internal methods used by core
|
||||
###
|
||||
@@ -1145,7 +1163,20 @@ class Config
|
||||
|
||||
getRawScopedValue: (scopeDescriptor, keyPath, options) ->
|
||||
scopeDescriptor = ScopeDescriptor.fromObject(scopeDescriptor)
|
||||
@scopedSettingsStore.getPropertyValue(scopeDescriptor.getScopeChain(), keyPath, options)
|
||||
result = @scopedSettingsStore.getPropertyValue(
|
||||
scopeDescriptor.getScopeChain(),
|
||||
keyPath,
|
||||
options
|
||||
)
|
||||
|
||||
if result?
|
||||
result
|
||||
else if legacyScopeDescriptor = @getLegacyScopeDescriptor(scopeDescriptor)
|
||||
@scopedSettingsStore.getPropertyValue(
|
||||
legacyScopeDescriptor.getScopeChain(),
|
||||
keyPath,
|
||||
options
|
||||
)
|
||||
|
||||
observeScopedKeyPath: (scope, keyPath, callback) ->
|
||||
callback(@get(keyPath, {scope}))
|
||||
@@ -1160,6 +1191,13 @@ class Config
|
||||
oldValue = newValue
|
||||
callback(event)
|
||||
|
||||
getLegacyScopeDescriptor: (scopeDescriptor) ->
|
||||
legacyAlias = @legacyScopeAliases[scopeDescriptor.scopes[0]]
|
||||
if legacyAlias
|
||||
scopes = scopeDescriptor.scopes.slice()
|
||||
scopes[0] = legacyAlias
|
||||
new ScopeDescriptor({scopes})
|
||||
|
||||
# Base schema enforcers. These will coerce raw input into the specified type,
|
||||
# and will throw an error when the value cannot be coerced. Throwing the error
|
||||
# will indicate that the value should not be set.
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
const _ = require('underscore-plus')
|
||||
const Grim = require('grim')
|
||||
const CSON = require('season')
|
||||
const FirstMate = require('first-mate')
|
||||
const {Disposable, CompositeDisposable} = require('event-kit')
|
||||
const TextMateLanguageMode = require('./text-mate-language-mode')
|
||||
const TreeSitterLanguageMode = require('./tree-sitter-language-mode')
|
||||
const TreeSitterGrammar = require('./tree-sitter-grammar')
|
||||
const Token = require('./token')
|
||||
const fs = require('fs-plus')
|
||||
const {Point, Range} = require('text-buffer')
|
||||
|
||||
const GRAMMAR_SELECTION_RANGE = Range(Point.ZERO, Point(10, 0)).freeze()
|
||||
const GRAMMAR_TYPE_BONUS = 1000
|
||||
const PATH_SPLIT_REGEX = new RegExp('[/.]')
|
||||
|
||||
// Extended: This class holds the grammars used for tokenizing.
|
||||
@@ -24,10 +27,13 @@ class GrammarRegistry {
|
||||
|
||||
clear () {
|
||||
this.textmateRegistry.clear()
|
||||
this.treeSitterGrammarsById = {}
|
||||
if (this.subscriptions) this.subscriptions.dispose()
|
||||
this.subscriptions = new CompositeDisposable()
|
||||
this.languageOverridesByBufferId = new Map()
|
||||
this.grammarScoresByBuffer = new Map()
|
||||
this.textMateScopeNamesByTreeSitterLanguageId = new Map()
|
||||
this.treeSitterLanguageIdsByTextMateScopeName = new Map()
|
||||
|
||||
const grammarAddedOrUpdated = this.grammarAddedOrUpdated.bind(this)
|
||||
this.textmateRegistry.onDidAddGrammar(grammarAddedOrUpdated)
|
||||
@@ -102,17 +108,18 @@ class GrammarRegistry {
|
||||
// Extended: Force a {TextBuffer} to use a different grammar than the
|
||||
// one that would otherwise be selected for it.
|
||||
//
|
||||
// * `buffer` The {TextBuffer} whose gramamr will be set.
|
||||
// * `buffer` The {TextBuffer} whose grammar will be set.
|
||||
// * `languageId` The {String} id of the desired language.
|
||||
//
|
||||
// Returns a {Boolean} that indicates whether the language was successfully
|
||||
// found.
|
||||
assignLanguageMode (buffer, languageId) {
|
||||
if (buffer.getBuffer) buffer = buffer.getBuffer()
|
||||
languageId = this.normalizeLanguageId(languageId)
|
||||
|
||||
let grammar = null
|
||||
if (languageId != null) {
|
||||
grammar = this.textmateRegistry.grammarForScopeName(languageId)
|
||||
grammar = this.grammarForId(languageId)
|
||||
if (!grammar) return false
|
||||
this.languageOverridesByBufferId.set(buffer.id, languageId)
|
||||
} else {
|
||||
@@ -136,7 +143,7 @@ class GrammarRegistry {
|
||||
autoAssignLanguageMode (buffer) {
|
||||
const result = this.selectGrammarWithScore(
|
||||
buffer.getPath(),
|
||||
buffer.getTextInRange(GRAMMAR_SELECTION_RANGE)
|
||||
getGrammarSelectionContent(buffer)
|
||||
)
|
||||
this.languageOverridesByBufferId.delete(buffer.id)
|
||||
this.grammarScoresByBuffer.set(buffer, result.score)
|
||||
@@ -146,7 +153,11 @@ class GrammarRegistry {
|
||||
}
|
||||
|
||||
languageModeForGrammarAndBuffer (grammar, buffer) {
|
||||
return new TextMateLanguageMode({grammar, buffer, config: this.config})
|
||||
if (grammar instanceof TreeSitterGrammar) {
|
||||
return new TreeSitterLanguageMode({grammar, buffer, config: this.config})
|
||||
} else {
|
||||
return new TextMateLanguageMode({grammar, buffer, config: this.config})
|
||||
}
|
||||
}
|
||||
|
||||
// Extended: Select a grammar for the given file path and file contents.
|
||||
@@ -165,39 +176,44 @@ class GrammarRegistry {
|
||||
selectGrammarWithScore (filePath, fileContents) {
|
||||
let bestMatch = null
|
||||
let highestScore = -Infinity
|
||||
for (let grammar of this.textmateRegistry.grammars) {
|
||||
this.forEachGrammar(grammar => {
|
||||
const score = this.getGrammarScore(grammar, filePath, fileContents)
|
||||
if ((score > highestScore) || (bestMatch == null)) {
|
||||
if (score > highestScore || bestMatch == null) {
|
||||
bestMatch = grammar
|
||||
highestScore = score
|
||||
}
|
||||
}
|
||||
})
|
||||
return {grammar: bestMatch, score: highestScore}
|
||||
}
|
||||
|
||||
// Extended: Returns a {Number} representing how well the grammar matches the
|
||||
// `filePath` and `contents`.
|
||||
getGrammarScore (grammar, filePath, contents) {
|
||||
if ((contents == null) && fs.isFileSync(filePath)) {
|
||||
if (contents == null && fs.isFileSync(filePath)) {
|
||||
contents = fs.readFileSync(filePath, 'utf8')
|
||||
}
|
||||
|
||||
let score = this.getGrammarPathScore(grammar, filePath)
|
||||
if ((score > 0) && !grammar.bundledPackage) {
|
||||
if (score > 0 && !grammar.bundledPackage) {
|
||||
score += 0.125
|
||||
}
|
||||
if (this.grammarMatchesContents(grammar, contents)) {
|
||||
score += 0.25
|
||||
}
|
||||
|
||||
if (score > 0 && this.isGrammarPreferredType(grammar)) {
|
||||
score += GRAMMAR_TYPE_BONUS
|
||||
}
|
||||
|
||||
return score
|
||||
}
|
||||
|
||||
getGrammarPathScore (grammar, filePath) {
|
||||
if (!filePath) { return -1 }
|
||||
if (!filePath) return -1
|
||||
if (process.platform === 'win32') { filePath = filePath.replace(/\\/g, '/') }
|
||||
|
||||
const pathComponents = filePath.toLowerCase().split(PATH_SPLIT_REGEX)
|
||||
let pathScore = -1
|
||||
let pathScore = 0
|
||||
|
||||
let customFileTypes
|
||||
if (this.config.get('core.customFileTypes')) {
|
||||
@@ -225,25 +241,48 @@ class GrammarRegistry {
|
||||
}
|
||||
|
||||
grammarMatchesContents (grammar, contents) {
|
||||
if ((contents == null) || (grammar.firstLineRegex == null)) { return false }
|
||||
if (contents == null) return false
|
||||
|
||||
let escaped = false
|
||||
let numberOfNewlinesInRegex = 0
|
||||
for (let character of grammar.firstLineRegex.source) {
|
||||
switch (character) {
|
||||
case '\\':
|
||||
escaped = !escaped
|
||||
break
|
||||
case 'n':
|
||||
if (escaped) { numberOfNewlinesInRegex++ }
|
||||
escaped = false
|
||||
break
|
||||
default:
|
||||
escaped = false
|
||||
if (grammar.contentRegExp) { // TreeSitter grammars
|
||||
return grammar.contentRegExp.test(contents)
|
||||
} else if (grammar.firstLineRegex) { // FirstMate grammars
|
||||
let escaped = false
|
||||
let numberOfNewlinesInRegex = 0
|
||||
for (let character of grammar.firstLineRegex.source) {
|
||||
switch (character) {
|
||||
case '\\':
|
||||
escaped = !escaped
|
||||
break
|
||||
case 'n':
|
||||
if (escaped) { numberOfNewlinesInRegex++ }
|
||||
escaped = false
|
||||
break
|
||||
default:
|
||||
escaped = false
|
||||
}
|
||||
}
|
||||
|
||||
const lines = contents.split('\n')
|
||||
return grammar.firstLineRegex.testSync(lines.slice(0, numberOfNewlinesInRegex + 1).join('\n'))
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
const lines = contents.split('\n')
|
||||
return grammar.firstLineRegex.testSync(lines.slice(0, numberOfNewlinesInRegex + 1).join('\n'))
|
||||
}
|
||||
|
||||
forEachGrammar (callback) {
|
||||
this.textmateRegistry.grammars.forEach(callback)
|
||||
for (let grammarId in this.treeSitterGrammarsById) {
|
||||
callback(this.treeSitterGrammarsById[grammarId])
|
||||
}
|
||||
}
|
||||
|
||||
grammarForId (languageId) {
|
||||
languageId = this.normalizeLanguageId(languageId)
|
||||
|
||||
return (
|
||||
this.textmateRegistry.grammarForScopeName(languageId) ||
|
||||
this.treeSitterGrammarsById[languageId]
|
||||
)
|
||||
}
|
||||
|
||||
// Deprecated: Get the grammar override for the given file path.
|
||||
@@ -284,6 +323,8 @@ class GrammarRegistry {
|
||||
}
|
||||
|
||||
grammarAddedOrUpdated (grammar) {
|
||||
if (grammar.scopeName && !grammar.id) grammar.id = grammar.scopeName
|
||||
|
||||
this.grammarScoresByBuffer.forEach((score, buffer) => {
|
||||
const languageMode = buffer.getLanguageMode()
|
||||
if (grammar.injectionSelector) {
|
||||
@@ -295,16 +336,11 @@ class GrammarRegistry {
|
||||
|
||||
const languageOverride = this.languageOverridesByBufferId.get(buffer.id)
|
||||
|
||||
if ((grammar.scopeName === buffer.getLanguageMode().getLanguageId() ||
|
||||
grammar.scopeName === languageOverride)) {
|
||||
if ((grammar.id === buffer.getLanguageMode().getLanguageId() ||
|
||||
grammar.id === languageOverride)) {
|
||||
buffer.setLanguageMode(this.languageModeForGrammarAndBuffer(grammar, buffer))
|
||||
} else if (!languageOverride) {
|
||||
const score = this.getGrammarScore(
|
||||
grammar,
|
||||
buffer.getPath(),
|
||||
buffer.getTextInRange(GRAMMAR_SELECTION_RANGE)
|
||||
)
|
||||
|
||||
const score = this.getGrammarScore(grammar, buffer.getPath(), getGrammarSelectionContent(buffer))
|
||||
const currentScore = this.grammarScoresByBuffer.get(buffer)
|
||||
if (currentScore == null || score > currentScore) {
|
||||
buffer.setLanguageMode(this.languageModeForGrammarAndBuffer(grammar, buffer))
|
||||
@@ -348,15 +384,35 @@ class GrammarRegistry {
|
||||
}
|
||||
|
||||
grammarForScopeName (scopeName) {
|
||||
return this.textmateRegistry.grammarForScopeName(scopeName)
|
||||
return this.grammarForId(scopeName)
|
||||
}
|
||||
|
||||
addGrammar (grammar) {
|
||||
return this.textmateRegistry.addGrammar(grammar)
|
||||
if (grammar instanceof TreeSitterGrammar) {
|
||||
this.treeSitterGrammarsById[grammar.id] = grammar
|
||||
if (grammar.legacyScopeName) {
|
||||
this.config.addLegacyScopeAlias(grammar.id, grammar.legacyScopeName)
|
||||
this.textMateScopeNamesByTreeSitterLanguageId.set(grammar.id, grammar.legacyScopeName)
|
||||
this.treeSitterLanguageIdsByTextMateScopeName.set(grammar.legacyScopeName, grammar.id)
|
||||
}
|
||||
this.grammarAddedOrUpdated(grammar)
|
||||
return new Disposable(() => this.removeGrammar(grammar))
|
||||
} else {
|
||||
return this.textmateRegistry.addGrammar(grammar)
|
||||
}
|
||||
}
|
||||
|
||||
removeGrammar (grammar) {
|
||||
return this.textmateRegistry.removeGrammar(grammar)
|
||||
if (grammar instanceof TreeSitterGrammar) {
|
||||
delete this.treeSitterGrammarsById[grammar.id]
|
||||
if (grammar.legacyScopeName) {
|
||||
this.config.removeLegacyScopeAlias(grammar.id)
|
||||
this.textMateScopeNamesByTreeSitterLanguageId.delete(grammar.id)
|
||||
this.treeSitterLanguageIdsByTextMateScopeName.delete(grammar.legacyScopeName)
|
||||
}
|
||||
} else {
|
||||
return this.textmateRegistry.removeGrammar(grammar)
|
||||
}
|
||||
}
|
||||
|
||||
removeGrammarForScopeName (scopeName) {
|
||||
@@ -370,7 +426,11 @@ class GrammarRegistry {
|
||||
// * `error` An {Error}, may be null.
|
||||
// * `grammar` A {Grammar} or null if an error occurred.
|
||||
loadGrammar (grammarPath, callback) {
|
||||
return this.textmateRegistry.loadGrammar(grammarPath, callback)
|
||||
this.readGrammar(grammarPath, (error, grammar) => {
|
||||
if (error) return callback(error)
|
||||
this.addGrammar(grammar)
|
||||
callback(grammar)
|
||||
})
|
||||
}
|
||||
|
||||
// Extended: Read a grammar synchronously and add it to this registry.
|
||||
@@ -379,7 +439,9 @@ class GrammarRegistry {
|
||||
//
|
||||
// Returns a {Grammar}.
|
||||
loadGrammarSync (grammarPath) {
|
||||
return this.textmateRegistry.loadGrammarSync(grammarPath)
|
||||
const grammar = this.readGrammarSync(grammarPath)
|
||||
this.addGrammar(grammar)
|
||||
return grammar
|
||||
}
|
||||
|
||||
// Extended: Read a grammar asynchronously but don't add it to the registry.
|
||||
@@ -391,7 +453,15 @@ class GrammarRegistry {
|
||||
//
|
||||
// Returns undefined.
|
||||
readGrammar (grammarPath, callback) {
|
||||
return this.textmateRegistry.readGrammar(grammarPath, callback)
|
||||
if (!callback) callback = () => {}
|
||||
CSON.readFile(grammarPath, (error, params = {}) => {
|
||||
if (error) return callback(error)
|
||||
try {
|
||||
callback(null, this.createGrammar(grammarPath, params))
|
||||
} catch (error) {
|
||||
callback(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Extended: Read a grammar synchronously but don't add it to the registry.
|
||||
@@ -400,11 +470,18 @@ class GrammarRegistry {
|
||||
//
|
||||
// Returns a {Grammar}.
|
||||
readGrammarSync (grammarPath) {
|
||||
return this.textmateRegistry.readGrammarSync(grammarPath)
|
||||
return this.createGrammar(grammarPath, CSON.readFileSync(grammarPath) || {})
|
||||
}
|
||||
|
||||
createGrammar (grammarPath, params) {
|
||||
return this.textmateRegistry.createGrammar(grammarPath, params)
|
||||
if (params.type === 'tree-sitter') {
|
||||
return new TreeSitterGrammar(this, grammarPath, params)
|
||||
} else {
|
||||
if (typeof params.scopeName !== 'string' || params.scopeName.length === 0) {
|
||||
throw new Error(`Grammar missing required scopeName property: ${grammarPath}`)
|
||||
}
|
||||
return this.textmateRegistry.createGrammar(grammarPath, params)
|
||||
}
|
||||
}
|
||||
|
||||
// Extended: Get all the grammars in this registry.
|
||||
@@ -417,4 +494,25 @@ class GrammarRegistry {
|
||||
scopeForId (id) {
|
||||
return this.textmateRegistry.scopeForId(id)
|
||||
}
|
||||
|
||||
isGrammarPreferredType (grammar) {
|
||||
return this.config.get('core.useTreeSitterParsers')
|
||||
? grammar instanceof TreeSitterGrammar
|
||||
: grammar instanceof FirstMate.Grammar
|
||||
}
|
||||
|
||||
normalizeLanguageId (languageId) {
|
||||
if (this.config.get('core.useTreeSitterParsers')) {
|
||||
return this.treeSitterLanguageIdsByTextMateScopeName.get(languageId) || languageId
|
||||
} else {
|
||||
return this.textMateScopeNamesByTreeSitterLanguageId.get(languageId) || languageId
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getGrammarSelectionContent (buffer) {
|
||||
return buffer.getTextInRange(Range(
|
||||
Point(0, 0),
|
||||
buffer.positionForCharacterIndex(1024)
|
||||
))
|
||||
}
|
||||
|
||||
@@ -160,6 +160,8 @@ module.exports = ({commandRegistry, commandInstaller, config, notificationManage
|
||||
'editor:select-to-previous-subword-boundary': -> @selectToPreviousSubwordBoundary()
|
||||
'editor:select-to-first-character-of-line': -> @selectToFirstCharacterOfLine()
|
||||
'editor:select-line': -> @selectLinesContainingCursors()
|
||||
'editor:select-larger-syntax-node': -> @selectLargerSyntaxNode()
|
||||
'editor:select-smaller-syntax-node': -> @selectSmallerSyntaxNode()
|
||||
}),
|
||||
false
|
||||
)
|
||||
|
||||
@@ -39,11 +39,17 @@ class ScopeDescriptor
|
||||
getScopesArray: -> @scopes
|
||||
|
||||
getScopeChain: ->
|
||||
@scopes
|
||||
.map (scope) ->
|
||||
scope = ".#{scope}" unless scope[0] is '.'
|
||||
scope
|
||||
.join(' ')
|
||||
# For backward compatibility, prefix TextMate-style scope names with
|
||||
# leading dots (e.g. 'source.js' -> '.source.js').
|
||||
if @scopes[0].includes('.')
|
||||
result = ''
|
||||
for scope, i in @scopes
|
||||
result += ' ' if i > 0
|
||||
result += '.' if scope[0] isnt '.'
|
||||
result += scope
|
||||
result
|
||||
else
|
||||
@scopes.join(' ')
|
||||
|
||||
toString: ->
|
||||
@getScopeChain()
|
||||
|
||||
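An illustrative sketch of the two getScopeChain() code paths above (scope values borrowed from the specs in this diff; output follows the CoffeeScript logic shown):

    const ScopeDescriptor = require('./src/scope-descriptor')

    // TextMate-style scopes (first component contains a '.') keep the legacy leading-dot chain:
    new ScopeDescriptor({scopes: ['source.js', 'string.quoted']}).getScopeChain()
    // => '.source.js .string.quoted'

    // Tree-sitter node-type scopes (no '.') are joined as-is, matching selectors like 'javascript for_statement':
    new ScopeDescriptor({scopes: ['javascript', 'for_statement', 'identifier']}).getScopeChain()
    // => 'javascript for_statement identifier'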
178
src/syntax-scope-map.js
Normal file
@@ -0,0 +1,178 @@
|
||||
const parser = require('postcss-selector-parser')
|
||||
|
||||
module.exports =
|
||||
class SyntaxScopeMap {
|
||||
constructor (scopeNamesBySelector) {
|
||||
this.namedScopeTable = {}
|
||||
this.anonymousScopeTable = {}
|
||||
for (let selector in scopeNamesBySelector) {
|
||||
this.addSelector(selector, scopeNamesBySelector[selector])
|
||||
}
|
||||
setTableDefaults(this.namedScopeTable)
|
||||
setTableDefaults(this.anonymousScopeTable)
|
||||
}
|
||||
|
||||
addSelector (selector, scopeName) {
|
||||
parser((parseResult) => {
|
||||
for (let selectorNode of parseResult.nodes) {
|
||||
let currentTable = null
|
||||
let currentIndexValue = null
|
||||
|
||||
for (let i = selectorNode.nodes.length - 1; i >= 0; i--) {
|
||||
const termNode = selectorNode.nodes[i]
|
||||
|
||||
switch (termNode.type) {
|
||||
case 'tag':
|
||||
if (!currentTable) currentTable = this.namedScopeTable
|
||||
if (!currentTable[termNode.value]) currentTable[termNode.value] = {}
|
||||
currentTable = currentTable[termNode.value]
|
||||
if (currentIndexValue != null) {
|
||||
if (!currentTable.indices) currentTable.indices = {}
|
||||
if (!currentTable.indices[currentIndexValue]) currentTable.indices[currentIndexValue] = {}
|
||||
currentTable = currentTable.indices[currentIndexValue]
|
||||
currentIndexValue = null
|
||||
}
|
||||
break
|
||||
|
||||
case 'string':
|
||||
if (!currentTable) currentTable = this.anonymousScopeTable
|
||||
const value = termNode.value.slice(1, -1)
|
||||
if (!currentTable[value]) currentTable[value] = {}
|
||||
currentTable = currentTable[value]
|
||||
if (currentIndexValue != null) {
|
||||
if (!currentTable.indices) currentTable.indices = {}
|
||||
if (!currentTable.indices[currentIndexValue]) currentTable.indices[currentIndexValue] = {}
|
||||
currentTable = currentTable.indices[currentIndexValue]
|
||||
currentIndexValue = null
|
||||
}
|
||||
break
|
||||
|
||||
case 'universal':
|
||||
if (currentTable) {
|
||||
if (!currentTable['*']) currentTable['*'] = {}
|
||||
currentTable = currentTable['*']
|
||||
} else {
|
||||
if (!this.namedScopeTable['*']) {
|
||||
this.namedScopeTable['*'] = this.anonymousScopeTable['*'] = {}
|
||||
}
|
||||
currentTable = this.namedScopeTable['*']
|
||||
}
|
||||
if (currentIndexValue != null) {
|
||||
if (!currentTable.indices) currentTable.indices = {}
|
||||
if (!currentTable.indices[currentIndexValue]) currentTable.indices[currentIndexValue] = {}
|
||||
currentTable = currentTable.indices[currentIndexValue]
|
||||
currentIndexValue = null
|
||||
}
|
||||
break
|
||||
|
||||
case 'combinator':
|
||||
if (currentIndexValue != null) {
|
||||
rejectSelector(selector)
|
||||
}
|
||||
|
||||
if (termNode.value === '>') {
|
||||
if (!currentTable.parents) currentTable.parents = {}
|
||||
currentTable = currentTable.parents
|
||||
} else {
|
||||
rejectSelector(selector)
|
||||
}
|
||||
break
|
||||
|
||||
case 'pseudo':
|
||||
if (termNode.value === ':nth-child') {
|
||||
currentIndexValue = termNode.nodes[0].nodes[0].value
|
||||
} else {
|
||||
rejectSelector(selector)
|
||||
}
|
||||
break
|
||||
|
||||
default:
|
||||
rejectSelector(selector)
|
||||
}
|
||||
}
|
||||
|
||||
currentTable.scopeName = scopeName
|
||||
}
|
||||
}).process(selector)
|
||||
}
|
||||
|
||||
get (nodeTypes, childIndices, leafIsNamed = true) {
|
||||
let result
|
||||
let i = nodeTypes.length - 1
|
||||
let currentTable = leafIsNamed
|
||||
? this.namedScopeTable[nodeTypes[i]]
|
||||
: this.anonymousScopeTable[nodeTypes[i]]
|
||||
|
||||
if (!currentTable) currentTable = this.namedScopeTable['*']
|
||||
|
||||
while (currentTable) {
|
||||
if (currentTable.indices && currentTable.indices[childIndices[i]]) {
|
||||
currentTable = currentTable.indices[childIndices[i]]
|
||||
}
|
||||
|
||||
if (currentTable.scopeName) {
|
||||
result = currentTable.scopeName
|
||||
}
|
||||
|
||||
if (i === 0) break
|
||||
i--
|
||||
currentTable = currentTable.parents && (
|
||||
currentTable.parents[nodeTypes[i]] ||
|
||||
currentTable.parents['*']
|
||||
)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
function setTableDefaults (table) {
|
||||
const defaultTypeTable = table['*']
|
||||
|
||||
for (let type in table) {
|
||||
let typeTable = table[type]
|
||||
if (typeTable === defaultTypeTable) continue
|
||||
|
||||
if (defaultTypeTable) {
|
||||
mergeTable(typeTable, defaultTypeTable)
|
||||
}
|
||||
|
||||
if (typeTable.parents) {
|
||||
setTableDefaults(typeTable.parents)
|
||||
}
|
||||
|
||||
for (let key in typeTable.indices) {
|
||||
const indexTable = typeTable.indices[key]
|
||||
mergeTable(indexTable, typeTable, false)
|
||||
if (indexTable.parents) {
|
||||
setTableDefaults(indexTable.parents)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function mergeTable (table, defaultTable, mergeIndices = true) {
|
||||
if (mergeIndices && defaultTable.indices) {
|
||||
if (!table.indices) table.indices = {}
|
||||
for (let key in defaultTable.indices) {
|
||||
if (!table.indices[key]) table.indices[key] = {}
|
||||
mergeTable(table.indices[key], defaultTable.indices[key])
|
||||
}
|
||||
}
|
||||
|
||||
if (defaultTable.parents) {
|
||||
if (!table.parents) table.parents = {}
|
||||
for (let key in defaultTable.parents) {
|
||||
if (!table.parents[key]) table.parents[key] = {}
|
||||
mergeTable(table.parents[key], defaultTable.parents[key])
|
||||
}
|
||||
}
|
||||
|
||||
if (defaultTable.scopeName && !table.scopeName) {
|
||||
table.scopeName = defaultTable.scopeName
|
||||
}
|
||||
}
|
||||
|
||||
function rejectSelector (selector) {
|
||||
throw new TypeError(`Unsupported selector '${selector}'`)
|
||||
}
|
||||
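To make the data structure above concrete, here is a small usage sketch (not part of the diff). The selector strings and scope names are hypothetical; only the constructor and the get(nodeTypes, childIndices, leafIsNamed) signature come from the code above.

const SyntaxScopeMap = require('./syntax-scope-map')

const map = new SyntaxScopeMap({
  'identifier': 'variable',
  'call_expression > identifier': 'entity.name.function',
  '"&&"': 'keyword.operator'
})

// nodeTypes and childIndices are ordered from the root of the syntax tree down to the leaf.
map.get(['program', 'call_expression', 'identifier'], [0, 0, 0])   // => 'entity.name.function'
map.get(['program', 'identifier'], [0, 0])                         // => 'variable'
map.get(['program', 'binary_expression', '&&'], [0, 0, 1], false)  // anonymous token => 'keyword.operator'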
@@ -3083,6 +3083,36 @@ class TextEditor {
|
||||
return this.expandSelectionsBackward(selection => selection.selectToBeginningOfPreviousParagraph())
|
||||
}
|
||||
|
||||
// Extended: For each selection, select the syntax node that contains
|
||||
// that selection.
|
||||
selectLargerSyntaxNode () {
|
||||
const languageMode = this.buffer.getLanguageMode()
|
||||
if (!languageMode.getRangeForSyntaxNodeContainingRange) return
|
||||
|
||||
this.expandSelectionsForward(selection => {
|
||||
const currentRange = selection.getBufferRange()
|
||||
const newRange = languageMode.getRangeForSyntaxNodeContainingRange(currentRange)
|
||||
if (newRange) {
|
||||
if (!selection._rangeStack) selection._rangeStack = []
|
||||
selection._rangeStack.push(currentRange)
|
||||
selection.setBufferRange(newRange)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Extended: Undo the effect of a preceding call to {::selectLargerSyntaxNode}.
|
||||
selectSmallerSyntaxNode () {
|
||||
this.expandSelectionsForward(selection => {
|
||||
if (selection._rangeStack) {
|
||||
const lastRange = selection._rangeStack[selection._rangeStack.length - 1]
|
||||
if (lastRange && selection.getBufferRange().containsRange(lastRange)) {
|
||||
selection._rangeStack.length--
|
||||
selection.setBufferRange(lastRange)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
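A hedged usage sketch of the two new methods (not part of the diff); obtaining the editor via atom.workspace.getActiveTextEditor() is just one possibility:

const editor = atom.workspace.getActiveTextEditor()
editor.selectLargerSyntaxNode()   // grow each selection to the enclosing syntax node
editor.selectLargerSyntaxNode()   // grow again, to that node's parent
editor.selectSmallerSyntaxNode()  // shrink back to the previously selected range

As the guard above shows, selectLargerSyntaxNode does nothing unless the buffer's language mode implements getRangeForSyntaxNodeContainingRange.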
// Extended: Select the range of the given marker if it is valid.
|
||||
//
|
||||
// * `marker` A {DisplayMarker}
|
||||
@@ -3869,7 +3899,7 @@ class TextEditor {
|
||||
|
||||
// Extended: Fold all foldable lines at the given indent level.
|
||||
//
|
||||
// * `level` A {Number}.
|
||||
// * `level` A {Number} starting at 0.
|
||||
foldAllAtIndentLevel (level) {
|
||||
const languageMode = this.buffer.getLanguageMode()
|
||||
const foldableRanges = (
|
||||
|
||||
@@ -74,10 +74,15 @@ class TextMateLanguageMode {
|
||||
//
|
||||
// Returns a {Number}.
|
||||
suggestedIndentForBufferRow (bufferRow, tabLength, options) {
|
||||
return this._suggestedIndentForTokenizedLineAtBufferRow(
|
||||
const line = this.buffer.lineForRow(bufferRow)
|
||||
const tokenizedLine = this.tokenizedLineForRow(bufferRow)
|
||||
const iterator = tokenizedLine.getTokenIterator()
|
||||
iterator.next()
|
||||
const scopeDescriptor = new ScopeDescriptor({scopes: iterator.getScopes()})
|
||||
return this._suggestedIndentForLineWithScopeAtBufferRow(
|
||||
bufferRow,
|
||||
this.buffer.lineForRow(bufferRow),
|
||||
this.tokenizedLineForRow(bufferRow),
|
||||
line,
|
||||
scopeDescriptor,
|
||||
tabLength,
|
||||
options
|
||||
)
|
||||
@@ -90,10 +95,14 @@ class TextMateLanguageMode {
|
||||
//
|
||||
// Returns a {Number}.
|
||||
suggestedIndentForLineAtBufferRow (bufferRow, line, tabLength) {
|
||||
return this._suggestedIndentForTokenizedLineAtBufferRow(
|
||||
const tokenizedLine = this.buildTokenizedLineForRowWithText(bufferRow, line)
|
||||
const iterator = tokenizedLine.getTokenIterator()
|
||||
iterator.next()
|
||||
const scopeDescriptor = new ScopeDescriptor({scopes: iterator.getScopes()})
|
||||
return this._suggestedIndentForLineWithScopeAtBufferRow(
|
||||
bufferRow,
|
||||
line,
|
||||
this.buildTokenizedLineForRowWithText(bufferRow, line),
|
||||
scopeDescriptor,
|
||||
tabLength
|
||||
)
|
||||
}
|
||||
@@ -111,7 +120,7 @@ class TextMateLanguageMode {
|
||||
const currentIndentLevel = this.indentLevelForLine(line, tabLength)
|
||||
if (currentIndentLevel === 0) return
|
||||
|
||||
const scopeDescriptor = this.scopeDescriptorForPosition([bufferRow, 0])
|
||||
const scopeDescriptor = this.scopeDescriptorForPosition(new Point(bufferRow, 0))
|
||||
const decreaseIndentRegex = this.decreaseIndentRegexForScopeDescriptor(scopeDescriptor)
|
||||
if (!decreaseIndentRegex) return
|
||||
|
||||
@@ -138,11 +147,7 @@ class TextMateLanguageMode {
|
||||
return desiredIndentLevel
|
||||
}
|
||||
|
||||
_suggestedIndentForTokenizedLineAtBufferRow (bufferRow, line, tokenizedLine, tabLength, options) {
|
||||
const iterator = tokenizedLine.getTokenIterator()
|
||||
iterator.next()
|
||||
const scopeDescriptor = new ScopeDescriptor({scopes: iterator.getScopes()})
|
||||
|
||||
_suggestedIndentForLineWithScopeAtBufferRow (bufferRow, line, scopeDescriptor, tabLength, options) {
|
||||
const increaseIndentRegex = this.increaseIndentRegexForScopeDescriptor(scopeDescriptor)
|
||||
const decreaseIndentRegex = this.decreaseIndentRegexForScopeDescriptor(scopeDescriptor)
|
||||
const decreaseNextIndentRegex = this.decreaseNextIndentRegexForScopeDescriptor(scopeDescriptor)
|
||||
|
||||
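The practical effect of this refactor is that a scope descriptor can now be passed in directly rather than derived from a TokenizedLine, which is what the Tree-sitter language mode added below relies on. A rough sketch of the new call shape (the local variable names are illustrative):

languageMode._suggestedIndentForLineWithScopeAtBufferRow(
  bufferRow,
  buffer.lineForRow(bufferRow),
  new ScopeDescriptor({scopes: ['source.js']}),  // any scope descriptor, not necessarily derived from tokens
  tabLength
)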
72
src/tree-sitter-grammar.js
Normal file
@@ -0,0 +1,72 @@
|
||||
const path = require('path')
|
||||
const SyntaxScopeMap = require('./syntax-scope-map')
|
||||
const Module = require('module')
|
||||
|
||||
module.exports =
|
||||
class TreeSitterGrammar {
|
||||
constructor (registry, filePath, params) {
|
||||
this.registry = registry
|
||||
this.id = params.id
|
||||
this.name = params.name
|
||||
this.legacyScopeName = params.legacyScopeName
|
||||
if (params.contentRegExp) this.contentRegExp = new RegExp(params.contentRegExp)
|
||||
|
||||
this.folds = params.folds || []
|
||||
|
||||
this.commentStrings = {
|
||||
commentStartString: params.comments && params.comments.start,
|
||||
commentEndString: params.comments && params.comments.end
|
||||
}
|
||||
|
||||
const scopeSelectors = {}
|
||||
for (const key in params.scopes || {}) {
|
||||
scopeSelectors[key] = params.scopes[key]
|
||||
.split('.')
|
||||
.map(s => `syntax--${s}`)
|
||||
.join(' ')
|
||||
}
|
||||
|
||||
this.scopeMap = new SyntaxScopeMap(scopeSelectors)
|
||||
this.fileTypes = params.fileTypes
|
||||
|
||||
// TODO - When we upgrade to a new enough version of node, use `require.resolve`
|
||||
// with the new `paths` option instead of this private API.
|
||||
const languageModulePath = Module._resolveFilename(params.parser, {
|
||||
id: filePath,
|
||||
filename: filePath,
|
||||
paths: Module._nodeModulePaths(path.dirname(filePath))
|
||||
})
|
||||
|
||||
this.languageModule = require(languageModulePath)
|
||||
this.scopesById = new Map()
|
||||
this.idsByScope = {}
|
||||
this.nextScopeId = 256 + 1
|
||||
this.registration = null
|
||||
}
|
||||
|
||||
idForScope (scope) {
|
||||
let id = this.idsByScope[scope]
|
||||
if (!id) {
|
||||
id = this.nextScopeId += 2
|
||||
this.idsByScope[scope] = id
|
||||
this.scopesById.set(id, scope)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
classNameForScopeId (id) {
|
||||
return this.scopesById.get(id)
|
||||
}
|
||||
|
||||
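Scope ids are handed out lazily and, given the code above, are always odd and above 256. For example (hypothetical class strings):

grammar.idForScope('syntax--source syntax--js')  // => 259
grammar.idForScope('syntax--string')             // => 261
grammar.idForScope('syntax--source syntax--js')  // => 259 (cached)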
get scopeName () {
|
||||
return this.id
|
||||
}
|
||||
|
||||
activate () {
|
||||
this.registration = this.registry.addGrammar(this)
|
||||
}
|
||||
|
||||
deactivate () {
|
||||
if (this.registration) this.registration.dispose()
|
||||
}
|
||||
}
|
||||
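For reference, a hypothetical sketch of the params object such a grammar file would supply, using only the properties the constructor above reads (the concrete values are made up):

const params = {
  id: 'javascript',
  name: 'JavaScript',
  legacyScopeName: 'source.js',
  fileTypes: ['js'],
  parser: 'tree-sitter-javascript',   // resolved relative to the grammar file itself
  comments: {start: '// '},
  folds: [
    {type: 'statement_block', start: {index: 0}, end: {index: -1}}
  ],
  scopes: {
    'program': 'source.js',
    'call_expression > identifier': 'entity.name.function'
  }
}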
532
src/tree-sitter-language-mode.js
Normal file
@@ -0,0 +1,532 @@
|
||||
const {Document} = require('tree-sitter')
|
||||
const {Point, Range, Emitter} = require('atom')
|
||||
const ScopeDescriptor = require('./scope-descriptor')
|
||||
const TokenizedLine = require('./tokenized-line')
|
||||
const TextMateLanguageMode = require('./text-mate-language-mode')
|
||||
|
||||
let nextId = 0
|
||||
|
||||
module.exports =
|
||||
class TreeSitterLanguageMode {
|
||||
constructor ({buffer, grammar, config}) {
|
||||
this.id = nextId++
|
||||
this.buffer = buffer
|
||||
this.grammar = grammar
|
||||
this.config = config
|
||||
this.document = new Document()
|
||||
this.document.setInput(new TreeSitterTextBufferInput(buffer))
|
||||
this.document.setLanguage(grammar.languageModule)
|
||||
this.document.parse()
|
||||
this.rootScopeDescriptor = new ScopeDescriptor({scopes: [this.grammar.id]})
|
||||
this.emitter = new Emitter()
|
||||
this.isFoldableCache = []
|
||||
|
||||
// TODO: Remove this once TreeSitterLanguageMode implements its own auto-indentation system. This
|
||||
// is temporarily needed in order to delegate to the TextMateLanguageMode's auto-indent system.
|
||||
this.regexesByPattern = {}
|
||||
}
|
||||
|
||||
getLanguageId () {
|
||||
return this.grammar.id
|
||||
}
|
||||
|
||||
bufferDidChange ({oldRange, newRange, oldText, newText}) {
|
||||
const startRow = oldRange.start.row
|
||||
const oldEndRow = oldRange.end.row
|
||||
const newEndRow = newRange.end.row
|
||||
this.isFoldableCache.splice(startRow, oldEndRow - startRow, ...new Array(newEndRow - startRow))
|
||||
this.document.edit({
|
||||
startIndex: this.buffer.characterIndexForPosition(oldRange.start),
|
||||
lengthRemoved: oldText.length,
|
||||
lengthAdded: newText.length,
|
||||
startPosition: oldRange.start,
|
||||
extentRemoved: oldRange.getExtent(),
|
||||
extentAdded: newRange.getExtent()
|
||||
})
|
||||
}
|
||||
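For concreteness, a hedged illustration of the translation this method performs (the change values are hypothetical): typing foo at the start of an empty buffer arrives as oldRange [(0, 0), (0, 0)], newRange [(0, 0), (0, 3)], oldText '' and newText 'foo', which becomes

this.document.edit({
  startIndex: 0,                        // characterIndexForPosition(oldRange.start)
  lengthRemoved: 0,                     // oldText.length
  lengthAdded: 3,                       // newText.length
  startPosition: {row: 0, column: 0},   // oldRange.start
  extentRemoved: {row: 0, column: 0},   // oldRange.getExtent()
  extentAdded: {row: 0, column: 3}      // newRange.getExtent()
})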
|
||||
/*
|
||||
Section - Highlighting
|
||||
*/
|
||||
|
||||
buildHighlightIterator () {
|
||||
const invalidatedRanges = this.document.parse()
|
||||
for (let i = 0, n = invalidatedRanges.length; i < n; i++) {
|
||||
const range = invalidatedRanges[i]
|
||||
const startRow = range.start.row
|
||||
const endRow = range.end.row
|
||||
for (let row = startRow; row < endRow; row++) {
|
||||
this.isFoldableCache[row] = undefined
|
||||
}
|
||||
this.emitter.emit('did-change-highlighting', range)
|
||||
}
|
||||
return new TreeSitterHighlightIterator(this)
|
||||
}
|
||||
|
||||
onDidChangeHighlighting (callback) {
|
||||
return this.emitter.on('did-change-highlighting', callback)
|
||||
}
|
||||
|
||||
classNameForScopeId (scopeId) {
|
||||
return this.grammar.classNameForScopeId(scopeId)
|
||||
}
|
||||
|
||||
/*
|
||||
Section - Commenting
|
||||
*/
|
||||
|
||||
commentStringsForPosition () {
|
||||
return this.grammar.commentStrings
|
||||
}
|
||||
|
||||
isRowCommented () {
|
||||
return false
|
||||
}
|
||||
|
||||
/*
|
||||
Section - Indentation
|
||||
*/
|
||||
|
||||
suggestedIndentForLineAtBufferRow (row, line, tabLength) {
|
||||
return this._suggestedIndentForLineWithScopeAtBufferRow(
|
||||
row,
|
||||
line,
|
||||
this.rootScopeDescriptor,
|
||||
tabLength
|
||||
)
|
||||
}
|
||||
|
||||
suggestedIndentForBufferRow (row, tabLength, options) {
|
||||
return this._suggestedIndentForLineWithScopeAtBufferRow(
|
||||
row,
|
||||
this.buffer.lineForRow(row),
|
||||
this.rootScopeDescriptor,
|
||||
tabLength,
|
||||
options
|
||||
)
|
||||
}
|
||||
|
||||
indentLevelForLine (line, tabLength) {
|
||||
let indentLength = 0
|
||||
for (let i = 0, {length} = line; i < length; i++) {
|
||||
const char = line[i]
|
||||
if (char === '\t') {
|
||||
indentLength += tabLength - (indentLength % tabLength)
|
||||
} else if (char === ' ') {
|
||||
indentLength++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
return indentLength / tabLength
|
||||
}
|
||||
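A worked example of the arithmetic above (hypothetical input):

// With tabLength = 2, the line '\t  foo' is measured as:
//   '\t' -> indentLength += 2 - (0 % 2)  // 2
//   ' '  -> indentLength++               // 3
//   ' '  -> indentLength++               // 4
// indentLevelForLine('\t  foo', 2)       // => 4 / 2 = 2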
|
||||
/*
|
||||
Section - Folding
|
||||
*/
|
||||
|
||||
isFoldableAtRow (row) {
|
||||
if (this.isFoldableCache[row] != null) return this.isFoldableCache[row]
|
||||
const result = this.getFoldableRangeContainingPoint(Point(row, Infinity), 0, true) != null
|
||||
this.isFoldableCache[row] = result
|
||||
return result
|
||||
}
|
||||
|
||||
getFoldableRanges () {
|
||||
return this.getFoldableRangesAtIndentLevel(null)
|
||||
}
|
||||
|
||||
getFoldableRangesAtIndentLevel (goalLevel) {
|
||||
let result = []
|
||||
let stack = [{node: this.document.rootNode, level: 0}]
|
||||
while (stack.length > 0) {
|
||||
const {node, level} = stack.pop()
|
||||
|
||||
const range = this.getFoldableRangeForNode(node)
|
||||
if (range) {
|
||||
if (goalLevel == null || level === goalLevel) {
|
||||
let updatedExistingRange = false
|
||||
for (let i = 0, {length} = result; i < length; i++) {
|
||||
if (result[i].start.row === range.start.row &&
|
||||
result[i].end.row === range.end.row) {
|
||||
result[i] = range
|
||||
updatedExistingRange = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!updatedExistingRange) result.push(range)
|
||||
}
|
||||
}
|
||||
|
||||
const parentStartRow = node.startPosition.row
|
||||
const parentEndRow = node.endPosition.row
|
||||
for (let children = node.namedChildren, i = 0, {length} = children; i < length; i++) {
|
||||
const child = children[i]
|
||||
const {startPosition: childStart, endPosition: childEnd} = child
|
||||
if (childEnd.row > childStart.row) {
|
||||
if (childStart.row === parentStartRow && childEnd.row === parentEndRow) {
|
||||
stack.push({node: child, level: level})
|
||||
} else {
|
||||
const childLevel = range && range.containsPoint(childStart) && range.containsPoint(childEnd)
|
||||
? level + 1
|
||||
: level
|
||||
if (childLevel <= goalLevel || goalLevel == null) {
|
||||
stack.push({node: child, level: childLevel})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result.sort((a, b) => a.start.row - b.start.row)
|
||||
}
|
||||
|
||||
getFoldableRangeContainingPoint (point, tabLength, existenceOnly = false) {
|
||||
let node = this.document.rootNode.descendantForPosition(this.buffer.clipPosition(point))
|
||||
while (node) {
|
||||
if (existenceOnly && node.startPosition.row < point.row) break
|
||||
if (node.endPosition.row > point.row) {
|
||||
const range = this.getFoldableRangeForNode(node, existenceOnly)
|
||||
if (range) return range
|
||||
}
|
||||
node = node.parent
|
||||
}
|
||||
}
|
||||
|
||||
getFoldableRangeForNode (node, existenceOnly) {
|
||||
const {children, type: nodeType} = node
|
||||
const childCount = children.length
|
||||
let childTypes
|
||||
|
||||
for (var i = 0, {length} = this.grammar.folds; i < length; i++) {
|
||||
const foldEntry = this.grammar.folds[i]
|
||||
|
||||
if (foldEntry.type) {
|
||||
if (typeof foldEntry.type === 'string') {
|
||||
if (foldEntry.type !== nodeType) continue
|
||||
} else {
|
||||
if (!foldEntry.type.includes(nodeType)) continue
|
||||
}
|
||||
}
|
||||
|
||||
let foldStart
|
||||
const startEntry = foldEntry.start
|
||||
if (startEntry) {
|
||||
if (startEntry.index != null) {
|
||||
const child = children[startEntry.index]
|
||||
if (!child || (startEntry.type && startEntry.type !== child.type)) continue
|
||||
foldStart = child.endPosition
|
||||
} else {
|
||||
if (!childTypes) childTypes = children.map(child => child.type)
|
||||
const index = typeof startEntry.type === 'string'
|
||||
? childTypes.indexOf(startEntry.type)
|
||||
: childTypes.findIndex(type => startEntry.type.includes(type))
|
||||
if (index === -1) continue
|
||||
foldStart = children[index].endPosition
|
||||
}
|
||||
} else {
|
||||
foldStart = new Point(node.startPosition.row, Infinity)
|
||||
}
|
||||
|
||||
let foldEnd
|
||||
const endEntry = foldEntry.end
|
||||
if (endEntry) {
|
||||
let foldEndNode
|
||||
if (endEntry.index != null) {
|
||||
const index = endEntry.index < 0 ? childCount + endEntry.index : endEntry.index
|
||||
foldEndNode = children[index]
|
||||
if (!foldEndNode || (endEntry.type && endEntry.type !== foldEndNode.type)) continue
|
||||
} else {
|
||||
if (!childTypes) childTypes = children.map(child => child.type)
|
||||
const index = typeof endEntry.type === 'string'
|
||||
? childTypes.indexOf(endEntry.type)
|
||||
: childTypes.findIndex(type => endEntry.type.includes(type))
|
||||
if (index === -1) continue
|
||||
foldEndNode = children[index]
|
||||
}
|
||||
|
||||
if (foldEndNode.endIndex - foldEndNode.startIndex > 1 && foldEndNode.startPosition.row > foldStart.row) {
|
||||
foldEnd = new Point(foldEndNode.startPosition.row - 1, Infinity)
|
||||
} else {
|
||||
foldEnd = foldEndNode.startPosition
|
||||
}
|
||||
} else {
|
||||
const {endPosition} = node
|
||||
if (endPosition.column === 0) {
|
||||
foldEnd = Point(endPosition.row - 1, Infinity)
|
||||
} else if (childCount > 0) {
|
||||
foldEnd = endPosition
|
||||
} else {
|
||||
foldEnd = Point(endPosition.row, 0)
|
||||
}
|
||||
}
|
||||
|
||||
return existenceOnly ? true : new Range(foldStart, foldEnd)
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Syntax Tree APIs
|
||||
*/
|
||||
|
||||
getRangeForSyntaxNodeContainingRange (range) {
|
||||
const startIndex = this.buffer.characterIndexForPosition(range.start)
|
||||
const endIndex = this.buffer.characterIndexForPosition(range.end)
|
||||
let node = this.document.rootNode.descendantForIndex(startIndex, endIndex - 1)
|
||||
while (node && node.startIndex === startIndex && node.endIndex === endIndex) {
|
||||
node = node.parent
|
||||
}
|
||||
if (node) return new Range(node.startPosition, node.endPosition)
|
||||
}
|
||||
|
||||
/*
|
||||
Section - Backward compatibility shims
|
||||
*/
|
||||
|
||||
tokenizedLineForRow (row) {
|
||||
return new TokenizedLine({
|
||||
openScopes: [],
|
||||
text: this.buffer.lineForRow(row),
|
||||
tags: [],
|
||||
ruleStack: [],
|
||||
lineEnding: this.buffer.lineEndingForRow(row),
|
||||
tokenIterator: null,
|
||||
grammar: this.grammar
|
||||
})
|
||||
}
|
||||
|
||||
scopeDescriptorForPosition (point) {
|
||||
const result = []
|
||||
let node = this.document.rootNode.descendantForPosition(point)
|
||||
|
||||
// Don't include anonymous token types like '(' because they prevent scope chains
|
||||
// from being parsed as CSS selectors by the `slick` parser. Other css selector
|
||||
// parsers like `postcss-selector-parser` do allow arbitrary quoted strings in
|
||||
// selectors.
|
||||
if (!node.isNamed) node = node.parent
|
||||
|
||||
while (node) {
|
||||
result.push(node.type)
|
||||
node = node.parent
|
||||
}
|
||||
result.push(this.grammar.id)
|
||||
return new ScopeDescriptor({scopes: result.reverse()})
|
||||
}
|
||||
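A hedged example of the shape of the result (the node type names depend entirely on the Tree-sitter grammar and are hypothetical here): in a JavaScript buffer containing foo.bar, a position on bar might yield

new ScopeDescriptor({scopes: [
  'javascript',            // this.grammar.id
  'program',
  'expression_statement',
  'member_expression',
  'property_identifier'
]})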
|
||||
hasTokenForSelector (scopeSelector) {
|
||||
return false
|
||||
}
|
||||
|
||||
getGrammar () {
|
||||
return this.grammar
|
||||
}
|
||||
}
|
||||
|
||||
class TreeSitterHighlightIterator {
|
||||
constructor (layer, document) {
|
||||
this.layer = layer
|
||||
|
||||
// Conceptually, the iterator represents a single position in the text. It stores this
|
||||
// position both as a character index and as a `Point`. This position corresponds to a
|
||||
// leaf node of the syntax tree, which either contains or follows the iterator's
|
||||
// textual position. The `currentNode` property represents that leaf node, and
|
||||
// `currentChildIndex` represents the child index of that leaf node within its parent.
|
||||
this.currentIndex = null
|
||||
this.currentPosition = null
|
||||
this.currentNode = null
|
||||
this.currentChildIndex = null
|
||||
|
||||
// In order to determine which selectors match its current node, the iterator maintains
|
||||
// a list of the current node's ancestors. Because the selectors can use the `:nth-child`
|
||||
// pseudo-class, each node's child index is also stored.
|
||||
this.containingNodeTypes = []
|
||||
this.containingNodeChildIndices = []
|
||||
|
||||
// At any given position, the iterator exposes the list of class names that should be
|
||||
// *ended* at its current position and the list of class names that should be *started*
|
||||
// at its current position.
|
||||
this.closeTags = []
|
||||
this.openTags = []
|
||||
}
|
||||
|
||||
seek (targetPosition) {
|
||||
const containingTags = []
|
||||
|
||||
this.closeTags.length = 0
|
||||
this.openTags.length = 0
|
||||
this.containingNodeTypes.length = 0
|
||||
this.containingNodeChildIndices.length = 0
|
||||
this.currentPosition = targetPosition
|
||||
this.currentIndex = this.layer.buffer.characterIndexForPosition(targetPosition)
|
||||
|
||||
var node = this.layer.document.rootNode
|
||||
var childIndex = -1
|
||||
var done = false
|
||||
var nodeContainsTarget = true
|
||||
do {
|
||||
this.currentNode = node
|
||||
this.currentChildIndex = childIndex
|
||||
if (!nodeContainsTarget) break
|
||||
this.containingNodeTypes.push(node.type)
|
||||
this.containingNodeChildIndices.push(childIndex)
|
||||
|
||||
const scopeName = this.currentScopeName()
|
||||
if (scopeName) {
|
||||
const id = this.layer.grammar.idForScope(scopeName)
|
||||
if (this.currentIndex === node.startIndex) {
|
||||
this.openTags.push(id)
|
||||
} else {
|
||||
containingTags.push(id)
|
||||
}
|
||||
}
|
||||
|
||||
done = true
|
||||
for (var i = 0, {children} = node, childCount = children.length; i < childCount; i++) {
|
||||
const child = children[i]
|
||||
if (child.endIndex > this.currentIndex) {
|
||||
node = child
|
||||
childIndex = i
|
||||
done = false
|
||||
if (child.startIndex > this.currentIndex) nodeContainsTarget = false
|
||||
break
|
||||
}
|
||||
}
|
||||
} while (!done)
|
||||
|
||||
return containingTags
|
||||
}
|
||||
|
||||
moveToSuccessor () {
|
||||
this.closeTags.length = 0
|
||||
this.openTags.length = 0
|
||||
|
||||
if (!this.currentNode) {
|
||||
this.currentPosition = {row: Infinity, column: Infinity}
|
||||
return false
|
||||
}
|
||||
|
||||
do {
|
||||
if (this.currentIndex < this.currentNode.startIndex) {
|
||||
this.currentIndex = this.currentNode.startIndex
|
||||
this.currentPosition = this.currentNode.startPosition
|
||||
this.pushOpenTag()
|
||||
this.descendLeft()
|
||||
} else if (this.currentIndex < this.currentNode.endIndex) {
|
||||
while (true) {
|
||||
this.currentIndex = this.currentNode.endIndex
|
||||
this.currentPosition = this.currentNode.endPosition
|
||||
this.pushCloseTag()
|
||||
|
||||
const {nextSibling} = this.currentNode
|
||||
if (nextSibling) {
|
||||
this.currentNode = nextSibling
|
||||
this.currentChildIndex++
|
||||
if (this.currentIndex === nextSibling.startIndex) {
|
||||
this.pushOpenTag()
|
||||
this.descendLeft()
|
||||
}
|
||||
break
|
||||
} else {
|
||||
this.currentNode = this.currentNode.parent
|
||||
this.currentChildIndex = last(this.containingNodeChildIndices)
|
||||
if (!this.currentNode) break
|
||||
}
|
||||
}
|
||||
} else if (this.currentNode.startIndex < this.currentNode.endIndex) {
|
||||
this.currentNode = this.currentNode.nextSibling
|
||||
if (this.currentNode) {
|
||||
this.currentChildIndex++
|
||||
this.currentPosition = this.currentNode.startPosition
|
||||
this.currentIndex = this.currentNode.startIndex
|
||||
this.pushOpenTag()
|
||||
this.descendLeft()
|
||||
}
|
||||
} else {
|
||||
this.pushCloseTag()
|
||||
this.currentNode = this.currentNode.parent
|
||||
this.currentChildIndex = last(this.containingNodeChildIndices)
|
||||
}
|
||||
} while (this.closeTags.length === 0 && this.openTags.length === 0 && this.currentNode)
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
getPosition () {
|
||||
return this.currentPosition
|
||||
}
|
||||
|
||||
getCloseScopeIds () {
|
||||
return this.closeTags.slice()
|
||||
}
|
||||
|
||||
getOpenScopeIds () {
|
||||
return this.openTags.slice()
|
||||
}
|
||||
|
||||
// Private methods
|
||||
|
||||
descendLeft () {
|
||||
let child
|
||||
while ((child = this.currentNode.firstChild) && this.currentIndex === child.startIndex) {
|
||||
this.currentNode = child
|
||||
this.currentChildIndex = 0
|
||||
this.pushOpenTag()
|
||||
}
|
||||
}
|
||||
|
||||
currentScopeName () {
|
||||
return this.layer.grammar.scopeMap.get(
|
||||
this.containingNodeTypes,
|
||||
this.containingNodeChildIndices,
|
||||
this.currentNode.isNamed
|
||||
)
|
||||
}
|
||||
|
||||
pushCloseTag () {
|
||||
const scopeName = this.currentScopeName()
|
||||
if (scopeName) this.closeTags.push(this.layer.grammar.idForScope(scopeName))
|
||||
this.containingNodeTypes.pop()
|
||||
this.containingNodeChildIndices.pop()
|
||||
}
|
||||
|
||||
pushOpenTag () {
|
||||
this.containingNodeTypes.push(this.currentNode.type)
|
||||
this.containingNodeChildIndices.push(this.currentChildIndex)
|
||||
const scopeName = this.currentScopeName()
|
||||
if (scopeName) this.openTags.push(this.layer.grammar.idForScope(scopeName))
|
||||
}
|
||||
}
|
||||
|
||||
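A rough sketch of how a consumer such as the display layer might drive the iterator above, using only the methods it defines (the row limit is arbitrary):

const iterator = languageMode.buildHighlightIterator()
let containing = iterator.seek(Point(0, 0))    // scope ids already open at that position
while (iterator.getPosition().row < 10) {
  const closed = iterator.getCloseScopeIds()   // scope ids ending at this boundary
  const opened = iterator.getOpenScopeIds()    // scope ids starting at this boundary
  if (!iterator.moveToSuccessor()) break       // returns false once the tree is exhausted
}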
class TreeSitterTextBufferInput {
|
||||
constructor (buffer) {
|
||||
this.buffer = buffer
|
||||
this.seek(0)
|
||||
}
|
||||
|
||||
seek (characterIndex) {
|
||||
this.position = this.buffer.positionForCharacterIndex(characterIndex)
|
||||
}
|
||||
|
||||
read () {
|
||||
const endPosition = this.buffer.clipPosition(this.position.traverse({row: 1000, column: 0}))
|
||||
const text = this.buffer.getTextInRange([this.position, endPosition])
|
||||
this.position = endPosition
|
||||
return text
|
||||
}
|
||||
}
|
||||
|
||||
function last (array) {
|
||||
return array[array.length - 1]
|
||||
}
|
||||
|
||||
// TODO: Remove this once TreeSitterLanguageMode implements its own auto-indent system.
|
||||
[
|
||||
'_suggestedIndentForLineWithScopeAtBufferRow',
|
||||
'suggestedIndentForEditedBufferRow',
|
||||
'increaseIndentRegexForScopeDescriptor',
|
||||
'decreaseIndentRegexForScopeDescriptor',
|
||||
'decreaseNextIndentRegexForScopeDescriptor',
|
||||
'regexForPattern'
|
||||
].forEach(methodName => {
|
||||
module.exports.prototype[methodName] = TextMateLanguageMode.prototype[methodName]
|
||||
})