Add initial TOML support

Kevin Sawicki
2013-02-24 09:55:22 -08:00
parent 259e20e7cc
commit fca182a470
2 changed files with 104 additions and 0 deletions

View File

@@ -0,0 +1,46 @@
'name': 'TOML'
'scopeName': 'source.toml'
'fileTypes': ['toml']
'patterns': [
  {
    'match': '(?:^\\s*)(\\[([^\\]]+)\\])'
    'captures':
      '2': 'name': 'variable.keygroup.toml'
    'name': 'keygroup.toml'
  }
  {
    'match': '(?:^\\s*)(\\S+)\\s*='
    'captures':
      '1': 'name': 'entity.key.toml'
    'name': 'key.toml'
  }
  {
    'begin': '"'
    'beginCaptures':
      '0': 'name': 'string.begin.toml'
    'end': '"'
    'endCaptures':
      '0': 'name': 'string.end.toml'
    'name': 'string.toml'
  }
  {
    'match': '#.*$'
    'name': 'comment.toml'
  }
  {
    'match': 'true'
    'name': 'constant.language.boolean.true.toml'
  }
  {
    'match': 'false'
    'name': 'constant.language.boolean.false.toml'
  }
  {
    'match': '\\d{4}-\\d{2}-\\d{2}(T)\\d{2}:\\d{2}:\\d{2}(Z)'
    'name': 'support.date.toml'
  }
  {
    'match': '-?\\d+(\\.?\\d+)?'
    'name': 'constant.numeric.toml'
  }
]

View File

@@ -0,0 +1,58 @@
TextMatePackage = require 'text-mate-package'

describe "TOML grammar", ->
  grammar = null
  beforeEach ->
    spyOn(syntax, "addGrammar")
    pack = new TextMatePackage(require.resolve("toml.tmbundle"))
    pack.load()
    grammar = pack.grammars[0]

  it "parses the grammar", ->
    expect(grammar).toBeTruthy()
    expect(grammar.scopeName).toBe "source.toml"

  it "tokenizes comments", ->
    {tokens} = grammar.tokenizeLine("# I am a comment")
    expect(tokens[0]).toEqual value: "# I am a comment", scopes: ["source.toml", "comment.toml"]

  it "tokenizes strings", ->
    {tokens} = grammar.tokenizeLine('"I am a string"')
    expect(tokens[0]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.begin.toml"]
    expect(tokens[1]).toEqual value: 'I am a string', scopes: ["source.toml", "string.toml"]
    expect(tokens[2]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.end.toml"]

  it "tokenizes booleans", ->
    {tokens} = grammar.tokenizeLine("true")
    expect(tokens[0]).toEqual value: "true", scopes: ["source.toml", "constant.language.boolean.true.toml"]

    {tokens} = grammar.tokenizeLine("false")
    expect(tokens[0]).toEqual value: "false", scopes: ["source.toml", "constant.language.boolean.false.toml"]

  it "tokenizes numbers", ->
    {tokens} = grammar.tokenizeLine("123")
    expect(tokens[0]).toEqual value: "123", scopes: ["source.toml", "constant.numeric.toml"]

    {tokens} = grammar.tokenizeLine("-1")
    expect(tokens[0]).toEqual value: "-1", scopes: ["source.toml", "constant.numeric.toml"]

    {tokens} = grammar.tokenizeLine("3.14")
    expect(tokens[0]).toEqual value: "3.14", scopes: ["source.toml", "constant.numeric.toml"]

    {tokens} = grammar.tokenizeLine("-123.456")
    expect(tokens[0]).toEqual value: "-123.456", scopes: ["source.toml", "constant.numeric.toml"]

  it "tokenizes dates", ->
    {tokens} = grammar.tokenizeLine("1979-05-27T07:32:00Z")
    expect(tokens[0]).toEqual value: "1979-05-27T07:32:00Z", scopes: ["source.toml", "support.date.toml"]

  it "tokenizes keygroups", ->
    {tokens} = grammar.tokenizeLine("[keygroup]")
    expect(tokens[0]).toEqual value: "[", scopes: ["source.toml", "keygroup.toml"]
    expect(tokens[1]).toEqual value: "keygroup", scopes: ["source.toml", "keygroup.toml", "variable.keygroup.toml"]
    expect(tokens[2]).toEqual value: "]", scopes: ["source.toml", "keygroup.toml"]

  it "tokenizes keys", ->
    {tokens} = grammar.tokenizeLine("key =")
    expect(tokens[0]).toEqual value: "key", scopes: ["source.toml", "key.toml", "entity.key.toml"]
    expect(tokens[1]).toEqual value: " =", scopes: ["source.toml", "key.toml"]