From fca182a4706711086aebf17ca41a5ddc88308e51 Mon Sep 17 00:00:00 2001
From: Kevin Sawicki
Date: Sun, 24 Feb 2013 09:55:22 -0800
Subject: [PATCH] Add initial TOML support

---
 src/packages/toml.tmbundle/Syntaxes/toml.cson | 46 +++++++++++++++
 .../toml.tmbundle/spec/toml-spec.coffee       | 58 +++++++++++++++++++
 2 files changed, 104 insertions(+)
 create mode 100644 src/packages/toml.tmbundle/Syntaxes/toml.cson
 create mode 100644 src/packages/toml.tmbundle/spec/toml-spec.coffee

diff --git a/src/packages/toml.tmbundle/Syntaxes/toml.cson b/src/packages/toml.tmbundle/Syntaxes/toml.cson
new file mode 100644
index 000000000..9eac5c13f
--- /dev/null
+++ b/src/packages/toml.tmbundle/Syntaxes/toml.cson
@@ -0,0 +1,46 @@
+'name': 'TOML'
+'scopeName': 'source.toml'
+'fileTypes': ['toml']
+'patterns': [
+  {
+    'match': '(?:^\\s*)(\\[([^\\]]+)\\])'
+    'captures':
+      '2': 'name': 'variable.keygroup.toml'
+    'name': 'keygroup.toml'
+  }
+  {
+    'match': '(?:^\\s*)(\\S+)\\s*='
+    'captures':
+      '1': 'name': 'entity.key.toml'
+    'name': 'key.toml'
+  }
+  {
+    'begin': '"'
+    'beginCaptures':
+      '0': 'name': 'string.begin.toml'
+    'end': '"'
+    'endCaptures':
+      '0': 'name': 'string.end.toml'
+    'name': 'string.toml'
+  }
+  {
+    'match': '#.*$'
+    'name': 'comment.toml'
+  }
+  {
+    'match': 'true'
+    'name': 'constant.language.boolean.true.toml'
+  }
+  {
+    'match': 'false'
+    'name': 'constant.language.boolean.false.toml'
+  }
+  {
+    'match': '\\d{4}-\\d{2}-\\d{2}(T)\\d{2}:\\d{2}:\\d{2}(Z)'
+    'name': 'support.date.toml'
+  }
+  {
+    'match': '-?\\d+(\\.?\\d+)?'
+    'name': 'constant.numeric.toml'
+  }
+]
diff --git a/src/packages/toml.tmbundle/spec/toml-spec.coffee b/src/packages/toml.tmbundle/spec/toml-spec.coffee
new file mode 100644
index 000000000..cbfe8bf76
--- /dev/null
+++ b/src/packages/toml.tmbundle/spec/toml-spec.coffee
@@ -0,0 +1,58 @@
+TextMatePackage = require 'text-mate-package'
+
+describe "TOML grammar", ->
+  grammar = null
+
+  beforeEach ->
+    spyOn(syntax, "addGrammar")
+    pack = new TextMatePackage(require.resolve("toml.tmbundle"))
+    pack.load()
+    grammar = pack.grammars[0]
+
+  it "parses the grammar", ->
+    expect(grammar).toBeTruthy()
+    expect(grammar.scopeName).toBe "source.toml"
+
+  it "tokenizes comments", ->
+    {tokens} = grammar.tokenizeLine("# I am a comment")
+    expect(tokens[0]).toEqual value: "# I am a comment", scopes: ["source.toml", "comment.toml"]
+
+  it "tokenizes strings", ->
+    {tokens} = grammar.tokenizeLine('"I am a string"')
+    expect(tokens[0]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.begin.toml"]
+    expect(tokens[1]).toEqual value: 'I am a string', scopes: ["source.toml", "string.toml"]
+    expect(tokens[2]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.end.toml"]
+
+  it "tokenizes booleans", ->
+    {tokens} = grammar.tokenizeLine("true")
+    expect(tokens[0]).toEqual value: "true", scopes: ["source.toml", "constant.language.boolean.true.toml"]
+    {tokens} = grammar.tokenizeLine("false")
+    expect(tokens[0]).toEqual value: "false", scopes: ["source.toml", "constant.language.boolean.false.toml"]
+
+  it "tokenizes numbers", ->
+    {tokens} = grammar.tokenizeLine("123")
+    expect(tokens[0]).toEqual value: "123", scopes: ["source.toml", "constant.numeric.toml"]
+
+    {tokens} = grammar.tokenizeLine("-1")
+    expect(tokens[0]).toEqual value: "-1", scopes: ["source.toml", "constant.numeric.toml"]
+
+    {tokens} = grammar.tokenizeLine("3.14")
+    expect(tokens[0]).toEqual value: "3.14", scopes: ["source.toml", "constant.numeric.toml"]
+
+    {tokens} = grammar.tokenizeLine("-123.456")
+    expect(tokens[0]).toEqual value: "-123.456", scopes: ["source.toml", "constant.numeric.toml"]
+
+  it "tokenizes dates", ->
+    {tokens} = grammar.tokenizeLine("1979-05-27T07:32:00Z")
+    expect(tokens[0]).toEqual value: "1979-05-27T07:32:00Z", scopes: ["source.toml", "support.date.toml"]
+
+  it "tokenizes keygroups", ->
+    {tokens} = grammar.tokenizeLine("[keygroup]")
+    expect(tokens[0]).toEqual value: "[", scopes: ["source.toml", "keygroup.toml"]
+    expect(tokens[1]).toEqual value: "keygroup", scopes: ["source.toml", "keygroup.toml", "variable.keygroup.toml"]
+    expect(tokens[2]).toEqual value: "]", scopes: ["source.toml", "keygroup.toml"]
+
+  it "tokenizes keys", ->
+    {tokens} = grammar.tokenizeLine("key =")
+    expect(tokens[0]).toEqual value: "key", scopes: ["source.toml", "key.toml", "entity.key.toml"]
+    expect(tokens[1]).toEqual value: " =", scopes: ["source.toml", "key.toml"]
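
A possible follow-up case for toml-spec.coffee, sketched in the same style but not included in the diff above: a single line that combines a key, a string value, and a trailing comment, exercising several of the grammar's patterns at once. The sample text, the test name, and the token indices for the bare " " separators are assumptions about how the tokenizer splits unscoped text, not something this patch guarantees.

  it "tokenizes a key, string value, and trailing comment on one line", ->
    # Assumed sample line; tokens[2] and tokens[6] are expected to be the
    # plain " " separators scoped only to ["source.toml"], so they are skipped.
    {tokens} = grammar.tokenizeLine('title = "TOML Example" # a comment')
    expect(tokens[0]).toEqual value: "title", scopes: ["source.toml", "key.toml", "entity.key.toml"]
    expect(tokens[1]).toEqual value: " =", scopes: ["source.toml", "key.toml"]
    expect(tokens[3]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.begin.toml"]
    expect(tokens[4]).toEqual value: "TOML Example", scopes: ["source.toml", "string.toml"]
    expect(tokens[5]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.end.toml"]
    expect(tokens[7]).toEqual value: "# a comment", scopes: ["source.toml", "comment.toml"]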