From 327ac8202271e669be629efff4c815a64db46345 Mon Sep 17 00:00:00 2001 From: Maxim Sokolov Date: Mon, 18 Jan 2016 23:06:06 +0300 Subject: [PATCH 1/3] Add support for object keys --- grammars/javascript.cson | 370 +++++++++++++++++++++++++----------- spec/javascript-spec.coffee | 252 ++++++++++++++---------- 2 files changed, 414 insertions(+), 208 deletions(-) diff --git a/grammars/javascript.cson b/grammars/javascript.cson index 6659da75..ac8cb08e 100644 --- a/grammars/javascript.cson +++ b/grammars/javascript.cson @@ -16,9 +16,163 @@ 'firstLineMatch': '^#!.*\\b(node|iojs|JavaScript)' 'name': 'JavaScript' 'patterns': [ + { + 'include': '#switch_statement' + } + { + # foo: function() { … } + 'begin': '''(?x) + (\\b([a-zA-Z_?.$][\\w?.$]*)) + \\s*(:)\\s* + \\b(?:(async)(?:\\s+))? + (function\\*?) + \\s*(\\*?)\\s* + ([a-zA-Z_?$][\\w?$]*)? + \\s*(\\() + ''' + 'beginCaptures': + '1': + 'name': 'meta.object-key.js' + '2': + 'name': 'entity.name.function.js' + '3': + 'name': 'keyword.operator.assignment.js' + '4': + 'name': 'storage.modifier.js' + '5': + 'name': 'storage.type.function.js' + '6': + 'name': 'storage.type.function.js' + '7': + 'name': 'entity.name.function.js' + '8': + 'name': 'punctuation.definition.parameters.begin.js' + 'end': '(\\))' + 'endCaptures': + '1': + 'name': 'punctuation.definition.parameters.end.js' + 'name': 'meta.function.json.js' + 'patterns': [ + { + 'include': '#function-params' + } + ] + } + { + # "foo": function() { … } + 'begin': '''(?x) + ((?:((\')([^\']*?)(\'))|((")([^"]*?)(")))) + \\s*(:)\\s* + \\b(?:(async)(?:\\s+))? + (function\\*?) + \\s*(\\*?)\\s* + ([a-zA-Z_?$][\\w?$]*)? + \\s*(\\() + ''' + 'beginCaptures': + '1': + 'name': 'meta.object-key.js' + '2': + 'name': 'string.quoted.single.js' + '3': + 'name': 'punctuation.definition.string.begin.js' + '5': + 'name': 'punctuation.definition.string.end.js' + '6': + 'name': 'string.quoted.double.js' + '7': + 'name': 'punctuation.definition.string.begin.js' + '9': + 'name': 'punctuation.definition.string.end.js' + '10': + 'name': 'keyword.operator.assignment.js' + '11': + 'name': 'storage.modifier.js' + '12': + 'name': 'storage.type.function.js' + '13': + 'name': 'storage.type.function.js' + '14': + 'name': 'entity.name.function.js' + '15': + 'name': 'punctuation.definition.parameters.begin.js' + 'end': '(\\))' + 'endCaptures': + '1': + 'name': 'punctuation.definition.parameters.end.js' + 'name': 'meta.function.json.js' + 'patterns': [ + { + 'include': '#function-params' + } + ] + } + { + 'begin': '\\?' 
+ 'beginCaptures': + '0': + 'name': 'keyword.operator.ternary.js' + 'end': ':' + 'endCaptures': + '0': + 'name': 'keyword.operator.ternary.js' + 'patterns': [ + { + 'match': '(\\w+)(?=\\s*:)' + 'captures': + '1': + 'patterns': [ + 'include': '$self' + ] + } + { + 'match': '(?:"[^"]*")|(?:\'[^\']*\')(?=\\s*:\\s*\\.*)' + 'captures': + '0': + 'patterns': [ + 'include': '#strings' + ] + } + { + 'include': '$self' + } + ] + } + { + 'match': '(?:^\\s*)([a-zA-Z_$][\\w$]*)\\s*(:)(?=\\n)' + 'captures': + '1': + 'name': 'entity.name.section.labeled-statement.js' + '2': + 'name': 'punctuation.definition.section.labeled-statement.js' + } + { + # key: value, KEY: value, 123: value, "key": "value" + 'match': '((?:"[^"]*")|(?:\'[^\']*\')|(?:[\\w$]+))(?=\\s*:\\s*\\.*)' + 'captures': + '0': + 'name': 'meta.object-key.js' + '1': + 'patterns':[ + { + 'include': '#strings' + } + { + 'include': '#numbers' + } + { + 'match': '[A-Z][A-Z0-9_$]*\\b\\$*' + 'name': 'constant.other.property.assignment.js' + } + { + 'match': '.+' + 'name': 'variable.other.property.assignment.js' + } + ] + } { 'comment': 'ES6 import' - 'begin': '(? expect(lines[2][1]).toEqual value: delim, scopes: ['source.js', scope, 'punctuation.definition.string.end.js'] describe "keywords", -> - it "tokenizes with as a keyword", -> - {tokens} = grammar.tokenizeLine('with') - expect(tokens[0]).toEqual value: 'with', scopes: ['source.js', 'keyword.control.js'] + keywords = ['await', 'break', 'catch', 'continue', 'do'] - map = - super: 'variable.language.js' - this: 'variable.language.js' - null: 'constant.language.null.js' - true: 'constant.language.boolean.true.js' - false: 'constant.language.boolean.false.js' - debugger: 'keyword.other.js' - exports: 'support.variable.js' - __filename: 'support.variable.js' - - for keyword, scope of map - do (keyword, scope) -> - it "does not tokenize `#{keyword}` when it is an object key", -> - {tokens} = grammar.tokenizeLine("#{keyword}: 1") - expect(tokens[0]).toEqual value: keyword, scopes: ['source.js'] - expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.js'] - - it "tokenizes `#{keyword}` in the middle of ternary expressions", -> - {tokens} = grammar.tokenizeLine("a ? #{keyword} : b") - expect(tokens[2]).toEqual value: ' ', scopes: ['source.js'] - expect(tokens[3]).toEqual value: keyword, scopes: ['source.js', scope] - - it "tokenizes `#{keyword}` at the end of ternary expressions", -> - {tokens} = grammar.tokenizeLine("a ? b : #{keyword}") - expect(tokens[4]).toEqual value: ' ', scopes: ['source.js'] - expect(tokens[5]).toEqual value: keyword, scopes: ['source.js', scope] + for keyword in keywords + it "tokenize `#{keyword}`", -> + {tokens} = grammar.tokenizeLine(keyword) + expect(tokens[0]).toEqual value: keyword, scopes: ['source.js', 'keyword.control.js'] describe "built-in globals", -> it "tokenizes them as support classes", -> @@ -144,21 +120,6 @@ describe "Javascript grammar", -> expect(tokens[6]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.end.js'] expect(tokens[7]).toEqual value: ']', scopes: ['source.js', 'meta.brace.square.js'] - it "tokenizes regular expressions inside ternary expressions", -> - {tokens} = grammar.tokenizeLine('a ? 
/b/ : /c/') - expect(tokens[ 0]).toEqual value: 'a ', scopes: ['source.js'] - expect(tokens[ 1]).toEqual value: '?', scopes: ['source.js', 'keyword.operator.js'] - expect(tokens[ 2]).toEqual value: ' ', scopes: ['source.js', 'string.regexp.js'] - expect(tokens[ 3]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.begin.js'] - expect(tokens[ 4]).toEqual value: 'b', scopes: ['source.js', 'string.regexp.js'] - expect(tokens[ 5]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.end.js'] - expect(tokens[ 6]).toEqual value: ' ', scopes: ['source.js'] - expect(tokens[ 7]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.js'] - expect(tokens[ 8]).toEqual value: ' ', scopes: ['source.js', 'string.regexp.js'] - expect(tokens[ 9]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.begin.js'] - expect(tokens[10]).toEqual value: 'c', scopes: ['source.js', 'string.regexp.js'] - expect(tokens[11]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.end.js'] - it "verifies that regular expressions have explicit count modifiers", -> source = fs.readFileSync(path.resolve(__dirname, '..', 'grammars', 'javascript.cson'), 'utf8') expect(source.search /{,/).toEqual -1 @@ -224,15 +185,6 @@ describe "Javascript grammar", -> expect(tokens[0]).toEqual value: 'i', scopes: ['source.js'] expect(tokens[1]).toEqual value: '--', scopes: ['source.js', 'keyword.operator.decrement.js'] - describe "conditional ternary", -> - it "tokenizes them", -> - {tokens} = grammar.tokenizeLine('test ? expr1 : expr2') - expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js'] - expect(tokens[1]).toEqual value: '?', scopes: ['source.js', 'keyword.operator.js'] - expect(tokens[2]).toEqual value: ' expr1 ', scopes: ['source.js'] - expect(tokens[3]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.js'] - expect(tokens[4]).toEqual value: ' expr2', scopes: ['source.js'] - describe "logical", -> operators = ["&&", "||", "!"] @@ -447,19 +399,6 @@ describe "Javascript grammar", -> expect(tokens[5]).toEqual value: 'systemLanguage', scopes: ['source.js', 'support.constant.js'] expect(tokens[6]).toEqual value: ';', scopes: ['source.js', 'punctuation.terminator.statement.js'] - it "does not tokenize constants when they are object keys", -> - {tokens} = grammar.tokenizeLine('FOO: 1') - expect(tokens[0]).toEqual value: 'FOO', scopes: ['source.js'] - expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.js'] - - it "tokenizes constants in the middle of ternary expressions", -> - {tokens} = grammar.tokenizeLine('a ? FOO : b') - expect(tokens[3]).toEqual value: 'FOO', scopes: ['source.js', 'constant.other.js'] - - it "tokenizes constants at the end of ternary expressions", -> - {tokens} = grammar.tokenizeLine('a ? 
b : FOO') - expect(tokens[5]).toEqual value: 'FOO', scopes: ['source.js', 'constant.other.js'] - describe "ES6 string templates", -> it "tokenizes them as strings", -> {tokens} = grammar.tokenizeLine('`hey ${name}`') @@ -696,16 +635,16 @@ describe "Javascript grammar", -> {tokens} = grammar.tokenizeLine(''' export default { - 'prop': 'value' + 'key': 'value' }; ''') expect(tokens[0]).toEqual value: 'export', scopes: ['source.js', 'meta.export.js', 'keyword.control.js'] expect(tokens[2]).toEqual value: 'default', scopes: ['source.js', 'meta.export.js', 'variable.language.default.js'] expect(tokens[4]).toEqual value: '{', scopes: ['source.js', 'meta.brace.curly.js'] - expect(tokens[6]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.begin.js'] - expect(tokens[7]).toEqual value: "prop", scopes: ['source.js', 'string.quoted.single.js'] - expect(tokens[8]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.end.js'] - expect(tokens[9]).toEqual value: ":", scopes: ['source.js', 'keyword.operator.js'] + expect(tokens[6]).toEqual value: "'", scopes: ['source.js', 'meta.object-key.js', 'string.quoted.single.js', 'punctuation.definition.string.begin.js'] + expect(tokens[7]).toEqual value: "key", scopes: ['source.js', 'meta.object-key.js', 'string.quoted.single.js'] + expect(tokens[8]).toEqual value: "'", scopes: ['source.js', 'meta.object-key.js', 'string.quoted.single.js', 'punctuation.definition.string.end.js'] + expect(tokens[9]).toEqual value: ":", scopes: ['source.js', 'keyword.operator.assignment.js'] expect(tokens[11]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.begin.js'] expect(tokens[12]).toEqual value: "value", scopes: ['source.js', 'string.quoted.single.js'] expect(tokens[13]).toEqual value: "'", scopes: ['source.js', 'string.quoted.single.js', 'punctuation.definition.string.end.js'] @@ -797,23 +736,152 @@ describe "Javascript grammar", -> expect(tokens[0]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js'] expect(tokens[2]).toEqual value: '*', scopes: ['source.js', 'meta.control.yield.js', 'storage.modifier.js'] - it "does not tokenize yield when it is an object key", -> - {tokens} = grammar.tokenizeLine('yield: 1') - expect(tokens[0]).toEqual value: 'yield', scopes: ['source.js'] - expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.js'] + describe "ternary expressions", -> + map = + FOO: 'constant.other.js' + super: 'variable.language.js' + this: 'variable.language.js' + null: 'constant.language.null.js' + true: 'constant.language.boolean.true.js' + false: 'constant.language.boolean.false.js' + debugger: 'keyword.other.js' + exports: 'support.variable.js' + __filename: 'support.variable.js' + + for keyword, scope of map + do (keyword, scope) -> + it "tokenizes `#{keyword}` in the middle of ternary expressions", -> + {tokens} = grammar.tokenizeLine("a ? #{keyword} : b") + expect(tokens[3]).toEqual value: keyword, scopes: ['source.js', scope] + + it "tokenizes `#{keyword}` at the end of ternary expressions", -> + {tokens} = grammar.tokenizeLine("a ? b : #{keyword}") + expect(tokens[7]).toEqual value: keyword, scopes: ['source.js', scope] + + it "tokenizes yield at the end of ternary expressions", -> + {tokens} = grammar.tokenizeLine('a ? 
b : yield') + expect(tokens[7]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js'] it "tokenizes yield in the middle of ternary expressions", -> {tokens} = grammar.tokenizeLine('a ? yield : b') expect(tokens[3]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js'] - it "tokenizes yield at the end of ternary expressions", -> - {tokens} = grammar.tokenizeLine('a ? b : yield') - expect(tokens[5]).toEqual value: 'yield', scopes: ['source.js', 'meta.control.yield.js', 'keyword.control.js'] + it "tokenizes regular expressions inside ternary expressions", -> + {tokens} = grammar.tokenizeLine('a ? /b/ : /c/') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.js'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.js', 'keyword.operator.ternary.js'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.js', 'string.regexp.js'] + expect(tokens[3]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.begin.js'] + expect(tokens[4]).toEqual value: 'b', scopes: ['source.js', 'string.regexp.js'] + expect(tokens[5]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.end.js'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.js'] + expect(tokens[7]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.ternary.js'] + expect(tokens[8]).toEqual value: ' ', scopes: ['source.js', 'string.regexp.js'] + expect(tokens[9]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.begin.js'] + expect(tokens[10]).toEqual value: 'c', scopes: ['source.js', 'string.regexp.js'] + expect(tokens[11]).toEqual value: '/', scopes: ['source.js', 'string.regexp.js', 'punctuation.definition.string.end.js'] + + it "tokenizes object literals in the middle of ternary expressions", -> + {tokens} = grammar.tokenizeLine('a ? 
{key: value} : b') + expect(tokens[1]).toEqual value: '?', scopes: ['source.js', 'keyword.operator.ternary.js'] + expect(tokens[9]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.ternary.js'] + + describe "object literals", -> + it "tokenizes object keys", -> + {tokens} = grammar.tokenizeLine('foo: 1') + expect(tokens[0]).toEqual value: 'foo', scopes: ['source.js', 'meta.object-key.js', 'variable.other.property.assignment.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + {tokens} = grammar.tokenizeLine('$abc$: 1') + expect(tokens[0]).toEqual value: '$abc$', scopes: ['source.js', 'meta.object-key.js', 'variable.other.property.assignment.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + {tokens} = grammar.tokenizeLine('0abc: 1') + expect(tokens[0]).toEqual value: '0abc', scopes: ['source.js', 'meta.object-key.js', 'variable.other.property.assignment.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + {tokens} = grammar.tokenizeLine('"key": 1') + expect(tokens[0]).toEqual value: '"', scopes: ['source.js', 'meta.object-key.js', 'string.quoted.double.js', 'punctuation.definition.string.begin.js'] + expect(tokens[1]).toEqual value: 'key', scopes: ['source.js', 'meta.object-key.js', 'string.quoted.double.js'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.js', 'meta.object-key.js', 'string.quoted.double.js', 'punctuation.definition.string.end.js'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + it "tokenizes numbers when they are object keys", -> + {tokens} = grammar.tokenizeLine('123: 1') + expect(tokens[0]).toEqual value: '123', scopes: ['source.js', 'meta.object-key.js', 'constant.numeric.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + it "tokenizes constants when they are object keys", -> + {tokens} = grammar.tokenizeLine('FOO: 1') + expect(tokens[0]).toEqual value: 'FOO', scopes: ['source.js', 'meta.object-key.js', 'constant.other.property.assignment.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + it "tokenizes object functions", -> + {tokens} = grammar.tokenizeLine('foo: function nonAnonymous(') + expect(tokens[0]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'meta.object-key.js', 'entity.name.function.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.assignment.js'] + expect(tokens[3]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js'] + expect(tokens[5]).toEqual value: 'nonAnonymous', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js'] + + {tokens} = grammar.tokenizeLine('"foo": function nonAnonymous(') + expect(tokens[0]).toEqual value: '"', scopes: ['source.js', 'meta.function.json.js', 'meta.object-key.js', 'string.quoted.double.js', 'punctuation.definition.string.begin.js'] + expect(tokens[1]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'meta.object-key.js', 'string.quoted.double.js'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.js', 'meta.function.json.js', 'meta.object-key.js', 'string.quoted.double.js', 
'punctuation.definition.string.end.js'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.assignment.js'] + expect(tokens[5]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js'] + expect(tokens[7]).toEqual value: 'nonAnonymous', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js'] + expect(tokens[8]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js'] + + keywords = ['yield', 'super', 'this', 'null', 'true', 'false', 'debugger', 'exports', '__filename'] + + for keyword in keywords + it "tokenizes `#{keyword}` when it is an object key", -> + {tokens} = grammar.tokenizeLine("#{keyword}: 1") + expect(tokens[0]).toEqual value: keyword, scopes: ['source.js', 'meta.object-key.js', 'variable.other.property.assignment.js'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'keyword.operator.assignment.js'] + + describe "switch statements", -> + it "tokenizes `switch` keyword", -> + {tokens} = grammar.tokenizeLine('switch(){}') + expect(tokens[0]).toEqual value: 'switch', scopes: ['source.js', 'keyword.control.switch.js'] - describe "default: in a switch statement", -> - it "tokenizes it as a keyword", -> - {tokens} = grammar.tokenizeLine('default: ') - expect(tokens[0]).toEqual value: 'default', scopes: ['source.js', 'keyword.control.js'] + it "tokenizes switch expression", -> + {tokens} = grammar.tokenizeLine('switch(1+1){}') + expect(tokens[1]).toEqual value: '(', scopes: ['source.js', 'punctuation.definition.switch-expression.begin.js'] + expect(tokens[2]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.js'] + expect(tokens[3]).toEqual value: '+', scopes: ['source.js', 'keyword.operator.js'] + expect(tokens[4]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.js'] + expect(tokens[5]).toEqual value: ')', scopes: ['source.js', 'punctuation.definition.switch-expression.end.js'] + + it "tokenizes switch block", -> + lines = grammar.tokenizeLines ''' + switch (foo()) + { + case a: + case 1+1: + 2+2 + break; + default: + } + ''' + expect(lines[1][0]).toEqual value: '{', scopes: ['source.js', 'punctuation.definition.section.switch-block.begin.js'] + expect(lines[2][1]).toEqual value: 'case', scopes: ['source.js', 'keyword.control.case.js'] + expect(lines[2][3]).toEqual value: 'a', scopes: ['source.js'] + expect(lines[2][4]).toEqual value: ':', scopes: ['source.js', 'punctuation.definition.section.case-statements.js'] + expect(lines[3][1]).toEqual value: 'case', scopes: ['source.js', 'keyword.control.case.js'] + expect(lines[3][3]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.js'] + expect(lines[3][4]).toEqual value: '+', scopes: ['source.js', 'keyword.operator.js'] + expect(lines[3][5]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.js'] + expect(lines[3][6]).toEqual value: ':', scopes: ['source.js', 'punctuation.definition.section.case-statements.js'] + expect(lines[4][1]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js'] + expect(lines[4][2]).toEqual value: '+', scopes: ['source.js', 'keyword.operator.js'] + expect(lines[4][3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js'] + expect(lines[5][1]).toEqual value: 'break', scopes: ['source.js', 'keyword.control.js'] + expect(lines[5][2]).toEqual value: ';', scopes: ['source.js', 'punctuation.terminator.statement.js'] + expect(lines[6][1]).toEqual value: 'default', scopes: 
['source.js', 'keyword.control.default.js'] + expect(lines[6][2]).toEqual value: ':', scopes: ['source.js', 'punctuation.definition.section.case-statements.js'] + expect(lines[7][0]).toEqual value: '}', scopes: ['source.js', 'punctuation.definition.section.switch-block.end.js'] describe "non-anonymous functions", -> it "tokenizes regular functions", -> @@ -880,22 +948,6 @@ describe "Javascript grammar", -> expect(tokens[8]).toEqual value: 'nonAnonymous', scopes: ['source.js', 'meta.function.js', 'entity.name.function.js'] expect(tokens[9]).toEqual value: '(', scopes: ['source.js', 'meta.function.js', 'punctuation.definition.parameters.begin.js'] - it "tokenizes object functions", -> - {tokens} = grammar.tokenizeLine('foo: function nonAnonymous(') - expect(tokens[0]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js'] - expect(tokens[1]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.assignment.js'] - expect(tokens[3]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js'] - expect(tokens[5]).toEqual value: 'nonAnonymous', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js'] - expect(tokens[6]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js'] - - it "tokenizes quoted object functions", -> - {tokens} = grammar.tokenizeLine('"foo": function nonAnonymous(') - expect(tokens[1]).toEqual value: 'foo', scopes: ['source.js', 'meta.function.json.js', 'string.quoted.double.js', 'entity.name.function.js'] - expect(tokens[3]).toEqual value: ':', scopes: ['source.js', 'meta.function.json.js', 'keyword.operator.assignment.js'] - expect(tokens[5]).toEqual value: 'function', scopes: ['source.js', 'meta.function.json.js', 'storage.type.function.js'] - expect(tokens[7]).toEqual value: 'nonAnonymous', scopes: ['source.js', 'meta.function.json.js', 'entity.name.function.js'] - expect(tokens[8]).toEqual value: '(', scopes: ['source.js', 'meta.function.json.js', 'punctuation.definition.parameters.begin.js'] - it "tokenizes async functions", -> {tokens} = grammar.tokenizeLine('async function f(){}') expect(tokens[0]).toEqual value: 'async', scopes: ['source.js', 'meta.function.js', 'storage.modifier.js'] @@ -988,8 +1040,8 @@ describe "Javascript grammar", -> expect(tokens[7]).toEqual value: '"', scopes: ['source.js', 'meta.function-call.js', 'string.quoted.double.js', 'punctuation.definition.string.end.js'] expect(tokens[8]).toEqual value: ',', scopes: ['source.js', 'meta.function-call.js', 'meta.delimiter.object.comma.js'] expect(tokens[10]).toEqual value: '{', scopes: ['source.js', 'meta.function-call.js', 'meta.brace.curly.js'] - expect(tokens[11]).toEqual value: 'a', scopes: ['source.js', 'meta.function-call.js'] - expect(tokens[12]).toEqual value: ':', scopes: ['source.js', 'meta.function-call.js', 'keyword.operator.js'] + expect(tokens[11]).toEqual value: 'a', scopes: ['source.js', 'meta.function-call.js', 'meta.object-key.js', 'variable.other.property.assignment.js'] + expect(tokens[12]).toEqual value: ':', scopes: ['source.js', 'meta.function-call.js', 'keyword.operator.assignment.js'] expect(tokens[14]).toEqual value: '123', scopes: ['source.js', 'meta.function-call.js', 'constant.numeric.js'] expect(tokens[15]).toEqual value: '}', scopes: ['source.js', 'meta.function-call.js', 'meta.brace.curly.js'] expect(tokens[16]).toEqual value: ')', scopes: ['source.js', 'meta.function-call.js', 
'punctuation.definition.arguments.end.js'] From 1cf979ad6c52f55adbaeb38c136b10ec60552c74 Mon Sep 17 00:00:00 2001 From: Maxim Sokolov Date: Thu, 21 Jan 2016 22:27:28 +0300 Subject: [PATCH 2/3] :bug: Add missed word boundary --- grammars/javascript.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/javascript.cson b/grammars/javascript.cson index ac8cb08e..542d9f53 100644 --- a/grammars/javascript.cson +++ b/grammars/javascript.cson @@ -829,7 +829,7 @@ 'name': 'constant.language.boolean.$1.js' } { - 'match': '(? Date: Thu, 21 Jan 2016 22:40:07 +0300 Subject: [PATCH 3/3] :fire: --- grammars/javascript.cson | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/grammars/javascript.cson b/grammars/javascript.cson index 542d9f53..4b1c6f0d 100644 --- a/grammars/javascript.cson +++ b/grammars/javascript.cson @@ -798,16 +798,12 @@ ] } { - 'match': '(?
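
A minimal JavaScript sketch of the constructs these grammar patterns are intended to scope. The identifiers (obj, named, cond, label) are illustrative only, not taken from the patch; the scope names in the comments follow the spec expectations above.

    var cond = true;

    var obj = {
      foo: function named() {},   // unquoted key + object function -> meta.function.json.js, key as entity.name.function.js
      "bar": function () {},      // quoted key + object function   -> meta.function.json.js, key inside meta.object-key.js
      KEY: 1,                     // uppercase key                  -> constant.other.property.assignment.js
      123: 2,                     // numeric key                    -> constant.numeric.js inside meta.object-key.js
      baz: 3                      // plain key                      -> variable.other.property.assignment.js
    };

    label:                        // key-like token followed by a newline -> entity.name.section.labeled-statement.js
    for (var i = 0; i < 1; i++) {}

    var x = cond ? obj : null;    // '?' and ':' scoped as keyword.operator.ternary.js, not as object-key assignment

    switch (x) {                  // handled by #switch_statement: keyword.control.switch.js,
      case 1:                     // keyword.control.case.js, ':' as punctuation.definition.section.case-statements.js
        break;
      default:                    // keyword.control.default.js
    }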