
Commit 4214478

fix: add numeric separators

1 parent 16b9f3e
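For context (an illustrative note, not part of the commit message): ES2021 numeric separators let an underscore sit between digits purely for readability, for example:

    const budget = 1_000_000    // same value as 1000000
    const mask = 0b1111_0000    // groups binary digits in fours

The existing numeric patterns had no rule for the underscore, so such literals broke the numeric scope; the diff below adds `(?:_[digits]+)*` groups to each pattern and a shared `#numeric_separators` rule that scopes the underscores.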

File tree: 2 files changed, +165 -21 lines

grammars/javascript.cson (+46 -21)
@@ -1245,45 +1245,70 @@
   'numbers':
     'patterns': [
       {
-        'match': '\\b(?<!\\$)0(x|X)[0-9a-fA-F]+n?\\b(?!\\$)'
+        'match': '\\b(?<!\\$)0(x|X)[0-9a-fA-F]+(?:_[0-9a-fA-F]+)*n?\\b(?!\\$)'
         'name': 'constant.numeric.hex.js'
+        'captures':
+          '0':
+            'patterns': [
+              {
+                'include': '#numeric_separators'
+              }
+            ]
       }
       {
-        'match': '\\b(?<!\\$)0(b|B)[01]+n?\\b(?!\\$)'
+        'match': '\\b(?<!\\$)0(b|B)[01]+(?:_[01]+)*n?\\b(?!\\$)'
         'name': 'constant.numeric.binary.js'
+        'captures':
+          '0':
+            'patterns': [
+              {
+                'include': '#numeric_separators'
+              }
+            ]
       }
       {
-        'match': '\\b(?<!\\$)0(o|O)?[0-7]+n?\\b(?!\\$)'
+        'match': '\\b(?<!\\$)0(o|O)?[0-7]+(?:_[0-7]+)*n?\\b(?!\\$)'
         'name': 'constant.numeric.octal.js'
+        'captures':
+          '0':
+            'patterns': [
+              {
+                'include': '#numeric_separators'
+              }
+            ]
       }
       {
         'match': '''(?x)
           (?<!\\$)(?:
-            (?:\\b[0-9]+(\\.)[0-9]+[eE][+-]?[0-9]+\\b)| # 1.1E+3
-            (?:\\b[0-9]+(\\.)[eE][+-]?[0-9]+\\b)| # 1.E+3
-            (?:\\B(\\.)[0-9]+[eE][+-]?[0-9]+\\b)| # .1E+3
-            (?:\\b[0-9]+[eE][+-]?[0-9]+\\b)| # 1E+3
-            (?:\\b[0-9]+(\\.)[0-9]+\\b)| # 1.1
-            (?:\\b[0-9]+(\\.)\\B)| # 1.
-            (?:\\B(\\.)[0-9]+\\b)| # .1
-            (?:\\b[0-9]+n?\\b(?!\\.)) # 1n
+            (?:\\b[0-9]+(?:_[0-9]+)*\\.[0-9]+(?:_[0-9]+)*[eE][+-]?[0-9]+(?:_[0-9]+)*\\b)| # 1.1E+3
+            (?:\\b[0-9]+(?:_[0-9]+)*\\.[eE][+-]?[0-9]+(?:_[0-9]+)*\\b)| # 1.E+3
+            (?:\\B\\.[0-9]+(?:_[0-9]+)*[eE][+-]?[0-9]+(?:_[0-9]+)*\\b)| # .1E+3
+            (?:\\b[0-9]+(?:_[0-9]+)*[eE][+-]?[0-9]+(?:_[0-9]+)*\\b)| # 1E+3
+            (?:\\b[0-9]+(?:_[0-9]+)*\\.[0-9]+(?:_[0-9]+)*\\b)| # 1.1
+            (?:\\b[0-9]+(?:_[0-9]+)*\\.\\B)| # 1.
+            (?:\\B\\.[0-9]+(?:_[0-9]+)*\\b)| # .1
+            (?:\\b[0-9]+(?:_[0-9]+)*n?\\b(?!\\.)) # 1n
           )(?!\\$)
         '''
+        'name': 'constant.numeric.decimal.js'
         'captures':
           '0':
-            'name': 'constant.numeric.decimal.js'
+            'patterns': [
+              {
+                'include': '#numeric_separators'
+              }
+            ]
+      }
+    ]
+  'numeric_separators':
+    'patterns': [
+      {
+        'match': '(_)|(\\.)'
+        'captures':
           '1':
-            'name': 'meta.delimiter.decimal.period.js'
+            'name': 'meta.delimiter.numeric.separator.js'
           '2':
             'name': 'meta.delimiter.decimal.period.js'
-          '3':
-            'name': 'meta.delimiter.decimal.period.js'
-          '4':
-            'name': 'meta.delimiter.decimal.period.js'
-          '5':
-            'name': 'meta.delimiter.decimal.period.js'
-          '6':
-            'name': 'meta.delimiter.decimal.period.js'
       }
     ]
   'operators':
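A minimal sketch of the JavaScript literals the updated patterns are intended to cover (illustrative only; the variable names are made up and the snippet is not part of the commit):

    const hex = 0xFF_EC_DE_5E        // constant.numeric.hex.js
    const bin = 0b1010_0001_1000     // constant.numeric.binary.js
    const oct = 0o7_1_2              // constant.numeric.octal.js
    const dec = 1_000_000.123_456    // constant.numeric.decimal.js
    const exp = 1_1.1_1e1_1          // separators allowed around exponent digits too
    const big = 123_456_789n         // BigInt suffix still matches after separator groups

Within each matched literal, the shared `#numeric_separators` include re-scopes every `_` as meta.delimiter.numeric.separator.js and every `.` as meta.delimiter.decimal.period.js, which is what the spec changes below assert.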

spec/javascript-spec.coffee (+119 -0)
@@ -323,6 +323,11 @@ describe "JavaScript grammar", ->
     {tokens} = grammar.tokenizeLine('0X1D306n')
     expect(tokens[0]).toEqual value: '0X1D306n', scopes: ['source.js', 'constant.numeric.hex.js']
 
+    {tokens} = grammar.tokenizeLine('0X1D30_69A3')
+    expect(tokens[0]).toEqual value: '0X1D30', scopes: ['source.js', 'constant.numeric.hex.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.hex.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '69A3', scopes: ['source.js', 'constant.numeric.hex.js']
+
   it "tokenizes binary literals", ->
     {tokens} = grammar.tokenizeLine('0b011101110111010001100110')
     expect(tokens[0]).toEqual value: '0b011101110111010001100110', scopes: ['source.js', 'constant.numeric.binary.js']
@@ -336,6 +341,19 @@ describe "JavaScript grammar", ->
     {tokens} = grammar.tokenizeLine('0B011101110111010001100110n')
     expect(tokens[0]).toEqual value: '0B011101110111010001100110n', scopes: ['source.js', 'constant.numeric.binary.js']
 
+    {tokens} = grammar.tokenizeLine('0B0111_0111_0111_0100_0110_0110')
+    expect(tokens[0]).toEqual value: '0B0111', scopes: ['source.js', 'constant.numeric.binary.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.binary.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '0111', scopes: ['source.js', 'constant.numeric.binary.js']
+    expect(tokens[3]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.binary.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[4]).toEqual value: '0111', scopes: ['source.js', 'constant.numeric.binary.js']
+    expect(tokens[5]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.binary.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[6]).toEqual value: '0100', scopes: ['source.js', 'constant.numeric.binary.js']
+    expect(tokens[7]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.binary.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[8]).toEqual value: '0110', scopes: ['source.js', 'constant.numeric.binary.js']
+    expect(tokens[9]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.binary.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[10]).toEqual value: '0110', scopes: ['source.js', 'constant.numeric.binary.js']
+
   it "tokenizes octal literals", ->
     {tokens} = grammar.tokenizeLine('0o1411')
     expect(tokens[0]).toEqual value: '0o1411', scopes: ['source.js', 'constant.numeric.octal.js']
@@ -352,13 +370,32 @@ describe "JavaScript grammar", ->
     {tokens} = grammar.tokenizeLine('0010')
     expect(tokens[0]).toEqual value: '0010', scopes: ['source.js', 'constant.numeric.octal.js']
 
+    {tokens} = grammar.tokenizeLine('0010_7201_5112')
+    expect(tokens[0]).toEqual value: '0010', scopes: ['source.js', 'constant.numeric.octal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.octal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '7201', scopes: ['source.js', 'constant.numeric.octal.js']
+    expect(tokens[3]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.octal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[4]).toEqual value: '5112', scopes: ['source.js', 'constant.numeric.octal.js']
+
+    {tokens} = grammar.tokenizeLine('0O1411_1236')
+    expect(tokens[0]).toEqual value: '0O1411', scopes: ['source.js', 'constant.numeric.octal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.octal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '1236', scopes: ['source.js', 'constant.numeric.octal.js']
+
   it "tokenizes decimals", ->
     {tokens} = grammar.tokenizeLine('1234')
     expect(tokens[0]).toEqual value: '1234', scopes: ['source.js', 'constant.numeric.decimal.js']
 
     {tokens} = grammar.tokenizeLine('123456789n')
     expect(tokens[0]).toEqual value: '123456789n', scopes: ['source.js', 'constant.numeric.decimal.js']
 
+    {tokens} = grammar.tokenizeLine('123_456_789n')
+    expect(tokens[0]).toEqual value: '123', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '456', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[4]).toEqual value: '789n', scopes: ['source.js', 'constant.numeric.decimal.js']
+
     {tokens} = grammar.tokenizeLine('5e-10')
     expect(tokens[0]).toEqual value: '5e-10', scopes: ['source.js', 'constant.numeric.decimal.js']
 
@@ -369,31 +406,113 @@ describe "JavaScript grammar", ->
     expect(tokens[0]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
     expect(tokens[1]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
 
+    {tokens} = grammar.tokenizeLine('9_9.')
+    expect(tokens[0]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
+
     {tokens} = grammar.tokenizeLine('.9')
     expect(tokens[0]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
     expect(tokens[1]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
 
+    {tokens} = grammar.tokenizeLine('.9_9')
+    expect(tokens[0]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
+    expect(tokens[1]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[2]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[3]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+
     {tokens} = grammar.tokenizeLine('9.9')
     expect(tokens[0]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
     expect(tokens[1]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
     expect(tokens[2]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
 
+    {tokens} = grammar.tokenizeLine('9_9.9_9')
+    expect(tokens[0]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
+    expect(tokens[4]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[5]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[6]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+
     {tokens} = grammar.tokenizeLine('.1e-23')
     expect(tokens[0]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
     expect(tokens[1]).toEqual value: '1e-23', scopes: ['source.js', 'constant.numeric.decimal.js']
 
+    {tokens} = grammar.tokenizeLine('.1_1E+1_1')
+    expect(tokens[0]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
+    expect(tokens[1]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[2]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[3]).toEqual value: '1E+1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[4]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[5]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+
     {tokens} = grammar.tokenizeLine('1.E3')
     expect(tokens[0]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
     expect(tokens[1]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
     expect(tokens[2]).toEqual value: 'E3', scopes: ['source.js', 'constant.numeric.decimal.js']
 
+    {tokens} = grammar.tokenizeLine('1_1.E-1_1')
+    expect(tokens[0]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
+    expect(tokens[4]).toEqual value: 'E-1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[5]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[6]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+
+    {tokens} = grammar.tokenizeLine('1_1.1_1E1_1')
+    expect(tokens[0]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '.', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.decimal.period.js']
+    expect(tokens[4]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[5]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[6]).toEqual value: '1E1', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[7]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[8]).toEqual value: '1', scopes: ['source.js', 'constant.numeric.decimal.js']
+
+    {tokens} = grammar.tokenizeLine('9_9')
+    expect(tokens[0]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+
+    {tokens} = grammar.tokenizeLine('9_9_9')
+    expect(tokens[0]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[4]).toEqual value: '9', scopes: ['source.js', 'constant.numeric.decimal.js']
+
+    {tokens} = grammar.tokenizeLine('999_999_999')
+    expect(tokens[0]).toEqual value: '999', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[1]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[2]).toEqual value: '999', scopes: ['source.js', 'constant.numeric.decimal.js']
+    expect(tokens[3]).toEqual value: '_', scopes: ['source.js', 'constant.numeric.decimal.js', 'meta.delimiter.numeric.separator.js']
+    expect(tokens[4]).toEqual value: '999', scopes: ['source.js', 'constant.numeric.decimal.js']
+
   it "does not tokenize numbers that are part of a variable", ->
     {tokens} = grammar.tokenizeLine('hi$1')
     expect(tokens[0]).toEqual value: 'hi$1', scopes: ['source.js']
 
     {tokens} = grammar.tokenizeLine('hi_1')
     expect(tokens[0]).toEqual value: 'hi_1', scopes: ['source.js']
 
+    {tokens} = grammar.tokenizeLine('_1')
+    expect(tokens[0]).toEqual value: '_1', scopes: ['source.js', 'constant.other.js']
+
+    {tokens} = grammar.tokenizeLine('1_')
+    expect(tokens[0]).toEqual value: '1_', scopes: ['source.js', 'invalid.illegal.identifier.js']
+
+    {tokens} = grammar.tokenizeLine('1_._1')
+    expect(tokens[0]).toEqual value: '1_', scopes: ['source.js', 'invalid.illegal.identifier.js']
+    expect(tokens[1]).toEqual value: '.', scopes: ['source.js', 'meta.delimiter.property.period.js']
+    expect(tokens[2]).toEqual value: '_1', scopes: ['source.js', 'variable.other.property.js']
+
+    {tokens} = grammar.tokenizeLine('1__1')
+    expect(tokens[0]).toEqual value: '1__1', scopes: ['source.js', 'invalid.illegal.identifier.js']
+
   describe "operators", ->
     it "tokenizes them", ->
       operators = ["delete", "in", "of", "instanceof", "new", "typeof", "void"]
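A rough summary, in plain JavaScript, of the identifier and illegal-separator cases the new spec lines assert (behavior taken from the expectations above; the snippet is illustrative and not part of the commit):

    const _1 = 1       // `_1` is an ordinary identifier (scoped constant.other.js above), never a number
    // const a = 1_    // SyntaxError: a separator cannot end a literal; the grammar flags `1_` as invalid.illegal.identifier.js
    // const b = 1__1  // SyntaxError: only one underscore may separate digits; `1__1` is flagged the same way
    // const c = 1_._1 // tokenized above as the illegal `1_`, a property period, then the property name `_1`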
