@@ -514,6 +514,17 @@ describe("Grammar Tests", function() {
     expect(tokens[1][3].scopes).toEqual(["source.python","meta.function-call.python","punctuation.definition.arguments.end.python"]);
   });
 
+  it("test/builtins/builtins7.py",
+  function() {
+    tokens = grammar.tokenizeLines("breakpoint()")
+    expect(tokens[0][0].value).toBe("breakpoint");
+    expect(tokens[0][0].scopes).toEqual(["source.python","meta.function-call.python","support.function.builtin.python"]);
+    expect(tokens[0][1].value).toBe("(");
+    expect(tokens[0][1].scopes).toEqual(["source.python","meta.function-call.python","punctuation.definition.arguments.begin.python"]);
+    expect(tokens[0][2].value).toBe(")");
+    expect(tokens[0][2].scopes).toEqual(["source.python","meta.function-call.python","punctuation.definition.arguments.end.python"]);
+  });
+
   it("test/calls/call1.py",
   function() {
     tokens = grammar.tokenizeLines("some_call(A, b, c[1], *args, FOO=lambda:{'q': 42}, **kwargs)")
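The hunk above adds coverage for the breakpoint() built-in introduced in Python 3.7 (PEP 553). As a minimal sketch (not part of the diff), this is how the assertions index into the tokenizeLines() result, assuming the Atom/first-mate-style API used throughout the spec, where each input line yields an array of {value, scopes} tokens:

// Minimal sketch, not part of the diff: the shape the builtins7.py assertions rely on.
const tokens = grammar.tokenizeLines("breakpoint()");
// tokens[0] holds the tokens for the single input line "breakpoint()":
//   tokens[0][0] -> { value: "breakpoint", scopes: [..., "support.function.builtin.python"] }
//   tokens[0][1] -> { value: "(",          scopes: [..., "punctuation.definition.arguments.begin.python"] }
//   tokens[0][2] -> { value: ")",          scopes: [..., "punctuation.definition.arguments.end.python"] }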
@@ -6177,6 +6188,31 @@ describe("Grammar Tests", function() {
     expect(tokens[1][0].scopes).toEqual(["source.python"]);
   });
 
+  it("test/expressions/special2.py",
+  function() {
+    tokens = grammar.tokenizeLines("__post_init__\ndef __class_getitem__(): pass\n__mro_entries__")
+    expect(tokens[0][0].value).toBe("__post_init__");
+    expect(tokens[0][0].scopes).toEqual(["source.python","support.variable.magic.python"]);
+    expect(tokens[1][0].value).toBe("def");
+    expect(tokens[1][0].scopes).toEqual(["source.python","meta.function.python","storage.type.function.python"]);
+    expect(tokens[1][1].value).toBe(" ");
+    expect(tokens[1][1].scopes).toEqual(["source.python","meta.function.python"]);
+    expect(tokens[1][2].value).toBe("__class_getitem__");
+    expect(tokens[1][2].scopes).toEqual(["source.python","meta.function.python","support.variable.magic.python"]);
+    expect(tokens[1][3].value).toBe("(");
+    expect(tokens[1][3].scopes).toEqual(["source.python","meta.function.python","meta.function.parameters.python","punctuation.definition.parameters.begin.python"]);
+    expect(tokens[1][4].value).toBe(")");
+    expect(tokens[1][4].scopes).toEqual(["source.python","meta.function.python","meta.function.parameters.python","punctuation.definition.parameters.end.python"]);
+    expect(tokens[1][5].value).toBe(":");
+    expect(tokens[1][5].scopes).toEqual(["source.python","meta.function.python","punctuation.section.function.begin.python"]);
+    expect(tokens[1][6].value).toBe(" ");
+    expect(tokens[1][6].scopes).toEqual(["source.python"]);
+    expect(tokens[1][7].value).toBe("pass");
+    expect(tokens[1][7].scopes).toEqual(["source.python","keyword.control.flow.python"]);
+    expect(tokens[2][0].value).toBe("__mro_entries__");
+    expect(tokens[2][0].scopes).toEqual(["source.python","support.variable.magic.python"]);
+  });
+
   it("test/fstrings/comment1.py",
   function() {
     tokens = grammar.tokenizeLines("f'prefix{10 # comment, making the string technically illegal\ndef foo(): pass")
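The second hunk covers magic names added around Python 3.7: __class_getitem__ and __mro_entries__ come from PEP 560, and __post_init__ is the dataclasses hook from PEP 557. A minimal sketch (not part of the diff) of how the three-line fixture maps onto the outer index of the tokenizeLines() result, under the same assumed API as above:

// Minimal sketch, not part of the diff: one token array per "\n"-separated input line.
const tokens = grammar.tokenizeLines(
    "__post_init__\ndef __class_getitem__(): pass\n__mro_entries__");
// tokens[0] -> "__post_init__"                 (one support.variable.magic.python token)
// tokens[1] -> "def __class_getitem__(): pass" (tokens[1][0] .. tokens[1][7]: def, space, name, parens, ":", space, pass)
// tokens[2] -> "__mro_entries__"               (again support.variable.magic.python)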