@@ -5,7 +5,7 @@ describe("parser", () => {
 
 	it("schema", () => {
 
-		const tokens = Parser.tokenize("a[a*][/a*]")
+		const tokens = Parser.tagTokens("a[a*][/a*]")
 		assert.strictEqual(tokens instanceof Array, true)
 		assert.strictEqual(tokens.length, 2)
 		assert.strictEqual(Object.keys(tokens[0]).length, 3)
@@ -42,59 +42,59 @@ describe("parser", () => {
 
 	it("text", () => {
 
-		assert.strictEqual(Parser.tokenize("This is some text")[0].buffer, "This is some text")
+		assert.strictEqual(Parser.tagTokens("This is some text")[0].buffer, "This is some text")
 
 	})
 
 
 	it("invalidTags", () => {
 
-		assert.strictEqual(Parser.tokenize("[u]")[0].name, Token.NAME.TEXT)
-		assert.strictEqual(Parser.tokenize("[u][u]")[0].name, Token.NAME.TEXT)
-		assert.strictEqual(Parser.tokenize("[/u]")[0].name, Token.NAME.TEXT)
-		assert.strictEqual(Parser.tokenize("[/u][/u]")[0].name, Token.NAME.TEXT)
-		assert.strictEqual(Parser.tokenize("[test={][/test]")[0].name, Token.NAME.TEXT)
-		assert.strictEqual(Parser.tokenize("[@][/@]")[0].name, Token.NAME.TEXT)
-		assert.strictEqual(Parser.tokenize("[test test=1 testt=\"2\"][/test]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[u]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[u][u]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[/u]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[/u][/u]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[test={][/test]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[@][/@]")[0].name, Token.NAME.TEXT)
+		assert.strictEqual(Parser.tagTokens("[test test=1 testt=\"2\"][/test]")[0].name, Token.NAME.TEXT)
 
 	})
 
 	it("keys", () => {
 
-		assert.strictEqual(Parser.tokenize("[color=#:/.rEd][/color]")[0].openingTag.keys[0].value, "#:/.rEd")
-		assert.strictEqual(Parser.tokenize("[list=1][/list]")[0].openingTag.keys[0].value, "1")
-		assert.strictEqual(Parser.tokenize("[url=http://localhost][/url]")[0].openingTag.keys[0].value, "http://localhost")
-		assert.strictEqual(Parser.tokenize("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
-		assert.strictEqual(Parser.tokenize("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
-		assert.strictEqual(Parser.tokenize("[test test=1][/test]")[0].openingTag.keys[1].value, "1")
-		assert.strictEqual(Parser.tokenize("[test test=1 testt=\"2\"]x[test]cvv[/test]xc[/test]")[0].openingTag.keys[2].name, "testt")
-		assert.strictEqual(Parser.tokenize("[test test=1 testt=\"2\"]test[test][test][/test][/test]sfd[/test]")[0].openingTag.keys[2].value, "2")
+		assert.strictEqual(Parser.tagTokens("[color=#:/.rEd][/color]")[0].openingTag.keys[0].value, "#:/.rEd")
+		assert.strictEqual(Parser.tagTokens("[list=1][/list]")[0].openingTag.keys[0].value, "1")
+		assert.strictEqual(Parser.tagTokens("[url=http://localhost][/url]")[0].openingTag.keys[0].value, "http://localhost")
+		assert.strictEqual(Parser.tagTokens("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
+		assert.strictEqual(Parser.tagTokens("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
+		assert.strictEqual(Parser.tagTokens("[test test=1][/test]")[0].openingTag.keys[1].value, "1")
+		assert.strictEqual(Parser.tagTokens("[test test=1 testt=\"2\"]x[test]cvv[/test]xc[/test]")[0].openingTag.keys[2].name, "testt")
+		assert.strictEqual(Parser.tagTokens("[test test=1 testt=\"2\"]test[test][test][/test][/test]sfd[/test]")[0].openingTag.keys[2].value, "2")
 
 	})
 
 	it("buffer", () => {
 
-		assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[0].openingTag.buffer, "[b]")
-		assert.strictEqual(Parser.tokenize("[b][i test=1]Hello World[/i][/b]")[1].openingTag.buffer, "[i test=1]")
-		assert.strictEqual(Parser.tokenize("[b][i=2]Test[/b][/i]")[1].openingTag.buffer, "[i=2]")
-		assert.strictEqual(Parser.tokenize("[b=dsadsa test=1][i]Test[/b][/i]")[1].openingTag.buffer, "[i]")
-		assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[0].closingTag.buffer, "[/b]")
-		assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[1].closingTag.buffer, "[/i]")
+		assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[0].openingTag.buffer, "[b]")
+		assert.strictEqual(Parser.tagTokens("[b][i test=1]Hello World[/i][/b]")[1].openingTag.buffer, "[i test=1]")
+		assert.strictEqual(Parser.tagTokens("[b][i=2]Test[/b][/i]")[1].openingTag.buffer, "[i=2]")
+		assert.strictEqual(Parser.tagTokens("[b=dsadsa test=1][i]Test[/b][/i]")[1].openingTag.buffer, "[i]")
+		assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[0].closingTag.buffer, "[/b]")
+		assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[1].closingTag.buffer, "[/i]")
 
 	})
 
 	it("bufferIndex", () => {
 
-		assert.strictEqual(Parser.tokenize("test[b][/b]")[1].openingTag.bufferIndex, 4)
-		assert.strictEqual(Parser.tokenize("test[b][/b][test][/test]xoxo")[2].openingTag.bufferIndex, 11)
-		assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[0].closingTag.bufferIndex, 10)
-		assert.strictEqual(Parser.tokenize("[b][i][e]Hello World[/e][/i][/b]")[2].closingTag.bufferIndex, 20)
+		assert.strictEqual(Parser.tagTokens("test[b][/b]")[1].openingTag.bufferIndex, 4)
+		assert.strictEqual(Parser.tagTokens("test[b][/b][test][/test]xoxo")[2].openingTag.bufferIndex, 11)
+		assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[0].closingTag.bufferIndex, 10)
+		assert.strictEqual(Parser.tagTokens("[b][i][e]Hello World[/e][/i][/b]")[2].closingTag.bufferIndex, 20)
 
 	})
 
 	it("code", () => {
 
-		const tokens = Parser.tokenize("[code][b][/b][i][/i]Test[code]test[/code][/code][b][/b]")
+		const tokens = Parser.tagTokens("[code][b][/b][i][/i]Test[code]test[/code][/code][b][/b]")
 		assert.strictEqual(tokens.length, 3)
 		assert.strictEqual(tokens[0].name, "bbcode")
 		assert.strictEqual(tokens[1].name, Token.NAME.TEXT)
@@ -104,7 +104,7 @@ describe("parser", () => {
 
 	it("list", () => {
 
-		const tokens = Parser.tokenize("[list][*]test[*][/list]")
+		const tokens = Parser.tagTokens("[list][*]test[*][/list]")
 
 		assert.strictEqual(tokens.length, 4)
 		assert.strictEqual(tokens[0].name, "bbcode")