@@ -124,6 +124,16 @@ export function generateScopes(text: string, parsedFileName: path.ParsedPath) {
     ));
 }
 
+function validateTokenScopeExtension(grammar: Grammar, token: vt.IToken) {
+    return !token.scopes.some(scope => !isValidScopeExtension(grammar, scope));
+}
+
+function isValidScopeExtension(grammar: Grammar, scope: string) {
+    return scope.endsWith(grammar.kind === GrammarKind.ts ? ".ts" : ".tsx") ||
+        scope.endsWith(".jsdoc") ||
+        scope.endsWith(".regexp");
+}
+
 function generateScopesWorker(mainGrammar: Grammar, otherGrammar: Grammar | undefined, oriLines: string[]): string {
     let cleanLines: string[] = [];
     let baselineLines: string[] = [];
@@ -140,15 +150,16 @@ function generateScopesWorker(mainGrammar: Grammar, otherGrammar: Grammar | unde
         otherBaselines.push(">" + line);
 
         for (let token of mainLineTokens) {
-            writeTokenLine(token, "", "", baselineLines);
+            writeTokenLine(mainGrammar, token, baselineLines);
         }
 
         if (otherGrammar) {
             const otherLineTokens = tokenizeLine(otherGrammar, line);
-            if (hasDiff(mainLineTokens, otherLineTokens, hasDiffLineToken)) {
+            if (otherLineTokens.some(token => !validateTokenScopeExtension(otherGrammar, token)) ||
+                hasDiff(mainLineTokens, otherLineTokens, hasDiffLineToken)) {
                 foundDiff = true;
                 for (let token of otherLineTokens) {
-                    writeTokenLine(token, "", "", otherBaselines);
+                    writeTokenLine(otherGrammar, token, otherBaselines);
                 }
             }
         }
@@ -158,7 +169,7 @@ function generateScopesWorker(mainGrammar: Grammar, otherGrammar: Grammar | unde
     return getInputFile(cleanLines) + getBaseline(mainGrammar, baselineLines) + otherDiffBaseline;
 }
 
-function writeTokenLine(token: vt.IToken, preTextForToken: string, postTextForToken: string, outputLines: string[]) {
+function writeTokenLine(grammar: Grammar, token: vt.IToken, outputLines: string[]) {
     let startingSpaces = " ";
     for (let j = 0; j < token.startIndex; j++) {
         startingSpaces += " ";
@@ -169,5 +180,5 @@ function writeTokenLine(token: vt.IToken, preTextForToken: string, postTextForTo
         locatingString += "^";
     }
     outputLines.push(startingSpaces + locatingString);
-    outputLines.push(startingSpaces + preTextForToken + token.scopes.join(' ') + postTextForToken);
+    outputLines.push(`${startingSpaces}${token.scopes.join(' ')}${validateTokenScopeExtension(grammar, token) ? "" : " INCORRECT_SCOPE_EXTENSION"}`);
 }
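The change makes the baseline writer flag any token whose scopes do not end in the grammar's own extension (or the shared ".jsdoc" / ".regexp" suffixes). As a minimal sketch of that behaviour, the snippet below re-implements the two new helpers against simplified stand-in types; the real Grammar and GrammarKind types live elsewhere in this repo and IToken comes from vscode-textmate, so the type shapes here are assumptions made only to keep the example self-contained and runnable.

// Sketch only: simplified stand-ins for the repo's Grammar/GrammarKind and vscode-textmate's IToken.
enum GrammarKind { ts = "ts", tsx = "tsx" }
interface Grammar { kind: GrammarKind; }
interface IToken { startIndex: number; endIndex: number; scopes: string[]; }

function isValidScopeExtension(grammar: Grammar, scope: string) {
    // A scope is acceptable when it ends with the grammar's own extension,
    // or with the shared ".jsdoc" / ".regexp" suffixes.
    return scope.endsWith(grammar.kind === GrammarKind.ts ? ".ts" : ".tsx") ||
        scope.endsWith(".jsdoc") ||
        scope.endsWith(".regexp");
}

function validateTokenScopeExtension(grammar: Grammar, token: IToken) {
    // Every scope on the token must pass the extension check.
    return !token.scopes.some(scope => !isValidScopeExtension(grammar, scope));
}

const tsGrammar: Grammar = { kind: GrammarKind.ts };

// All scopes end in ".ts", so the token passes.
console.log(validateTokenScopeExtension(tsGrammar,
    { startIndex: 0, endIndex: 6, scopes: ["source.ts", "keyword.control.ts"] })); // true

// A ".tsx" scope leaked into the .ts grammar; the baseline line for this token
// would be suffixed with " INCORRECT_SCOPE_EXTENSION".
console.log(validateTokenScopeExtension(tsGrammar,
    { startIndex: 0, endIndex: 6, scopes: ["source.ts", "meta.tag.tsx"] })); // false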