Skip to content

Commit 61fe15b

Browse files
committed
Minor refactor of the operation type to the unified token kind
1 parent 5e79b05 commit 61fe15b

10 files changed

+143
-154
lines changed

parser/ast.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ func (p *BinaryOperation) End() Pos {
159159
func (p *BinaryOperation) String() string {
160160
var builder strings.Builder
161161
builder.WriteString(p.LeftExpr.String())
162-
if p.Operation != opTypeCast {
162+
if p.Operation != TokenKindDash {
163163
builder.WriteByte(' ')
164164
}
165165
if p.HasNot {
@@ -168,7 +168,7 @@ func (p *BinaryOperation) String() string {
168168
builder.WriteString("GLOBAL ")
169169
}
170170
builder.WriteString(string(p.Operation))
171-
if p.Operation != opTypeCast {
171+
if p.Operation != TokenKindDash {
172172
builder.WriteByte(' ')
173173
}
174174
builder.WriteString(p.RightExpr.String())
@@ -1914,7 +1914,7 @@ func (s *SettingPair) String() string {
19141914
var builder strings.Builder
19151915
builder.WriteString(s.Name.String())
19161916
if s.Value != nil {
1917-
if s.Operation == opTypeEQ {
1917+
if s.Operation == TokenKindSingleEQ {
19181918
builder.WriteString(string(s.Operation))
19191919
} else {
19201920
builder.WriteByte(' ')

parser/lexer.go

+39-22
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,30 @@ import (
99
)
1010

1111
const (
12-
TokenEOF TokenKind = "<eof>"
13-
TokenIdent TokenKind = "<ident>"
14-
TokenKeyword TokenKind = "<keyword>"
15-
TokenInt TokenKind = "<int>"
16-
TokenFloat TokenKind = "<float>"
17-
TokenString TokenKind = "<string>"
18-
TokenDot = "."
12+
TokenKindEOF TokenKind = "<eof>"
13+
TokenKindIdent TokenKind = "<ident>"
14+
TokenKindKeyword TokenKind = "<keyword>"
15+
TokenKindInt TokenKind = "<int>"
16+
TokenKindFloat TokenKind = "<float>"
17+
TokenKindString TokenKind = "<string>"
18+
TokenKindDot = "."
19+
TokenKindSingleEQ TokenKind = "="
20+
TokenKindDoubleEQ TokenKind = "=="
21+
TokenKindNE TokenKind = "!="
22+
TokenKindLT TokenKind = "<"
23+
TokenKindLE TokenKind = "<="
24+
TokenKindGT TokenKind = ">"
25+
TokenKindGE TokenKind = ">="
26+
TokenKindQuery = "?"
27+
28+
TokenKindPlus TokenKind = "+"
29+
TokenKindMinus TokenKind = "-"
30+
TokenKindMul TokenKind = "*"
31+
TokenKindDiv TokenKind = "/"
32+
TokenKindMod TokenKind = "%"
33+
34+
TokenKindArrow TokenKind = "->"
35+
TokenKindDash TokenKind = "::"
1936
)
2037

2138
const (
@@ -33,12 +50,12 @@ type Token struct {
3350

3451
Kind TokenKind
3552
String string
36-
Base int // 10 or 16 on TokenInt
53+
Base int // 10 or 16 on TokenKindInt
3754
QuoteType int
3855
}
3956

4057
func (t *Token) ToString() string {
41-
if t.Kind == TokenKeyword {
58+
if t.Kind == TokenKindKeyword {
4259
return strings.ToUpper(t.String)
4360
}
4461
return t.String
@@ -87,7 +104,7 @@ func (l *Lexer) consumeNumber() error {
87104
}
88105

89106
hasExp := false
90-
tokenKind := TokenInt
107+
tokenKind := TokenKindInt
91108
hasNumberPart := false
92109
for l.peekOk(i) {
93110
hasNumberPart = true
@@ -100,7 +117,7 @@ func (l *Lexer) consumeNumber() error {
100117
i++
101118
continue
102119
case c == '.': // float
103-
tokenKind = TokenFloat
120+
tokenKind = TokenKindFloat
104121
i++
105122
continue
106123
case base != 16 && (c == 'e' || c == 'E' || c == 'p' || c == 'P'):
@@ -165,9 +182,9 @@ func (l *Lexer) consumeIdent(_ Pos) error {
165182
}
166183
slice := l.slice(0, i)
167184
if quoteType == Unquoted && l.isKeyword(strings.ToUpper(slice)) {
168-
token.Kind = TokenKeyword
185+
token.Kind = TokenKindKeyword
169186
} else {
170-
token.Kind = TokenIdent
187+
token.Kind = TokenKindIdent
171188
}
172189
token.Pos = Pos(l.current)
173190
token.End = Pos(l.current + i)
@@ -214,7 +231,7 @@ func (l *Lexer) consumeString() error {
214231
return errors.New("invalid string")
215232
}
216233
l.lastToken = &Token{
217-
Kind: TokenString,
234+
Kind: TokenKindString,
218235
String: l.slice(1, i),
219236
Pos: Pos(l.current + 1),
220237
End: Pos(l.current + i),
@@ -265,11 +282,11 @@ func (l *Lexer) peekToken() (*Token, error) {
265282
}
266283

267284
func (l *Lexer) hasPrecedenceToken(last *Token) bool {
268-
return last != nil && (last.Kind == TokenIdent ||
269-
last.Kind == TokenKeyword ||
270-
last.Kind == TokenInt ||
271-
last.Kind == TokenFloat ||
272-
last.Kind == TokenString)
285+
return last != nil && (last.Kind == TokenKindIdent ||
286+
last.Kind == TokenKindKeyword ||
287+
last.Kind == TokenKindInt ||
288+
last.Kind == TokenKindFloat ||
289+
last.Kind == TokenKindString)
273290
}
274291

275292
func (l *Lexer) consumeToken() error {
@@ -304,7 +321,7 @@ func (l *Lexer) consumeToken() error {
304321
} else if l.peekOk(1) && l.peekN(1) == '>' {
305322
l.lastToken = &Token{
306323
String: l.slice(0, 2),
307-
Kind: opTypeArrow,
324+
Kind: TokenKindArrow,
308325
Pos: Pos(l.current),
309326
End: Pos(l.current + 2),
310327
}
@@ -321,7 +338,7 @@ func (l *Lexer) consumeToken() error {
321338
if l.peekOk(1) && l.peekN(1) == ':' {
322339
l.lastToken = &Token{
323340
String: l.slice(0, 2),
324-
Kind: opTypeCast,
341+
Kind: TokenKindDash,
325342
Pos: Pos(l.current),
326343
End: Pos(l.current + 2),
327344
}
@@ -331,7 +348,7 @@ func (l *Lexer) consumeToken() error {
331348
case '.':
332349
l.lastToken = &Token{
333350
String: l.slice(0, 1),
334-
Kind: TokenDot,
351+
Kind: TokenKindDot,
335352
Pos: Pos(l.current),
336353
End: Pos(l.current + 1),
337354
}

parser/lexer_test.go

+6-6
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ func TestConsumeString(t *testing.T) {
4040
lexer := NewLexer(s)
4141
err := lexer.consumeToken()
4242
require.NoError(t, err)
43-
require.Equal(t, TokenString, lexer.lastToken.Kind)
43+
require.Equal(t, TokenKindString, lexer.lastToken.Kind)
4444
require.Equal(t, strings.Trim(s, "'"), lexer.lastToken.String)
4545
require.True(t, lexer.isEOF())
4646
}
@@ -61,7 +61,7 @@ func TestConsumeNumber(t *testing.T) {
6161
lexer := NewLexer(i)
6262
err := lexer.consumeToken()
6363
require.NoError(t, err)
64-
require.Equal(t, TokenInt, lexer.lastToken.Kind)
64+
require.Equal(t, TokenKindInt, lexer.lastToken.Kind)
6565
require.Equal(t, 10, lexer.lastToken.Base)
6666
require.Equal(t, i, lexer.lastToken.String)
6767
require.True(t, lexer.isEOF())
@@ -77,7 +77,7 @@ func TestConsumeNumber(t *testing.T) {
7777
lexer := NewLexer(n)
7878
err := lexer.consumeToken()
7979
require.NoError(t, err)
80-
require.Equal(t, TokenInt, lexer.lastToken.Kind)
80+
require.Equal(t, TokenKindInt, lexer.lastToken.Kind)
8181
require.Equal(t, 16, lexer.lastToken.Base)
8282
require.Equal(t, n, lexer.lastToken.String)
8383
require.True(t, lexer.isEOF())
@@ -117,7 +117,7 @@ func TestConsumeNumber(t *testing.T) {
117117
lexer := NewLexer(f)
118118
err := lexer.consumeToken()
119119
require.NoError(t, err)
120-
require.Equal(t, TokenFloat, lexer.lastToken.Kind)
120+
require.Equal(t, TokenKindFloat, lexer.lastToken.Kind)
121121
require.Equal(t, f, lexer.lastToken.String)
122122
require.True(t, lexer.isEOF())
123123
}
@@ -167,7 +167,7 @@ func TestConsumeNumber(t *testing.T) {
167167
lexer := NewLexer(i)
168168
err := lexer.consumeToken()
169169
require.NoError(t, err)
170-
require.Equal(t, TokenIdent, lexer.lastToken.Kind)
170+
require.Equal(t, TokenKindIdent, lexer.lastToken.Kind)
171171
require.Equal(t, strings.Trim(i, "`"), lexer.lastToken.String)
172172
require.True(t, lexer.isEOF())
173173
}
@@ -178,7 +178,7 @@ func TestConsumeNumber(t *testing.T) {
178178
lexer := NewLexer(k)
179179
err := lexer.consumeToken()
180180
require.NoError(t, err)
181-
require.Equal(t, TokenKeyword, lexer.lastToken.Kind)
181+
require.Equal(t, TokenKindKeyword, lexer.lastToken.Kind)
182182
require.Equal(t, k, lexer.lastToken.String)
183183
require.True(t, lexer.isEOF())
184184
}

parser/parse_system.go

+11-11
Original file line numberDiff line numberDiff line change
@@ -343,7 +343,7 @@ func (p *Parser) parseCheckStmt(pos Pos) (*CheckStmt, error) {
343343

344344
func (p *Parser) parseRoleName(_ Pos) (*RoleName, error) {
345345
switch {
346-
case p.matchTokenKind(TokenIdent):
346+
case p.matchTokenKind(TokenKindIdent):
347347
name, err := p.parseIdent()
348348
if err != nil {
349349
return nil, err
@@ -364,7 +364,7 @@ func (p *Parser) parseRoleName(_ Pos) (*RoleName, error) {
364364
Scope: scope,
365365
OnCluster: onCluster,
366366
}, nil
367-
case p.matchTokenKind(TokenString):
367+
case p.matchTokenKind(TokenKindString):
368368
name, err := p.parseString(p.Pos())
369369
if err != nil {
370370
return nil, err
@@ -391,7 +391,7 @@ func (p *Parser) tryParseRoleSettings(pos Pos) ([]*RoleSetting, error) {
391391

392392
func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) {
393393
pairs := make([]*SettingPair, 0)
394-
for p.matchTokenKind(TokenIdent) {
394+
for p.matchTokenKind(TokenKindIdent) {
395395
name, err := p.parseIdent()
396396
if err != nil {
397397
return nil, err
@@ -404,12 +404,12 @@ func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) {
404404
}, nil
405405
}
406406
switch {
407-
case p.matchTokenKind(opTypeEQ),
408-
p.matchTokenKind(TokenInt),
409-
p.matchTokenKind(TokenFloat),
410-
p.matchTokenKind(TokenString):
407+
case p.matchTokenKind(TokenKindSingleEQ),
408+
p.matchTokenKind(TokenKindInt),
409+
p.matchTokenKind(TokenKindFloat),
410+
p.matchTokenKind(TokenKindString):
411411
var op TokenKind
412-
if token := p.tryConsumeTokenKind(opTypeEQ); token != nil {
412+
if token := p.tryConsumeTokenKind(TokenKindSingleEQ); token != nil {
413413
op = token.Kind
414414
}
415415
value, err := p.parseLiteral(p.Pos())
@@ -419,7 +419,7 @@ func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) {
419419
// docs: https://clickhouse.com/docs/en/sql-reference/statements/alter/role
420420
// the operator "=" was required if the variable name is NOT in
421421
// ["MIN", "MAX", "PROFILE"] and value is existed.
422-
if value != nil && name.Name != "MIN" && name.Name != "MAX" && name.Name != "PROFILE" && op != opTypeEQ {
422+
if value != nil && name.Name != "MIN" && name.Name != "MAX" && name.Name != "PROFILE" && op != TokenKindSingleEQ {
423423
return nil, fmt.Errorf("expected operator = or no value, but got %s", op)
424424
}
425425
pairs = append(pairs, &SettingPair{
@@ -809,7 +809,7 @@ func (p *Parser) parsePrivilegeSystem(pos Pos) (*PrivilegeClause, error) {
809809
}
810810

811811
func (p *Parser) parsePrivilegeClause(pos Pos) (*PrivilegeClause, error) {
812-
if p.matchTokenKind(TokenIdent) {
812+
if p.matchTokenKind(TokenKindIdent) {
813813
if p.last().String == "dictGet" {
814814
_ = p.lexer.consumeToken()
815815
return &PrivilegeClause{
@@ -921,7 +921,7 @@ func (p *Parser) parseGrantSource(_ Pos) (*TableIdentifier, error) {
921921
return nil, err
922922
}
923923

924-
if p.tryConsumeTokenKind(TokenDot) == nil {
924+
if p.tryConsumeTokenKind(TokenKindDot) == nil {
925925
return &TableIdentifier{
926926
Table: ident,
927927
}, nil

0 commit comments

Comments
 (0)