diff --git a/parser/ast.go b/parser/ast.go
index eb62bfa..5c5b920 100644
--- a/parser/ast.go
+++ b/parser/ast.go
@@ -159,7 +159,7 @@ func (p *BinaryOperation) End() Pos {
 func (p *BinaryOperation) String() string {
     var builder strings.Builder
     builder.WriteString(p.LeftExpr.String())
-    if p.Operation != opTypeCast {
+    if p.Operation != TokenKindDash {
         builder.WriteByte(' ')
     }
     if p.HasNot {
@@ -168,7 +168,7 @@ func (p *BinaryOperation) String() string {
         builder.WriteString("GLOBAL ")
     }
     builder.WriteString(string(p.Operation))
-    if p.Operation != opTypeCast {
+    if p.Operation != TokenKindDash {
         builder.WriteByte(' ')
     }
     builder.WriteString(p.RightExpr.String())
@@ -1914,7 +1914,7 @@ func (s *SettingPair) String() string {
     var builder strings.Builder
     builder.WriteString(s.Name.String())
     if s.Value != nil {
-        if s.Operation == opTypeEQ {
+        if s.Operation == TokenKindSingleEQ {
             builder.WriteString(string(s.Operation))
         } else {
             builder.WriteByte(' ')
diff --git a/parser/lexer.go b/parser/lexer.go
index 1f2654d..b9c405a 100644
--- a/parser/lexer.go
+++ b/parser/lexer.go
@@ -9,13 +9,30 @@ import (
 )
 
 const (
-    TokenEOF TokenKind = ""
-    TokenIdent TokenKind = ""
-    TokenKeyword TokenKind = ""
-    TokenInt TokenKind = ""
-    TokenFloat TokenKind = ""
-    TokenString TokenKind = ""
-    TokenDot = "."
+    TokenKindEOF TokenKind = ""
+    TokenKindIdent TokenKind = ""
+    TokenKindKeyword TokenKind = ""
+    TokenKindInt TokenKind = ""
+    TokenKindFloat TokenKind = ""
+    TokenKindString TokenKind = ""
+    TokenKindDot = "."
+    TokenKindSingleEQ TokenKind = "="
+    TokenKindDoubleEQ TokenKind = "=="
+    TokenKindNE TokenKind = "!="
+    TokenKindLT TokenKind = "<"
+    TokenKindLE TokenKind = "<="
+    TokenKindGT TokenKind = ">"
+    TokenKindGE TokenKind = ">="
+    TokenKindQuery = "?"
+
+    TokenKindPlus TokenKind = "+"
+    TokenKindMinus TokenKind = "-"
+    TokenKindMul TokenKind = "*"
+    TokenKindDiv TokenKind = "/"
+    TokenKindMod TokenKind = "%"
+
+    TokenKindArrow TokenKind = "->"
+    TokenKindDash TokenKind = "::"
 )
 
 const (
@@ -33,12 +50,12 @@ type Token struct {
     Kind TokenKind
     String string
 
-    Base int // 10 or 16 on TokenInt
+    Base int // 10 or 16 on TokenKindInt
     QuoteType int
 }
 
 func (t *Token) ToString() string {
-    if t.Kind == TokenKeyword {
+    if t.Kind == TokenKindKeyword {
         return strings.ToUpper(t.String)
     }
     return t.String
@@ -87,7 +104,7 @@ func (l *Lexer) consumeNumber() error {
     }
 
     hasExp := false
-    tokenKind := TokenInt
+    tokenKind := TokenKindInt
     hasNumberPart := false
     for l.peekOk(i) {
         hasNumberPart = true
@@ -100,7 +117,7 @@ func (l *Lexer) consumeNumber() error {
             i++
             continue
         case c == '.': // float
-            tokenKind = TokenFloat
+            tokenKind = TokenKindFloat
             i++
             continue
         case base != 16 && (c == 'e' || c == 'E' || c == 'p' || c == 'P'):
@@ -165,9 +182,9 @@ func (l *Lexer) consumeIdent(_ Pos) error {
     }
     slice := l.slice(0, i)
     if quoteType == Unquoted && l.isKeyword(strings.ToUpper(slice)) {
-        token.Kind = TokenKeyword
+        token.Kind = TokenKindKeyword
     } else {
-        token.Kind = TokenIdent
+        token.Kind = TokenKindIdent
     }
     token.Pos = Pos(l.current)
     token.End = Pos(l.current + i)
@@ -214,7 +231,7 @@ func (l *Lexer) consumeString() error {
         return errors.New("invalid string")
     }
     l.lastToken = &Token{
-        Kind: TokenString,
+        Kind: TokenKindString,
         String: l.slice(1, i),
         Pos: Pos(l.current + 1),
         End: Pos(l.current + i),
@@ -265,11 +282,11 @@ func (l *Lexer) peekToken() (*Token, error) {
 }
 
 func (l *Lexer) hasPrecedenceToken(last *Token) bool {
-    return last != nil && (last.Kind == TokenIdent ||
-        last.Kind == TokenKeyword ||
-        last.Kind == TokenInt ||
-        last.Kind == TokenFloat ||
-        last.Kind == TokenString)
+    return last != nil && (last.Kind == TokenKindIdent ||
+        last.Kind == TokenKindKeyword ||
+        last.Kind == TokenKindInt ||
+        last.Kind == TokenKindFloat ||
+        last.Kind == TokenKindString)
 }
 
 func (l *Lexer) consumeToken() error {
@@ -304,7 +321,7 @@ func (l *Lexer) consumeToken() error {
         } else if l.peekOk(1) && l.peekN(1) == '>' {
             l.lastToken = &Token{
                 String: l.slice(0, 2),
-                Kind: opTypeArrow,
+                Kind: TokenKindArrow,
                 Pos: Pos(l.current),
                 End: Pos(l.current + 2),
             }
@@ -321,7 +338,7 @@
         if l.peekOk(1) && l.peekN(1) == ':' {
             l.lastToken = &Token{
                 String: l.slice(0, 2),
-                Kind: opTypeCast,
+                Kind: TokenKindDash,
                 Pos: Pos(l.current),
                 End: Pos(l.current + 2),
             }
@@ -331,7 +348,7 @@
     case '.':
         l.lastToken = &Token{
             String: l.slice(0, 1),
-            Kind: TokenDot,
+            Kind: TokenKindDot,
             Pos: Pos(l.current),
             End: Pos(l.current + 1),
         }
diff --git a/parser/lexer_test.go b/parser/lexer_test.go
index addc963..5b04f8f 100644
--- a/parser/lexer_test.go
+++ b/parser/lexer_test.go
@@ -40,7 +40,7 @@ func TestConsumeString(t *testing.T) {
         lexer := NewLexer(s)
         err := lexer.consumeToken()
         require.NoError(t, err)
-        require.Equal(t, TokenString, lexer.lastToken.Kind)
+        require.Equal(t, TokenKindString, lexer.lastToken.Kind)
         require.Equal(t, strings.Trim(s, "'"), lexer.lastToken.String)
         require.True(t, lexer.isEOF())
     }
@@ -61,7 +61,7 @@ func TestConsumeNumber(t *testing.T) {
         lexer := NewLexer(i)
         err := lexer.consumeToken()
         require.NoError(t, err)
-        require.Equal(t, TokenInt, lexer.lastToken.Kind)
+        require.Equal(t, TokenKindInt, lexer.lastToken.Kind)
         require.Equal(t, 10, lexer.lastToken.Base)
         require.Equal(t, i, lexer.lastToken.String)
         require.True(t, lexer.isEOF())
@@ -77,7 +77,7 @@ func TestConsumeNumber(t *testing.T) {
         lexer := NewLexer(n)
         err := lexer.consumeToken()
         require.NoError(t, err)
-        require.Equal(t, TokenInt, lexer.lastToken.Kind)
+        require.Equal(t, TokenKindInt, lexer.lastToken.Kind)
         require.Equal(t, 16, lexer.lastToken.Base)
         require.Equal(t, n, lexer.lastToken.String)
         require.True(t, lexer.isEOF())
@@ -117,7 +117,7 @@ func TestConsumeNumber(t *testing.T) {
         lexer := NewLexer(f)
         err := lexer.consumeToken()
         require.NoError(t, err)
-        require.Equal(t, TokenFloat, lexer.lastToken.Kind)
+        require.Equal(t, TokenKindFloat, lexer.lastToken.Kind)
         require.Equal(t, f, lexer.lastToken.String)
         require.True(t, lexer.isEOF())
     }
@@ -167,7 +167,7 @@ func TestConsumeNumber(t *testing.T) {
         lexer := NewLexer(i)
         err := lexer.consumeToken()
         require.NoError(t, err)
-        require.Equal(t, TokenIdent, lexer.lastToken.Kind)
+        require.Equal(t, TokenKindIdent, lexer.lastToken.Kind)
         require.Equal(t, strings.Trim(i, "`"), lexer.lastToken.String)
         require.True(t, lexer.isEOF())
     }
@@ -178,7 +178,7 @@ func TestConsumeNumber(t *testing.T) {
         lexer := NewLexer(k)
         err := lexer.consumeToken()
         require.NoError(t, err)
-        require.Equal(t, TokenKeyword, lexer.lastToken.Kind)
+        require.Equal(t, TokenKindKeyword, lexer.lastToken.Kind)
         require.Equal(t, k, lexer.lastToken.String)
         require.True(t, lexer.isEOF())
     }
diff --git a/parser/parse_system.go b/parser/parse_system.go
index 9974450..67e5564 100644
--- a/parser/parse_system.go
+++ b/parser/parse_system.go
@@ -343,7 +343,7 @@ func (p *Parser) parseCheckStmt(pos Pos) (*CheckStmt, error) {
 func (p *Parser) parseRoleName(_ Pos) (*RoleName, error) {
     switch {
-    case p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindIdent):
         name, err := p.parseIdent()
         if err != nil {
return nil, err @@ -364,7 +364,7 @@ func (p *Parser) parseRoleName(_ Pos) (*RoleName, error) { Scope: scope, OnCluster: onCluster, }, nil - case p.matchTokenKind(TokenString): + case p.matchTokenKind(TokenKindString): name, err := p.parseString(p.Pos()) if err != nil { return nil, err @@ -391,7 +391,7 @@ func (p *Parser) tryParseRoleSettings(pos Pos) ([]*RoleSetting, error) { func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) { pairs := make([]*SettingPair, 0) - for p.matchTokenKind(TokenIdent) { + for p.matchTokenKind(TokenKindIdent) { name, err := p.parseIdent() if err != nil { return nil, err @@ -404,12 +404,12 @@ func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) { }, nil } switch { - case p.matchTokenKind(opTypeEQ), - p.matchTokenKind(TokenInt), - p.matchTokenKind(TokenFloat), - p.matchTokenKind(TokenString): + case p.matchTokenKind(TokenKindSingleEQ), + p.matchTokenKind(TokenKindInt), + p.matchTokenKind(TokenKindFloat), + p.matchTokenKind(TokenKindString): var op TokenKind - if token := p.tryConsumeTokenKind(opTypeEQ); token != nil { + if token := p.tryConsumeTokenKind(TokenKindSingleEQ); token != nil { op = token.Kind } value, err := p.parseLiteral(p.Pos()) @@ -419,7 +419,7 @@ func (p *Parser) parseRoleSetting(_ Pos) (*RoleSetting, error) { // docs: https://clickhouse.com/docs/en/sql-reference/statements/alter/role // the operator "=" was required if the variable name is NOT in // ["MIN", "MAX", "PROFILE"] and value is existed. - if value != nil && name.Name != "MIN" && name.Name != "MAX" && name.Name != "PROFILE" && op != opTypeEQ { + if value != nil && name.Name != "MIN" && name.Name != "MAX" && name.Name != "PROFILE" && op != TokenKindSingleEQ { return nil, fmt.Errorf("expected operator = or no value, but got %s", op) } pairs = append(pairs, &SettingPair{ @@ -809,7 +809,7 @@ func (p *Parser) parsePrivilegeSystem(pos Pos) (*PrivilegeClause, error) { } func (p *Parser) parsePrivilegeClause(pos Pos) (*PrivilegeClause, error) { - if p.matchTokenKind(TokenIdent) { + if p.matchTokenKind(TokenKindIdent) { if p.last().String == "dictGet" { _ = p.lexer.consumeToken() return &PrivilegeClause{ @@ -921,7 +921,7 @@ func (p *Parser) parseGrantSource(_ Pos) (*TableIdentifier, error) { return nil, err } - if p.tryConsumeTokenKind(TokenDot) == nil { + if p.tryConsumeTokenKind(TokenKindDot) == nil { return &TableIdentifier{ Table: ident, }, nil diff --git a/parser/parser_column.go b/parser/parser_column.go index cef786a..6890747 100644 --- a/parser/parser_column.go +++ b/parser/parser_column.go @@ -41,23 +41,23 @@ func (p *Parser) getNextPrecedence() int { return PrecedenceIs case p.matchKeyword(KeywordNot): return PrecedenceNot - case p.matchTokenKind(opTypeCast): + case p.matchTokenKind(TokenKindDash): return PrecedenceDoubleColon - case p.matchTokenKind(opTypeEQ), p.matchTokenKind(opTypeLT), p.matchTokenKind(opTypeLE), - p.matchTokenKind(opTypeGE), p.matchTokenKind(opTypeGT), p.matchTokenKind(opTypeDoubleEQ), - p.matchTokenKind(opTypeNE), p.matchTokenKind("<>"): + case p.matchTokenKind(TokenKindSingleEQ), p.matchTokenKind(TokenKindLT), p.matchTokenKind(TokenKindLE), + p.matchTokenKind(TokenKindGE), p.matchTokenKind(TokenKindGT), p.matchTokenKind(TokenKindDoubleEQ), + p.matchTokenKind(TokenKindNE), p.matchTokenKind("<>"): return PrecedenceCompare - case p.matchTokenKind(opTypePlus), p.matchTokenKind(opTypeMinus): + case p.matchTokenKind(TokenKindPlus), p.matchTokenKind(TokenKindMinus): return PrecedenceAddSub - case p.matchTokenKind(opTypeMul), 
-    case p.matchTokenKind(opTypeMul), p.matchTokenKind(opTypeDiv), p.matchTokenKind(opTypeMod):
+    case p.matchTokenKind(TokenKindMul), p.matchTokenKind(TokenKindDiv), p.matchTokenKind(TokenKindMod):
         return PrecedenceMulDivMod
-    case p.matchTokenKind(opTypeArrow):
+    case p.matchTokenKind(TokenKindArrow):
         return PrecedenceArrow
     case p.matchTokenKind("("), p.matchTokenKind("["):
         return PrecedenceBracket
-    case p.matchTokenKind(opTypeCast):
+    case p.matchTokenKind(TokenKindDash):
         return PrecedenceDoubleColon
-    case p.matchTokenKind(TokenDot):
+    case p.matchTokenKind(TokenKindDot):
         return PrecedenceDot
     case p.matchKeyword(KeywordBetween), p.matchKeyword(KeywordLike), p.matchKeyword(KeywordIlike):
         return PrecedenceBetweenLike
@@ -65,7 +65,7 @@ func (p *Parser) getNextPrecedence() int {
         return precedenceIn
     case p.matchKeyword(KeywordGlobal):
         return PrecedenceGlobal
-    case p.matchTokenKind(opTypeQuery):
+    case p.matchTokenKind(TokenKindQuery):
         return PrecedenceQuery
     default:
         return PrecedenceUnknown
@@ -74,14 +74,14 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
     switch {
-    case p.matchTokenKind(opTypeEQ), p.matchTokenKind(opTypeLT), p.matchTokenKind(opTypeLE),
-        p.matchTokenKind(opTypeGE), p.matchTokenKind(opTypeGT),
-        p.matchTokenKind(opTypeNE), p.matchTokenKind("<>"),
-        p.matchTokenKind(opTypeMinus), p.matchTokenKind(opTypePlus), p.matchTokenKind(opTypeMul),
-        p.matchTokenKind(opTypeDiv), p.matchTokenKind(opTypeMod),
+    case p.matchTokenKind(TokenKindSingleEQ), p.matchTokenKind(TokenKindLT), p.matchTokenKind(TokenKindLE),
+        p.matchTokenKind(TokenKindGE), p.matchTokenKind(TokenKindGT),
+        p.matchTokenKind(TokenKindNE), p.matchTokenKind("<>"),
+        p.matchTokenKind(TokenKindMinus), p.matchTokenKind(TokenKindPlus), p.matchTokenKind(TokenKindMul),
+        p.matchTokenKind(TokenKindDiv), p.matchTokenKind(TokenKindMod),
         p.matchKeyword(KeywordIn), p.matchKeyword(KeywordLike), p.matchKeyword(KeywordIlike),
         p.matchKeyword(KeywordAnd), p.matchKeyword(KeywordOr),
-        p.matchTokenKind(opTypeArrow), p.matchTokenKind(opTypeDoubleEQ):
+        p.matchTokenKind(TokenKindArrow), p.matchTokenKind(TokenKindDoubleEQ):
         op := p.last().ToString()
         _ = p.lexer.consumeToken()
         rightExpr, err := p.parseSubExpr(p.Pos(), precedence)
@@ -93,10 +93,10 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
             Operation: TokenKind(op),
             RightExpr: rightExpr,
         }, nil
-    case p.matchTokenKind(opTypeCast):
+    case p.matchTokenKind(TokenKindDash):
         _ = p.lexer.consumeToken()
-        if p.matchTokenKind(TokenIdent) && p.last().String == "Tuple" {
+        if p.matchTokenKind(TokenKindIdent) && p.last().String == "Tuple" {
             name, err := p.parseIdent()
             if err != nil {
                 return nil, err
             }
@@ -111,7 +111,7 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
         }
         return &BinaryOperation{
             LeftExpr: expr,
-            Operation: opTypeCast,
+            Operation: TokenKindDash,
             RightExpr: rightExpr,
         }, nil
     }
@@ -122,7 +122,7 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
         }
         return &BinaryOperation{
             LeftExpr: expr,
-            Operation: opTypeCast,
+            Operation: TokenKindDash,
             RightExpr: rightExpr,
         }, nil
     case p.matchKeyword(KeywordBetween):
@@ -141,12 +141,12 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
             Operation: "GLOBAL IN",
             RightExpr: rightExpr,
         }, nil
-    case p.matchTokenKind(TokenDot):
+    case p.matchTokenKind(TokenKindDot):
         _ = p.lexer.consumeToken()
         // access column with dot notation
         var rightExpr Expr
         var err error
-        if p.matchTokenKind(TokenIdent) {
+        if p.matchTokenKind(TokenKindIdent) {
            rightExpr, err = p.parseIdent()
         } else {
            rightExpr, err = p.parseDecimal(p.Pos())
         }
@@ -156,7 +156,7 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
         return &IndexOperation{
             LeftExpr: expr,
-            Operation: TokenDot,
+            Operation: TokenKindDot,
             Index: rightExpr,
         }, nil
     case p.matchKeyword(KeywordNot):
@@ -191,7 +191,7 @@ func (p *Parser) parseInfix(expr Expr, precedence int) (Expr, error) {
             Object: expr,
             Params: params,
         }, nil
-    case p.matchTokenKind(opTypeQuery):
+    case p.matchTokenKind(TokenKindQuery):
         return p.parseTernaryExpr(expr)
     case p.matchKeyword(KeywordIs):
         _ = p.lexer.consumeToken()
@@ -299,8 +299,8 @@ func (p *Parser) parseColumnExtractExpr(pos Pos) (*ExtractExpr, error) {
 func (p *Parser) parseUnaryExpr(pos Pos) (Expr, error) {
     kind := p.last()
     switch {
-    case p.matchTokenKind(opTypePlus),
-        p.matchTokenKind(opTypeMinus),
+    case p.matchTokenKind(TokenKindPlus),
+        p.matchTokenKind(TokenKindMinus),
         p.matchKeyword(KeywordNot):
         _ = p.lexer.consumeToken()
     default:
@@ -310,7 +310,7 @@ func (p *Parser) parseUnaryExpr(pos Pos) (Expr, error) {
     var expr Expr
     var err error
     switch {
-    case p.matchTokenKind(TokenIdent),
+    case p.matchTokenKind(TokenKindIdent),
         p.matchTokenKind("("):
         expr, err = p.parseExpr(p.Pos())
     default:
@@ -348,7 +348,7 @@ func (p *Parser) parseColumnExpr(pos Pos) (Expr, error) { //nolint:funlen
         if err != nil {
             return nil, err
         }
-        if nextToken != nil && nextToken.Kind == TokenString {
+        if nextToken != nil && nextToken.Kind == TokenKindString {
             return p.parseString(p.Pos())
         }
         return p.parseIdentOrFunction(pos)
@@ -358,16 +358,16 @@ func (p *Parser) parseColumnExpr(pos Pos) (Expr, error) { //nolint:funlen
         return p.parseColumnCaseExpr(pos)
     case p.matchKeyword(KeywordExtract):
         return p.parseColumnExtractExpr(pos)
-    case p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindIdent):
         return p.parseIdentOrFunction(pos)
-    case p.matchTokenKind(TokenString): // string literal
+    case p.matchTokenKind(TokenKindString): // string literal
         return p.parseString(pos)
-    case p.matchTokenKind(TokenInt),
-        p.matchTokenKind(TokenFloat): // number literal
+    case p.matchTokenKind(TokenKindInt),
+        p.matchTokenKind(TokenKindFloat): // number literal
         return p.parseNumber(pos)
     case p.matchTokenKind("("):
         if peek, _ := p.lexer.peekToken(); peek != nil {
-            if peek.Kind == TokenKeyword && strings.EqualFold(peek.String, KeywordSelect) {
+            if peek.Kind == TokenKindKeyword && strings.EqualFold(peek.String, KeywordSelect) {
                 return p.parseSubQuery(pos)
             }
         }
@@ -379,19 +379,19 @@ func (p *Parser) parseColumnExpr(pos Pos) (Expr, error) { //nolint:funlen
     case p.matchTokenKind("{"):
         // The map literal string also starts with '{', so we need to check the next token
         // to determine if it is a map literal or a query param.
-        if p.peekTokenKind(TokenIdent) {
+        if p.peekTokenKind(TokenKindIdent) {
             return p.parseQueryParam(p.Pos())
         }
         return p.parseMapLiteral(p.Pos())
-    case p.matchTokenKind(TokenDot):
+    case p.matchTokenKind(TokenKindDot):
         return p.parseNumber(p.Pos())
-    case p.matchTokenKind(opTypeQuery):
+    case p.matchTokenKind(TokenKindQuery):
         // Placeholder `?`
         _ = p.lexer.consumeToken()
         return &PlaceHolder{
             PlaceholderPos: pos,
             PlaceHolderEnd: pos,
-            Type: opTypeQuery,
+            Type: TokenKindQuery,
         }, nil
     default:
         return nil, fmt.Errorf("unexpected token kind: %s", p.lastTokenKind())
@@ -425,7 +425,7 @@ func (p *Parser) parseColumnCastExpr(pos Pos) (Expr, error) {
     var asColumnType Expr
     // CAST(1 AS 'Float') or CAST(1 AS Float) are equivalent
-    if p.matchTokenKind(TokenString) {
+    if p.matchTokenKind(TokenKindString) {
         asColumnType, err = p.parseString(p.Pos())
     } else {
         asColumnType, err = p.parseColumnType(p.Pos())
     }
@@ -817,19 +817,19 @@ func (p *Parser) parseColumnType(_ Pos) (ColumnType, error) { // nolint:funlen
     }
     if p.tryConsumeTokenKind("(") != nil {
         switch {
-        case p.matchTokenKind(TokenIdent):
+        case p.matchTokenKind(TokenKindIdent):
             if ident.Name == "Nested" {
                 return p.parseNestedType(ident, p.Pos())
             }
             return p.parseComplexType(ident, p.Pos())
-        case p.matchTokenKind(TokenString):
-            if peekToken, err := p.lexer.peekToken(); err == nil && peekToken.Kind == opTypeEQ {
+        case p.matchTokenKind(TokenKindString):
+            if peekToken, err := p.lexer.peekToken(); err == nil && peekToken.Kind == TokenKindSingleEQ {
                 // enum values
                 return p.parseEnumType(ident, p.Pos())
             }
             // like Datetime('Asia/Dubai')
             return p.parseColumnTypeWithParams(ident, p.Pos())
-        case p.matchTokenKind(TokenInt), p.matchTokenKind(TokenFloat):
+        case p.matchTokenKind(TokenKindInt), p.matchTokenKind(TokenKindFloat):
             // fixed size
             return p.parseColumnTypeWithParams(ident, p.Pos())
         default:
@@ -1012,7 +1012,7 @@ func (p *Parser) parseEnumValueExpr(pos Pos) (*EnumValue, error) {
         return nil, err
     }
 
-    if _, err := p.consumeTokenKind(opTypeEQ); err != nil {
+    if _, err := p.consumeTokenKind(TokenKindSingleEQ); err != nil {
         return nil, err
     }
diff --git a/parser/parser_common.go b/parser/parser_common.go
index c5e148c..72bf588 100644
--- a/parser/parser_common.go
+++ b/parser/parser_common.go
@@ -18,7 +18,7 @@ func NewParser(buffer string) *Parser {
 
 func (p *Parser) lastTokenKind() TokenKind {
     if p.last() == nil {
-        return TokenEOF
+        return TokenKindEOF
     }
     return p.last().Kind
 }
@@ -37,7 +37,7 @@ func (p *Parser) Pos() Pos {
 
 func (p *Parser) matchTokenKind(kind TokenKind) bool {
     return p.lastTokenKind() == kind ||
-        (kind == TokenIdent && p.lastTokenKind() == TokenKeyword)
+        (kind == TokenKindIdent && p.lastTokenKind() == TokenKindKeyword)
 }
 
 // consumeTokenKind consumes the last token if it is the given kind.
@@ -58,7 +58,7 @@
 }
 
 func (p *Parser) matchKeyword(keyword string) bool {
-    return p.matchTokenKind(TokenKeyword) && strings.EqualFold(p.last().String, keyword)
+    return p.matchTokenKind(TokenKindKeyword) && strings.EqualFold(p.last().String, keyword)
 }
 
 func (p *Parser) consumeKeyword(keyword string) error {
@@ -79,7 +79,7 @@ func (p *Parser) tryConsumeKeyword(keyword string) *Token {
 }
 
 func (p *Parser) parseIdent() (*Ident, error) {
-    lastToken, err := p.consumeTokenKind(TokenIdent)
+    lastToken, err := p.consumeTokenKind(TokenKindIdent)
     if err != nil {
         return nil, err
     }
@@ -94,7 +94,7 @@ func (p *Parser) parseIdent() (*Ident, error) {
 
 func (p *Parser) parseIdentOrStar() (*Ident, error) {
     switch {
-    case p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindIdent):
         return p.parseIdent()
     case p.matchTokenKind("*"):
         lastToken := p.last()
@@ -110,7 +110,7 @@ func (p *Parser) parseIdentOrStar() (*Ident, error) {
 }
 
 func (p *Parser) tryParseDotIdent(_ Pos) (*Ident, error) {
-    if p.tryConsumeTokenKind(TokenDot) == nil {
+    if p.tryConsumeTokenKind(TokenKindDot) == nil {
         return nil, nil // nolint
     }
     return p.parseIdent()
@@ -207,13 +207,13 @@ func (p *Parser) parseNumber(pos Pos) (*NumberLiteral, error) {
     var err error
 
     switch {
-    case p.matchTokenKind(TokenInt):
-        lastToken, err = p.consumeTokenKind(TokenInt)
-    case p.matchTokenKind(TokenFloat):
-        lastToken, err = p.consumeTokenKind(TokenFloat)
-    case p.matchTokenKind(TokenDot):
+    case p.matchTokenKind(TokenKindInt):
+        lastToken, err = p.consumeTokenKind(TokenKindInt)
+    case p.matchTokenKind(TokenKindFloat):
+        lastToken, err = p.consumeTokenKind(TokenKindFloat)
+    case p.matchTokenKind(TokenKindDot):
         _ = p.lexer.consumeToken()
-        lastToken, err = p.consumeTokenKind(TokenInt)
+        lastToken, err = p.consumeTokenKind(TokenKindInt)
         if err != nil {
             return nil, err
         }
@@ -221,7 +221,7 @@ func (p *Parser) parseNumber(pos Pos) (*NumberLiteral, error) {
             return nil, fmt.Errorf("invalid decimal literal: %q", lastToken.String)
         }
         lastToken.String = "." + lastToken.String
-        lastToken.Kind = TokenFloat
+        lastToken.Kind = TokenKindFloat
     default:
         return nil, fmt.Errorf("expected <int> or <float>, but got %q", p.lastTokenKind())
     }
@@ -238,7 +238,7 @@ func (p *Parser) parseNumber(pos Pos) (*NumberLiteral, error) {
 }
 
 func (p *Parser) parseString(pos Pos) (*StringLiteral, error) {
-    lastToken, err := p.consumeTokenKind(TokenString)
+    lastToken, err := p.consumeTokenKind(TokenKindString)
     if err != nil {
         return nil, err
     }
@@ -252,9 +252,9 @@ func (p *Parser) parseString(pos Pos) (*StringLiteral, error) {
 
 func (p *Parser) parseLiteral(pos Pos) (Literal, error) {
     switch {
-    case p.matchTokenKind(TokenInt), p.matchTokenKind(TokenFloat):
+    case p.matchTokenKind(TokenKindInt), p.matchTokenKind(TokenKindFloat):
         return p.parseNumber(pos)
-    case p.matchTokenKind(TokenString):
+    case p.matchTokenKind(TokenKindString):
         return p.parseString(pos)
     case p.matchKeyword(KeywordNull):
         // accept the NULL keyword
@@ -350,7 +350,7 @@ func (p *Parser) parseRatioExpr(pos Pos) (*RatioExpr, error) {
     }
 
     var denominator *NumberLiteral
-    if p.tryConsumeTokenKind(opTypeDiv) != nil {
+    if p.tryConsumeTokenKind(TokenKindDiv) != nil {
         denominator, err = p.parseNumber(pos)
         if err != nil {
             return nil, err
diff --git a/parser/parser_query.go b/parser/parser_query.go
index 1dd1e97..807e8c7 100644
--- a/parser/parser_query.go
+++ b/parser/parser_query.go
@@ -187,7 +187,7 @@ func (p *Parser) parseJoinOp(_ Pos) []string {
 
 func (p *Parser) parseJoinTableExpr(_ Pos) (Expr, error) {
     switch {
-    case p.matchTokenKind(TokenIdent), p.matchTokenKind("("):
+    case p.matchTokenKind(TokenKindIdent), p.matchTokenKind("("):
         tableExpr, err := p.parseTableExpr(p.Pos())
         if err != nil {
             return nil, err
         }
@@ -283,7 +283,7 @@ func (p *Parser) parseTableExpr(pos Pos) (*TableExpr, error) {
     var expr Expr
     var err error
     switch {
-    case p.matchTokenKind(TokenString), p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindString), p.matchTokenKind(TokenKindIdent):
         // table name
         tableIdentifier, err := p.parseTableIdentifier(p.Pos())
         if err != nil {
@@ -324,7 +324,7 @@ func (p *Parser) parseTableExpr(pos Pos) (*TableExpr, error) {
             Alias: alias,
         }
         tableEnd = expr.End()
-    } else if p.matchTokenKind(TokenIdent) && p.lastTokenKind() != TokenKeyword {
+    } else if p.matchTokenKind(TokenKindIdent) && p.lastTokenKind() != TokenKindKeyword {
         alias, err := p.parseIdent()
         if err != nil {
             return nil, err
@@ -606,7 +606,7 @@ func (p *Parser) parseWindowFrameClause(pos Pos) (*WindowFrameClause, error) {
             UnboundedPos: unboundedPos,
             Direction: direction,
         }
-    case p.matchTokenKind(TokenInt):
+    case p.matchTokenKind(TokenKindInt):
         number, err := p.parseNumber(p.Pos())
         if err != nil {
             return nil, err
diff --git a/parser/parser_table.go b/parser/parser_table.go
index 718c1ae..f4731f3 100644
--- a/parser/parser_table.go
+++ b/parser/parser_table.go
@@ -202,7 +202,7 @@ func (p *Parser) parseIdentOrFunction(_ Pos) (Expr, error) {
         if overToken := p.tryConsumeKeyword(KeywordOver); overToken != nil {
             var overExpr Expr
             switch {
-            case p.matchTokenKind(TokenIdent):
+            case p.matchTokenKind(TokenKindIdent):
                 overExpr, err = p.parseIdent()
             case p.matchTokenKind("("):
                 overExpr, err = p.parseWindowCondition(p.Pos())
@@ -223,14 +223,14 @@ func (p *Parser) parseIdentOrFunction(_ Pos) (Expr, error) {
             }, nil
         }
         return funcExpr, nil
-    case p.tryConsumeTokenKind(TokenDot) != nil:
+    case p.tryConsumeTokenKind(TokenKindDot) != nil:
         switch {
-        case p.matchTokenKind(TokenIdent):
+        case p.matchTokenKind(TokenKindIdent):
             nextIdent, err := p.parseIdent()
             if err != nil {
                 return nil, err
             }
-            if p.tryConsumeTokenKind(TokenDot) != nil {
+            if p.tryConsumeTokenKind(TokenKindDot) != nil {
                 thirdIdent, err := p.parseIdent()
                 if err != nil {
                     return nil, err
                 }
@@ -304,13 +304,13 @@ func (p *Parser) parseTableSchemaClause(pos Pos) (*TableSchemaClause, error) {
         }, nil
     case p.tryConsumeKeyword(KeywordAs) != nil:
         switch {
-        case p.matchTokenKind(TokenIdent):
+        case p.matchTokenKind(TokenKindIdent):
             ident, err := p.parseIdent()
             if err != nil {
                 return nil, err
             }
             switch {
-            case p.matchTokenKind(TokenDot):
+            case p.matchTokenKind(TokenKindDot):
                 // it's a database.table
                 dotIdent, err := p.tryParseDotIdent(p.Pos())
                 if err != nil {
@@ -403,7 +403,7 @@ func (p *Parser) parseTableColumns() ([]Expr, error) {
 }
 
 func (p *Parser) tryParseTableColumnExpr(pos Pos) (*ColumnDef, error) {
-    if !p.matchTokenKind(TokenIdent) {
+    if !p.matchTokenKind(TokenKindIdent) {
         return nil, nil // nolint
     }
     return p.parseTableColumnExpr(pos)
@@ -420,7 +420,7 @@ func (p *Parser) parseTableColumnExpr(pos Pos) (*ColumnDef, error) {
     column.Name = name
 
     columnEnd := name.End()
-    if p.matchTokenKind(TokenIdent) && !p.matchKeyword(KeywordRemove) {
+    if p.matchTokenKind(TokenKindIdent) && !p.matchKeyword(KeywordRemove) {
         columnType, err := p.parseColumnType(p.Pos())
         if err != nil {
             return nil, err
@@ -482,14 +482,14 @@ func (p *Parser) parseTableColumnExpr(pos Pos) (*ColumnDef, error) {
 
 func (p *Parser) parseTableArgExpr(pos Pos) (Expr, error) {
     switch {
-    case p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindIdent):
         ident, err := p.parseIdent()
         if err != nil {
             return nil, err
         }
         switch {
         // nest identifier
-        case p.matchTokenKind(TokenDot):
+        case p.matchTokenKind(TokenKindDot):
             dotIdent, err := p.tryParseDotIdent(p.Pos())
             if err != nil {
                 return nil, err
@@ -510,7 +510,7 @@ func (p *Parser) parseTableArgExpr(pos Pos) (Expr, error) {
         default:
             return ident, nil
         }
-    case p.matchTokenKind(TokenInt), p.matchTokenKind(TokenString), p.matchKeyword("NULL"):
+    case p.matchTokenKind(TokenKindInt), p.matchTokenKind(TokenKindString), p.matchKeyword("NULL"):
         return p.parseLiteral(p.Pos())
     default:
         return nil, fmt.Errorf("unexpected token: %q, expected <int>, <string>", p.last().String)
@@ -557,9 +557,9 @@ func (p *Parser) tryParseClusterClause(pos Pos) (*ClusterClause, error) {
     var expr Expr
     var err error
     switch {
-    case p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindIdent):
         expr, err = p.parseIdent()
-    case p.matchTokenKind(TokenString):
+    case p.matchTokenKind(TokenKindString):
         expr, err = p.parseString(p.Pos())
     default:
         return nil, fmt.Errorf("unexpected token: %q, expected <ident> or <string>", p.last().String)
@@ -794,19 +794,19 @@ func (p *Parser) parseSettingsExprList(pos Pos) (*SettingExprList, error) {
         return nil, err
     }
 
-    if _, err := p.consumeTokenKind(opTypeEQ); err != nil {
+    if _, err := p.consumeTokenKind(TokenKindSingleEQ); err != nil {
         return nil, err
     }
 
     var expr Expr
     switch {
-    case p.matchTokenKind(TokenInt):
+    case p.matchTokenKind(TokenKindInt):
         number, err := p.parseNumber(p.Pos())
         if err != nil {
             return nil, err
         }
         expr = number
-    case p.matchTokenKind(TokenString):
+    case p.matchTokenKind(TokenKindString):
         str, err := p.parseString(p.Pos())
         expr = str
         if err != nil {
@@ -855,12 +855,12 @@ func (p *Parser) parseEngineExpr(pos Pos) (*EngineExpr, error) {
     if err := p.consumeKeyword(KeywordEngine); err != nil {
         return nil, err
     }
-    _ = p.tryConsumeTokenKind(opTypeEQ)
+    _ = p.tryConsumeTokenKind(TokenKindSingleEQ)
 
     engineExpr := &EngineExpr{EnginePos: pos}
     var engineEnd Pos
     switch {
-    case p.matchTokenKind(TokenIdent):
+    case p.matchTokenKind(TokenKindIdent):
         ident, err := p.parseIdent()
         if err != nil {
             return nil, err
         }
@@ -1316,7 +1316,7 @@ func (p *Parser) parseCreateFunction(pos Pos) (*CreateFunction, error) {
     if err != nil {
         return nil, err
     }
-    if _, err := p.consumeTokenKind(opTypeArrow); err != nil {
+    if _, err := p.consumeTokenKind(TokenKindArrow); err != nil {
         return nil, err
     }
     expr, err := p.parseExpr(p.Pos())
diff --git a/parser/parser_view.go b/parser/parser_view.go
index fbc6af0..5847896 100644
--- a/parser/parser_view.go
+++ b/parser/parser_view.go
@@ -212,7 +212,7 @@ func (p *Parser) tryParseWithTimeout(pos Pos) (*WithTimeoutClause, error) {
 
     withTimeout := &WithTimeoutClause{WithTimeoutPos: pos}
 
-    if p.matchTokenKind(TokenInt) {
+    if p.matchTokenKind(TokenKindInt) {
         decimalNumber, err := p.parseDecimal(p.Pos())
         if err != nil {
             return nil, err
diff --git a/parser/type.go b/parser/type.go
index 3960286..d40336a 100644
--- a/parser/type.go
+++ b/parser/type.go
@@ -1,31 +1,3 @@
 package parser
 
 var intervalType = NewSet("SECOND", "MINUTE", "HOUR", "DAY", "WEEK", "MONTH", "QUARTER", "YEAR")
-
-type OpType string
-
-const (
-    // Comparison operators
-    opTypeEQ TokenKind = "="
-    opTypeDoubleEQ TokenKind = "=="
-    opTypeNE TokenKind = "!="
-    opTypeLT TokenKind = "<"
-    opTypeLE TokenKind = "<="
-    opTypeGT TokenKind = ">"
-    opTypeGE TokenKind = ">="
-    opTypeQuery = "?"
-
-    // Arithmetic operators
-    opTypePlus TokenKind = "+"
-    opTypeMinus TokenKind = "-"
-    opTypeMul TokenKind = "*"
-    opTypeDiv TokenKind = "/"
-    opTypeMod TokenKind = "%"
-
-    opTypeArrow TokenKind = "->"
-    opTypeCast TokenKind = "::"
-
-    // Logical operators
-    opTypeAnd TokenKind = "AND"
-    opTypeOr TokenKind = "OR"
-)
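
For reference, a minimal test-style sketch of how the renamed constants can be exercised. It reuses the `parser` package API already shown in `lexer_test.go` above (`NewLexer`, `consumeToken`, `lastToken`, `isEOF`); the test function itself is hypothetical and not part of this diff:

```go
package parser

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// TestConsumeOperatorTokens is a hypothetical sketch: it lexes the two
// multi-character operators touched by this rename and checks that the
// lexer reports the new TokenKind* constants.
func TestConsumeOperatorTokens(t *testing.T) {
	for input, kind := range map[string]TokenKind{
		"->": TokenKindArrow,
		"::": TokenKindDash,
	} {
		lexer := NewLexer(input)
		err := lexer.consumeToken()
		require.NoError(t, err)
		require.Equal(t, kind, lexer.lastToken.Kind)
		require.Equal(t, input, lexer.lastToken.String)
		require.True(t, lexer.isEOF())
	}
}
```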