forked from alecthomas/participle
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmap.go
82 lines (72 loc) · 1.74 KB
/
map.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
package participle
import (
"io"
"strconv"
"strings"
"github.com/alecthomas/participle/lexer"
)
// identityMapper is a no-op Mapper: it returns every token unchanged.
func identityMapper(t lexer.Token) lexer.Token {
	return t
}
// Unquote applies strconv.Unquote() to tokens of the given types.
//
// Tokens of type "String" will be unquoted if no other types are provided.
func Unquote(def lexer.Definition, types ...string) Option {
	if len(types) == 0 {
		types = []string{"String"}
	}
	table, err := lexer.MakeSymbolTable(def, types...)
	if err != nil {
		panic(err)
	}
	// Wrap a Mapper that rewrites matching tokens in place.
	return Map(func(t lexer.Token) lexer.Token {
		if !table[t.Type] {
			return t
		}
		unquoted, uerr := unquote(t.Value)
		if uerr != nil {
			lexer.Panicf(t.Pos, "invalid quoted string %q: %s", t.Value, uerr.Error())
		}
		t.Value = unquoted
		return t
	})
}
// unquote strips the surrounding quote characters from s and decodes any
// escape sequences in the interior using strconv.UnquoteChar.
//
// Unlike strconv.Unquote it tolerates single-quoted strings of any length.
// Returns an error (rather than panicking) if s is too short to contain a
// pair of quotes, or if an escape sequence is invalid.
func unquote(s string) (string, error) {
	// Guard against inputs too short to hold opening+closing quotes;
	// indexing s[0] / slicing s[1:len(s)-1] would otherwise panic.
	if len(s) < 2 {
		return "", strconv.ErrSyntax
	}
	quote := s[0]
	s = s[1 : len(s)-1]
	// Build the result incrementally; strings.Builder avoids the quadratic
	// cost of repeated string concatenation.
	var out strings.Builder
	out.Grow(len(s))
	for s != "" {
		value, _, tail, err := strconv.UnquoteChar(s, quote)
		if err != nil {
			return "", err
		}
		s = tail
		out.WriteRune(value)
	}
	return out.String(), nil
}
// Upper is an Option that upper-cases all tokens of the given type. Useful for case normalisation.
func Upper(def lexer.Definition, types ...string) Option {
	table, err := lexer.MakeSymbolTable(def, types...)
	if err != nil {
		panic(err)
	}
	// Only tokens whose type appears in the symbol table are rewritten.
	return Map(func(t lexer.Token) lexer.Token {
		if !table[t.Type] {
			return t
		}
		t.Value = strings.ToUpper(t.Value)
		return t
	})
}
// mappingLexerDef wraps a lexer.Definition so that every token produced by
// lexers it creates is passed through a Mapper.
type mappingLexerDef struct {
	lexer.Definition
	mapper Mapper
}

// Lex creates the underlying lexer and wraps it so each token is mapped.
func (m *mappingLexerDef) Lex(r io.Reader) lexer.Lexer {
	inner := m.Definition.Lex(r)
	return &mappingLexer{Lexer: inner, mapper: m.mapper}
}
// mappingLexer applies a Mapper to every token emitted by the wrapped lexer.
type mappingLexer struct {
	lexer.Lexer
	mapper Mapper
}

// Next returns the next token from the underlying lexer, transformed by the mapper.
func (m *mappingLexer) Next() lexer.Token {
	token := m.Lexer.Next()
	return m.mapper(token)
}