
Commit dd21fe9: Added SepToken
Parent: 85ac1a9

6 files changed (+43 / -6 lines)


go.mod

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ module usm
 go 1.22.5

 require (
-    github.com/RealA10N/view v0.0.0-20240802130606-01aa81d31529
+    github.com/RealA10N/view v0.0.0-20240802133045-44824fbd8c51
     github.com/stretchr/testify v1.9.0
 )

go.sum

Lines changed: 2 additions & 0 deletions
@@ -14,6 +14,8 @@ github.com/RealA10N/view v0.0.0-20240802072707-a42acf42c115 h1:jTDQeWDinxn1YUv8M
 github.com/RealA10N/view v0.0.0-20240802072707-a42acf42c115/go.mod h1:b0iBi+f/nBnJOqjmQu8+Gj02+EI8vRtajVvp8cGwCw4=
 github.com/RealA10N/view v0.0.0-20240802130606-01aa81d31529 h1:xwU/GuMjLN2BteW6eBRSZ9w/wtKU2+e5LJVS6+hbJa4=
 github.com/RealA10N/view v0.0.0-20240802130606-01aa81d31529/go.mod h1:b0iBi+f/nBnJOqjmQu8+Gj02+EI8vRtajVvp8cGwCw4=
+github.com/RealA10N/view v0.0.0-20240802133045-44824fbd8c51 h1:3LT499XSPEgIoli2u5yanAniGdxgU0ZWvdCJxZNGEkw=
+github.com/RealA10N/view v0.0.0-20240802133045-44824fbd8c51/go.mod h1:b0iBi+f/nBnJOqjmQu8+Gj02+EI8vRtajVvp8cGwCw4=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=

lex/token.go

Lines changed: 8 additions & 1 deletion
@@ -17,6 +17,7 @@ const (
     RcrToken
     EqlToken
     OprToken
+    SepToken
 )

 var tokenNames = map[TokenType]string{
@@ -29,6 +30,7 @@ var tokenNames = map[TokenType]string{
     RcrToken: "Right Curly Brace",
     EqlToken: "Equal",
     OprToken: "Operator",
+    SepToken: "\\n",
 }

 func (tkn TokenType) String() string {
@@ -49,5 +51,10 @@ func (tkn Token) String(ctx source.SourceContext) string {
     if !ok {
         typeName = "?"
     }
-    return fmt.Sprintf("<%s \"%s\">", typeName, string(tkn.View.Raw(ctx)))
+
+    if tkn.View.Len() > 0 {
+        return fmt.Sprintf("<%s \"%s\">", typeName, string(tkn.View.Raw(ctx)))
+    } else {
+        return fmt.Sprintf(typeName)
+    }
 }
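
The String change means a token now prints its raw source text only when its view is non-empty; a SepToken carries no underlying text, so it prints just its type name. Below is a minimal standalone sketch of that behavior, using simplified stand-in types rather than the real lex.Token and source.SourceContext:

// Minimal sketch of the new String behavior: tokens with no source text,
// like the separator, print only their type name; all others keep the
// <Type "raw"> form. Types and names here are simplified stand-ins.
package main

import "fmt"

type tokenType int

const (
	oprToken tokenType = iota
	sepToken
)

var names = map[tokenType]string{
	oprToken: "Operator",
	sepToken: "\\n",
}

type token struct {
	typ tokenType
	raw string // empty for tokens that carry no source text
}

func (t token) String() string {
	name, ok := names[t.typ]
	if !ok {
		name = "?"
	}
	if len(t.raw) > 0 {
		return fmt.Sprintf("<%s %q>", name, t.raw)
	}
	return name
}

func main() {
	fmt.Println(token{oprToken, "add"}) // <Operator "add">
	fmt.Println(token{sepToken, ""})    // \n
}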

lex/tokenizer.go

Lines changed: 10 additions & 3 deletions
@@ -36,7 +36,10 @@ func NewTokenizer() Tokenizer {

 func (t tokenizer) Tokenize(view source.SourceView) (tkns []Token, err error) {
     for {
-        consumeWhitespace(&view)
+        addSep := consumeWhitespace(&view)
+        if addSep {
+            tkns = append(tkns, Token{Type: SepToken})
+        }
         tkn, err := t.tokenizeWord(&view)
         if err != nil {
             break
@@ -71,6 +74,10 @@ func not[T any](f func(item T) bool) func(T) bool {
     }
 }

-func consumeWhitespace(view *source.SourceView) {
-    *view = view.Subview(view.Index(not(unicode.IsSpace)), view.Len())
+// consume white spaces and return true if encounterd a newline.
+func consumeWhitespace(view *source.SourceView) bool {
+    idx := view.Index(not(unicode.IsSpace))
+    before, after := view.Partition(idx)
+    *view = after
+    return before.Contains('\n')
 }
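
consumeWhitespace now reports whether the skipped run of whitespace contained a newline, which is what makes Tokenize emit a SepToken between source lines. A standalone sketch of the same idea on a plain string (the Partition and Contains calls on source.SourceView are assumed to behave like the slicing below):

// Sketch of the whitespace-consuming logic on a plain string: drop the
// leading whitespace and tell the caller whether a newline was part of it.
package main

import (
	"fmt"
	"strings"
	"unicode"
)

func consumeWhitespace(s *string) bool {
	// Index of the first non-space rune; everything before it is whitespace.
	idx := strings.IndexFunc(*s, func(r rune) bool { return !unicode.IsSpace(r) })
	if idx < 0 {
		idx = len(*s) // the whole remainder is whitespace
	}
	before, after := (*s)[:idx], (*s)[idx:]
	*s = after
	return strings.ContainsRune(before, '\n')
}

func main() {
	src := "   \n\tret %0"
	sawNewline := consumeWhitespace(&src)
	fmt.Println(sawNewline, src) // true ret %0
}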

lex/tokenizer_test.go

Lines changed: 18 additions & 0 deletions
@@ -37,13 +37,16 @@ func TestAddOne(t *testing.T) {
         tknDesc{"$i32", lex.TypToken},
         tknDesc{"%x", lex.RegToken},
         tknDesc{"{", lex.LcrToken},
+        tknDesc{"", lex.SepToken},
         tknDesc{"%0", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
         tknDesc{"add", lex.OprToken},
         tknDesc{"%x", lex.RegToken},
         tknDesc{"#1", lex.ImmToken},
+        tknDesc{"", lex.SepToken},
         tknDesc{"ret", lex.OprToken},
         tknDesc{"%0", lex.RegToken},
+        tknDesc{"", lex.SepToken},
         tknDesc{"}", lex.RcrToken},
     }

@@ -87,51 +90,62 @@ func TestPow(t *testing.T) {
         tknDesc{"$u32", lex.TypToken},
         tknDesc{"%exp", lex.RegToken},
         tknDesc{"{", lex.LcrToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"jz", lex.OprToken},
         tknDesc{"%exp", lex.RegToken},
         tknDesc{".end", lex.LblToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{".recurse", lex.LblToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%base.new", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
         tknDesc{"mul", lex.OprToken},
         tknDesc{"%base", lex.RegToken},
         tknDesc{"%base", lex.RegToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%exp.new", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
         tknDesc{"shr", lex.OprToken},
         tknDesc{"%exp", lex.RegToken},
         tknDesc{"#1", lex.ImmToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%res.0", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
         tknDesc{"call", lex.OprToken},
         tknDesc{"@pow", lex.GlbToken},
         tknDesc{"%base.new", lex.RegToken},
         tknDesc{"%exp.new", lex.RegToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%exp.mod2", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
         tknDesc{"and", lex.OprToken},
         tknDesc{"%exp", lex.RegToken},
         tknDesc{"#1", lex.ImmToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"jz", lex.OprToken},
         tknDesc{"%exp.mod2", lex.RegToken},
         tknDesc{".even_base", lex.LblToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{".odd_base", lex.LblToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%res.1", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
         tknDesc{"mul", lex.OprToken},
         tknDesc{"%res.0", lex.RegToken},
         tknDesc{"%base", lex.RegToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{".even_base", lex.LblToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%res.2", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
@@ -140,8 +154,10 @@ func TestPow(t *testing.T) {
         tknDesc{"%res.1", lex.RegToken},
         tknDesc{".recurse", lex.LblToken},
         tknDesc{"%res.0", lex.RegToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{".end", lex.LblToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"%res.3", lex.RegToken},
         tknDesc{"=", lex.EqlToken},
@@ -150,9 +166,11 @@ func TestPow(t *testing.T) {
         tknDesc{"%base", lex.RegToken},
         tknDesc{".even_base", lex.LblToken},
         tknDesc{"%res.2", lex.RegToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"ret", lex.OprToken},
         tknDesc{"%res.3", lex.RegToken},
+        tknDesc{"", lex.SepToken},

         tknDesc{"}", lex.RcrToken},
     }

usm.go

Lines changed: 4 additions & 1 deletion
@@ -27,6 +27,9 @@ func main() {

     _, ctx := view.Detach()
     for _, tkn := range tokens {
-        fmt.Println(tkn.String(ctx))
+        fmt.Printf("%s ", tkn.String(ctx))
+        if tkn.Type == lex.SepToken {
+            fmt.Println()
+        }
     }
 }
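
With this change the token dump groups tokens by source line: each token is printed followed by a space, and a SepToken forces a line break. A small illustrative sketch of that loop, using hypothetical placeholder strings instead of the real lex.Token values:

// Sketch of the new printing loop in main: tokens go on one line separated
// by spaces, and a separator token starts a new line, so the dump roughly
// mirrors the line structure of the tokenized source. The token strings
// below are hypothetical placeholders, not real usm output.
package main

import "fmt"

type tok struct {
	text string
	sep  bool // true for separator tokens
}

func main() {
	tokens := []tok{
		{text: `<Operator "ret">`},
		{text: `<Register "%0">`}, // "Register" is a guessed display name
		{text: `\n`, sep: true},   // SepToken prints just its name, "\n"
		{text: `<Right Curly Brace "}">`},
	}
	for _, t := range tokens {
		fmt.Printf("%s ", t.text)
		if t.sep {
			fmt.Println()
		}
	}
	fmt.Println()
}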
