Commit 8c52619

craig[bot] and souravcrl committed
Merge #131480
131480: hba,rulebasedscanner: handle double quotes in HBA conf option value r=pritesh-lahoti a=souravcrl

fix CRDB-39812
Epic CRDB-33829

Currently, the HBA configuration cluster setting value does not fully adhere to the `pg_hba.conf` format, and we fail to handle double quotes in HBA auth method option values. This requires fixes to the HBA parser and tokenizer code.

Release note (security, ops): Previously, the HBA configuration cluster setting `server.host_based_authentication.configuration` was unable to handle double quotes in authentication method option values. For example, consider the following HBA entry:

```
host all all all ldap ldapserver=ldap.example.com ldapport=636 ldapbasedn="ou=users,dc=example,dc=com" ldapbinddn="cn=readonly,dc=example,dc=com" ldapbindpasswd=readonly_password ldapsearchattribute=uid ldapsearchfilter="(memberof=cn=cockroachdb_users,ou=groups,dc=example,dc=com)"
```

The HBA parser would fail after splitting `ldapbinddn="cn=readonly,dc=example,dc=com"` into two separate options (`ldapbinddn=` and `cn=readonly,dc=example,dc=com`). This PR fixes that, so the two tokens are now set as the key and value, respectively, of the same HBA configuration option.

Co-authored-by: souravcrl <[email protected]>
2 parents b6c1368 + 1f6ed61 commit 8c52619
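For orientation before the diff: a minimal, standalone Go sketch of the intended behavior. It is not code from this PR; `parseOption` is an illustrative helper showing how a `key="value"` option should split into a key and an unquoted value.

```go
package main

import (
    "fmt"
    "strings"
)

// parseOption is a hypothetical helper (not part of the PR): it splits a
// single HBA option of the form key=value or key="value" into its key and
// unquoted value, which is the behavior the fix is meant to produce.
func parseOption(tok string) (key, value string, ok bool) {
    kv := strings.SplitN(tok, "=", 2)
    if len(kv) != 2 {
        return "", "", false
    }
    value = kv[1]
    // Strip one level of surrounding double quotes, if present.
    if len(value) >= 2 && strings.HasPrefix(value, `"`) && strings.HasSuffix(value, `"`) {
        value = value[1 : len(value)-1]
    }
    return kv[0], value, true
}

func main() {
    key, value, ok := parseOption(`ldapbinddn="cn=readonly,dc=example,dc=com"`)
    fmt.Println(key, value, ok)
    // Output: ldapbinddn cn=readonly,dc=example,dc=com true
}
```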


6 files changed: +249 -21 lines


pkg/settings/rulebasedscanner/scanned_input.go

Lines changed: 13 additions & 0 deletions
@@ -10,6 +10,8 @@

 package rulebasedscanner

+import "strings"
+
 // ScannedInput represents the result of tokenizing the input
 // configuration data.
 //
@@ -52,3 +54,14 @@ func (s String) Empty() bool { return s.IsKeyword("") }
 func (s String) IsKeyword(v string) bool {
     return !s.Quoted && s.Value == v
 }
+
+// Join concatenates the elements of its first argument to create a single
+// string. The separator string sep is placed between elements in the resulting
+// string.
+func Join(elems []String, sep string) string {
+    values := make([]string, len(elems))
+    for idx := range elems {
+        values[idx] = elems[idx].Value
+    }
+    return strings.Join(values, sep)
+}
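A hedged usage sketch for the new `Join` helper. The import path is inferred from the `pkg/settings/rulebasedscanner` file path and is an assumption, not something stated in the diff:

```go
package main

import (
    "fmt"

    // Assumed import path, derived from the pkg/settings/rulebasedscanner file path.
    "github.com/cockroachdb/cockroach/pkg/settings/rulebasedscanner"
)

func main() {
    // Value tokens that followed a dangling "key=" token, rejoined into a
    // single option value with the separator the HBA parser uses.
    toks := []rulebasedscanner.String{
        {Value: "b", Quoted: false},
        {Value: "c", Quoted: false},
    }
    fmt.Println(rulebasedscanner.Join(toks, ", ")) // prints: b, c
}
```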

pkg/settings/rulebasedscanner/scanner.go

Lines changed: 16 additions & 6 deletions
@@ -42,6 +42,10 @@ type lex struct {
     // comma.
     comma bool

+    // equals is set to true if the last found token was succeeded by an
+    // equals sign.
+    equals bool
+
     // lexed is set to the portion of the text matched by the current
     // rule, and is provided as input to the rule's action function.
     lexed string
@@ -84,7 +88,7 @@ var rules = []struct {
 }{
     {r: rule{`[ \t\r,]*` /***********/, func(l *lex) (bool, error) { return false, nil }}},
     {r: rule{`#.*$` /****************/, func(l *lex) (bool, error) { return false, nil }}},
-    {r: rule{`[^[:cntrl:] ",]+,?` /**/, func(l *lex) (bool, error) { l.checkComma(); l.Value = l.lexed; return true, nil }}},
+    {r: rule{`[^[:cntrl:] ",]+,?` /**/, func(l *lex) (bool, error) { l.checkComma(); l.checkEquals(); l.Value = l.lexed; return true, nil }}},
     {r: rule{`"[^[:cntrl:]"]*",?` /**/, func(l *lex) (bool, error) { l.checkComma(); l.stripQuotes(); l.Value = l.lexed; return true, nil }}},
     {r: rule{`"[^"]*$` /*************/, func(l *lex) (bool, error) { return false, errors.New("unterminated quoted string") }}},
     {r: rule{`"[^"]*"` /*************/, func(l *lex) (bool, error) { return false, errors.New("invalid characters in quoted string") }}},
@@ -98,6 +102,10 @@ func (l *lex) checkComma() {
     }
 }

+func (l *lex) checkEquals() {
+    l.equals = l.lexed[len(l.lexed)-1] == '='
+}
+
 func (l *lex) stripQuotes() {
     l.Quoted = true
     l.lexed = l.lexed[1 : len(l.lexed)-1]
@@ -115,7 +123,9 @@ func init() {
 // is immediately followed by a comma.
 //
 // Inspired from pg's src/backend/libpq/hba.c, next_token().
-func NextToken(buf string) (remaining string, tok String, trailingComma bool, err error) {
+func NextToken(
+    buf string,
+) (remaining string, tok String, trailingComma bool, trailingEquals bool, err error) {
     remaining = buf
     var l lex
 outer:
@@ -135,7 +145,7 @@ outer:
             }
         }
     }
-    return remaining, l.String, l.comma, err
+    return remaining, l.String, l.comma, l.equals, err
 }

 // nextFieldExpand reads the next comma-separated list of string from buf.
@@ -145,14 +155,14 @@ outer:
 func nextFieldExpand(buf string) (remaining string, field []String, err error) {
     remaining = buf
     for {
-        var trailingComma bool
+        var trailingComma, trailingEquals bool
         var tok String
-        remaining, tok, trailingComma, err = NextToken(remaining)
+        remaining, tok, trailingComma, trailingEquals, err = NextToken(remaining)
         if tok.Empty() || err != nil {
            return
         }
         field = append(field, tok)
-        if !trailingComma {
+        if !(trailingComma || trailingEquals) {
            break
         }
     }
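The new five-value return shape of `NextToken` can be seen on the `abc="def"ghi` case exercised in the testdata below; a hedged sketch under the same assumed import path:

```go
package main

import (
    "fmt"

    // Assumed import path, as above.
    "github.com/cockroachdb/cockroach/pkg/settings/rulebasedscanner"
)

func main() {
    // The unquoted token "abc=" ends with '=', so trailingEquals is reported
    // and nextFieldExpand keeps accumulating tokens into the same field.
    rest, tok, comma, equals, err := rulebasedscanner.NextToken(`abc="def"ghi`)
    fmt.Println(tok.Value, tok.Quoted, comma, equals, rest, err)
    // Output: abc= false false true "def"ghi <nil>
}
```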

pkg/settings/rulebasedscanner/scanner_test.go

Lines changed: 2 additions & 2 deletions
@@ -45,11 +45,11 @@ func TestScanner(t *testing.T) {
     datadriven.RunTest(t, datapathutils.TestDataPath(t, "scan"), func(t *testing.T, td *datadriven.TestData) string {
         switch td.Cmd {
         case "token":
-            remaining, tok, trailingComma, err := NextToken(td.Input)
+            remaining, tok, trailingComma, trailingEqualsOp, err := NextToken(td.Input)
             if err != nil {
                 return fmt.Sprintf("error: %v", err)
             }
-            return fmt.Sprintf("%# v %v %q", pretty.Formatter(tok), trailingComma, remaining)
+            return fmt.Sprintf("%# v %v %v %q", pretty.Formatter(tok), trailingComma, trailingEqualsOp, remaining)

         case "field":
             remaining, field, err := nextFieldExpand(td.Input)

pkg/settings/rulebasedscanner/testdata/scan

Lines changed: 161 additions & 11 deletions
@@ -2,66 +2,91 @@ subtest token

 token
 ----
-rulebasedscanner.String{} false ""
+rulebasedscanner.String{} false false ""


 token
 # Just a comment.
 ----
-rulebasedscanner.String{} false ""
+rulebasedscanner.String{} false false ""

 token
 a b
 ----
-rulebasedscanner.String{Value:"a", Quoted:false} false " b"
+rulebasedscanner.String{Value:"a", Quoted:false} false false " b"

 token
 a,b
 ----
-rulebasedscanner.String{Value:"a", Quoted:false} true "b"
+rulebasedscanner.String{Value:"a", Quoted:false} true false "b"

 token
 a, b
 ----
-rulebasedscanner.String{Value:"a", Quoted:false} true " b"
+rulebasedscanner.String{Value:"a", Quoted:false} true false " b"


 token
 a ,b
 ----
-rulebasedscanner.String{Value:"a", Quoted:false} false " ,b"
+rulebasedscanner.String{Value:"a", Quoted:false} false false " ,b"

 token
 abc,def
 ----
-rulebasedscanner.String{Value:"abc", Quoted:false} true "def"
+rulebasedscanner.String{Value:"abc", Quoted:false} true false "def"

 token
 "abc",def
 ----
-rulebasedscanner.String{Value:"abc", Quoted:true} true "def"
+rulebasedscanner.String{Value:"abc", Quoted:true} true false "def"

 token
 "abc"def
 ----
-rulebasedscanner.String{Value:"abc", Quoted:true} false "def"
+rulebasedscanner.String{Value:"abc", Quoted:true} false false "def"

 token
 # abc,def
 ----
-rulebasedscanner.String{} false ""
+rulebasedscanner.String{} false false ""

 token
 # "abc
 ----
-rulebasedscanner.String{} false ""
+rulebasedscanner.String{} false false ""


 token
 "abc
 ----
 error: unterminated quoted string

+token
+"abc=def"ghi
+----
+rulebasedscanner.String{Value:"abc=def", Quoted:true} false false "ghi"
+
+token
+abc="def"ghi
+----
+rulebasedscanner.String{Value:"abc=", Quoted:false} false true "\"def\"ghi"
+
+token
+abc= "def"
+----
+rulebasedscanner.String{Value:"abc=", Quoted:false} false true " \"def\""
+
+token
+"abc= def
+----
+error: unterminated quoted string
+
+token
+abc=def,ghi
+----
+rulebasedscanner.String{Value:"abc=def", Quoted:false} true false "ghi"
+
 subtest end

 subtest field
@@ -149,6 +174,65 @@ field
 []
 ""

+field
+abc=def
+----
+[abc=def]
+""
+
+field
+"abc=def"
+----
+["abc=def"]
+""
+
+field
+"abc= def "
+----
+["abc= def "]
+""
+
+field
+abc=" def "
+----
+[abc= " def "]
+""
+
+field
+abc= " def "
+----
+[abc= " def "]
+""
+
+field
+abc="def=ghi"
+----
+[abc= "def=ghi"]
+""
+
+field
+abc= "def=ghi"
+----
+[abc= "def=ghi"]
+""
+
+field
+abc=def,ghi
+----
+[abc=def ghi]
+""
+
+field
+abc=def, ghi
+----
+[abc=def ghi]
+""
+
+field
+abc="def
+----
+error: unterminated quoted string
+
 field
 all,"abc
 ----
@@ -352,5 +436,71 @@ rulebasedscanner.ScannedInput{
     Linenos: {3, 5},
 }

+file
+#
+
+a "b=c" # c d e
+
+d e="f" # b c
+
+f a="e=c" c= a f= b, c # d b
+
+#
+----
+rulebasedscanner.ScannedInput{
+    Lines: {
+        {
+            Input:  "a \"b=c\" # c d e",
+            Tokens: {
+                {
+                    {Value:"a", Quoted:false},
+                },
+                {
+                    {Value:"b=c", Quoted:true},
+                },
+            },
+        },
+        {
+            Input:  "d e=\"f\" # b c",
+            Tokens: {
+                {
+                    {Value:"d", Quoted:false},
+                },
+                {
+                    {Value:"e=", Quoted:false},
+                    {Value:"f", Quoted:true},
+                },
+            },
+        },
+        {
+            Input:  "f a=\"e=c\" c= a f= b, c # d b",
+            Tokens: {
+                {
+                    {Value:"f", Quoted:false},
+                },
+                {
+                    {Value:"a=", Quoted:false},
+                    {Value:"e=c", Quoted:true},
+                },
+                {
+                    {Value:"c=", Quoted:false},
+                    {Value:"a", Quoted:false},
+                },
+                {
+                    {Value:"f=", Quoted:false},
+                    {Value:"b", Quoted:false},
+                    {Value:"c", Quoted:false},
+                },
+            },
+        },
+    },
+    Linenos: {3, 5, 7},
+}
+
+file
+d a e="f # b c
+----
+error: line 1: unterminated quoted string
+

 subtest end

pkg/sql/pgwire/hba/parser.go

Lines changed: 14 additions & 2 deletions
@@ -157,10 +157,22 @@ func parseHbaLine(inputLine rulebasedscanner.Line) (entry Entry, err error) {

     // Parse remaining arguments.
     for fieldIdx++; fieldIdx < len(line); fieldIdx++ {
-        for _, tok := range line[fieldIdx] {
+        for tokenIdx := 0; tokenIdx < len(line[fieldIdx]); tokenIdx++ {
+            tok := line[fieldIdx][tokenIdx]
             kv := strings.SplitN(tok.Value, "=", 2)
+            // 1. Handle the case where the option does not have an equals operator.
+            // 2. Handle the case where the token ends with an equals operator but the
+            // next token carrying the option value is absent.
+            optionsError := errors.Newf("authentication option not in name=value format: %s", tok.Value)
             if len(kv) != 2 {
-                return entry, errors.Newf("authentication option not in name=value format: %s", tok.Value)
+                return entry, optionsError
+            }
+            if len(kv[1]) == 0 {
+                if (tokenIdx + 1) == len(line[fieldIdx]) {
+                    return entry, optionsError
+                }
+                kv[1], tok.Quoted = rulebasedscanner.Join(line[fieldIdx][tokenIdx+1:], ", "), true
+                tokenIdx = len(line[fieldIdx])
             }
             entry.Options = append(entry.Options, [2]string{kv[0], kv[1]})
             entry.OptionQuotes = append(entry.OptionQuotes, tok.Quoted)
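To make the joining behavior concrete, here is a standalone sketch that mirrors the new loop in `parseHbaLine`; the `token` type and `collectOptions` helper are illustrative stand-ins, not the actual parser code:

```go
package main

import (
    "fmt"
    "strings"
)

// token mirrors rulebasedscanner.String for this standalone sketch.
type token struct {
    Value  string
    Quoted bool
}

// collectOptions mirrors the new loop in parseHbaLine: when a token ends in
// "=" and carries no value, the remaining tokens of the field are joined
// (with ", ") and used as that option's value.
func collectOptions(field []token) ([][2]string, error) {
    var opts [][2]string
    for i := 0; i < len(field); i++ {
        tok := field[i]
        kv := strings.SplitN(tok.Value, "=", 2)
        if len(kv) != 2 {
            return nil, fmt.Errorf("authentication option not in name=value format: %s", tok.Value)
        }
        if len(kv[1]) == 0 {
            if i+1 == len(field) {
                return nil, fmt.Errorf("authentication option not in name=value format: %s", tok.Value)
            }
            values := make([]string, 0, len(field)-i-1)
            for _, t := range field[i+1:] {
                values = append(values, t.Value)
            }
            kv[1] = strings.Join(values, ", ")
            i = len(field)
        }
        opts = append(opts, [2]string{kv[0], kv[1]})
    }
    return opts, nil
}

func main() {
    // The scanner produces two tokens for ldapbinddn="cn=readonly,dc=example,dc=com":
    // a dangling key ("ldapbinddn=") and a quoted value.
    field := []token{
        {Value: "ldapbinddn=", Quoted: false},
        {Value: "cn=readonly,dc=example,dc=com", Quoted: true},
    }
    opts, err := collectOptions(field)
    fmt.Println(opts, err)
    // Output: [[ldapbinddn cn=readonly,dc=example,dc=com]] <nil>
}
```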

0 commit comments
