Skip to content

Commit

Permalink
Merge #131480
Browse files Browse the repository at this point in the history
131480: hba,rulebasedscanner: handle double quotes in HBA conf option value r=pritesh-lahoti a=souravcrl

fix CRDB-39812
Epic CRDB-33829

Currently, the HBA configuration cluster setting value does not fully adhere to
the `pg_hba.conf` format, and we fail to handle double quotes in HBA auth method
option values. This requires fixes to the HBA parser and tokenizer code.

Release note(security, ops): HBA configuration cluster setting
`server.host_based_authentication.configuration` is currently unable to handle
double quotes in authentication method option values. For example, consider the
following HBA entry:
```
host all all all ldap ldapserver=ldap.example.com ldapport=636 ldapbasedn="ou=users,dc=example,dc=com" ldapbinddn="cn=readonly,dc=example,dc=com" ldapbindpasswd=readonly_password ldapsearchattribute=uid ldapsearchfilter="(memberof=cn=cockroachdb_users,ou=groups,dc=example,dc=com)"
```
The HBA parser fails after splitting
`ldapbinddn="cn=readonly,dc=example,dc=com"` into 2 separate options
(`ldapbinddn=` and `cn=readonly,dc=example,dc=com`). This PR fixes that, so the
above 2 tokens are set as the key and value, respectively, of the same HBA
configuration option.

Co-authored-by: souravcrl <[email protected]>
  • Loading branch information
craig[bot] and souravcrl committed Sep 30, 2024
2 parents b6c1368 + 1f6ed61 commit 8c52619
Show file tree
Hide file tree
Showing 6 changed files with 249 additions and 21 deletions.
13 changes: 13 additions & 0 deletions pkg/settings/rulebasedscanner/scanned_input.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@

package rulebasedscanner

import "strings"

// ScannedInput represents the result of tokenizing the input
// configuration data.
//
Expand Down Expand Up @@ -52,3 +54,14 @@ func (s String) Empty() bool { return s.IsKeyword("") }
// IsKeyword reports whether s is an unquoted token whose value is
// exactly v. A quoted token is never a keyword, even if its value
// matches.
func (s String) IsKeyword(v string) bool {
	if s.Quoted {
		return false
	}
	return s.Value == v
}

// Join concatenates the elements of its first argument to create a single
// string. The separator string sep is placed between elements in the resulting
// string.
// Join concatenates the Value fields of the elements of elems into a
// single string, placing the separator sep between consecutive
// elements. Quoting information on the elements is ignored.
func Join(elems []String, sep string) string {
	var sb strings.Builder
	for i := range elems {
		if i > 0 {
			sb.WriteString(sep)
		}
		sb.WriteString(elems[i].Value)
	}
	return sb.String()
}
22 changes: 16 additions & 6 deletions pkg/settings/rulebasedscanner/scanner.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,10 @@ type lex struct {
// comma.
comma bool

// equals is set to true if the last found token ended with an
// equals ('=') sign, i.e. it is the key of a key=value option.
equals bool

// lexed is set to the portion of the text matched by the current
// rule, and is provided as input to the rule's action function.
lexed string
Expand Down Expand Up @@ -84,7 +88,7 @@ var rules = []struct {
}{
{r: rule{`[ \t\r,]*` /***********/, func(l *lex) (bool, error) { return false, nil }}},
{r: rule{`#.*$` /****************/, func(l *lex) (bool, error) { return false, nil }}},
{r: rule{`[^[:cntrl:] ",]+,?` /**/, func(l *lex) (bool, error) { l.checkComma(); l.Value = l.lexed; return true, nil }}},
{r: rule{`[^[:cntrl:] ",]+,?` /**/, func(l *lex) (bool, error) { l.checkComma(); l.checkEquals(); l.Value = l.lexed; return true, nil }}},
{r: rule{`"[^[:cntrl:]"]*",?` /**/, func(l *lex) (bool, error) { l.checkComma(); l.stripQuotes(); l.Value = l.lexed; return true, nil }}},
{r: rule{`"[^"]*$` /*************/, func(l *lex) (bool, error) { return false, errors.New("unterminated quoted string") }}},
{r: rule{`"[^"]*"` /*************/, func(l *lex) (bool, error) { return false, errors.New("invalid characters in quoted string") }}},
Expand All @@ -98,6 +102,10 @@ func (l *lex) checkComma() {
}
}

// checkEquals records whether the token just lexed ends in an equals
// ('=') sign, which indicates the token is an option key whose value
// follows in the next token (e.g. `ldapbasedn="ou=users,..."`).
// The length guard makes the check safe even if a future rule ever
// matches an empty string; today's rules always produce a non-empty
// lexed, so behavior is unchanged.
func (l *lex) checkEquals() {
	l.equals = len(l.lexed) > 0 && l.lexed[len(l.lexed)-1] == '='
}

func (l *lex) stripQuotes() {
l.Quoted = true
l.lexed = l.lexed[1 : len(l.lexed)-1]
Expand All @@ -115,7 +123,9 @@ func init() {
// is immediately followed by a comma.
//
// Inspired from pg's src/backend/libpq/hba.c, next_token().
func NextToken(buf string) (remaining string, tok String, trailingComma bool, err error) {
func NextToken(
buf string,
) (remaining string, tok String, trailingComma bool, trailingEquals bool, err error) {
remaining = buf
var l lex
outer:
Expand All @@ -135,7 +145,7 @@ outer:
}
}
}
return remaining, l.String, l.comma, err
return remaining, l.String, l.comma, l.equals, err
}

// nextFieldExpand reads the next comma-separated list of string from buf.
Expand All @@ -145,14 +155,14 @@ outer:
func nextFieldExpand(buf string) (remaining string, field []String, err error) {
remaining = buf
for {
var trailingComma bool
var trailingComma, trailingEquals bool
var tok String
remaining, tok, trailingComma, err = NextToken(remaining)
remaining, tok, trailingComma, trailingEquals, err = NextToken(remaining)
if tok.Empty() || err != nil {
return
}
field = append(field, tok)
if !trailingComma {
if !(trailingComma || trailingEquals) {
break
}
}
Expand Down
4 changes: 2 additions & 2 deletions pkg/settings/rulebasedscanner/scanner_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,11 @@ func TestScanner(t *testing.T) {
datadriven.RunTest(t, datapathutils.TestDataPath(t, "scan"), func(t *testing.T, td *datadriven.TestData) string {
switch td.Cmd {
case "token":
remaining, tok, trailingComma, err := NextToken(td.Input)
remaining, tok, trailingComma, trailingEqualsOp, err := NextToken(td.Input)
if err != nil {
return fmt.Sprintf("error: %v", err)
}
return fmt.Sprintf("%# v %v %q", pretty.Formatter(tok), trailingComma, remaining)
return fmt.Sprintf("%# v %v %v %q", pretty.Formatter(tok), trailingComma, trailingEqualsOp, remaining)

case "field":
remaining, field, err := nextFieldExpand(td.Input)
Expand Down
172 changes: 161 additions & 11 deletions pkg/settings/rulebasedscanner/testdata/scan
Original file line number Diff line number Diff line change
Expand Up @@ -2,66 +2,91 @@ subtest token

token
----
rulebasedscanner.String{} false ""
rulebasedscanner.String{} false false ""


token
# Just a comment.
----
rulebasedscanner.String{} false ""
rulebasedscanner.String{} false false ""

token
a b
----
rulebasedscanner.String{Value:"a", Quoted:false} false " b"
rulebasedscanner.String{Value:"a", Quoted:false} false false " b"

token
a,b
----
rulebasedscanner.String{Value:"a", Quoted:false} true "b"
rulebasedscanner.String{Value:"a", Quoted:false} true false "b"

token
a, b
----
rulebasedscanner.String{Value:"a", Quoted:false} true " b"
rulebasedscanner.String{Value:"a", Quoted:false} true false " b"


token
a ,b
----
rulebasedscanner.String{Value:"a", Quoted:false} false " ,b"
rulebasedscanner.String{Value:"a", Quoted:false} false false " ,b"

token
abc,def
----
rulebasedscanner.String{Value:"abc", Quoted:false} true "def"
rulebasedscanner.String{Value:"abc", Quoted:false} true false "def"

token
"abc",def
----
rulebasedscanner.String{Value:"abc", Quoted:true} true "def"
rulebasedscanner.String{Value:"abc", Quoted:true} true false "def"

token
"abc"def
----
rulebasedscanner.String{Value:"abc", Quoted:true} false "def"
rulebasedscanner.String{Value:"abc", Quoted:true} false false "def"

token
# abc,def
----
rulebasedscanner.String{} false ""
rulebasedscanner.String{} false false ""

token
# "abc
----
rulebasedscanner.String{} false ""
rulebasedscanner.String{} false false ""


token
"abc
----
error: unterminated quoted string

token
"abc=def"ghi
----
rulebasedscanner.String{Value:"abc=def", Quoted:true} false false "ghi"

token
abc="def"ghi
----
rulebasedscanner.String{Value:"abc=", Quoted:false} false true "\"def\"ghi"

token
abc= "def"
----
rulebasedscanner.String{Value:"abc=", Quoted:false} false true " \"def\""

token
"abc= def
----
error: unterminated quoted string

token
abc=def,ghi
----
rulebasedscanner.String{Value:"abc=def", Quoted:false} true false "ghi"

subtest end

subtest field
Expand Down Expand Up @@ -149,6 +174,65 @@ field
[]
""

field
abc=def
----
[abc=def]
""

field
"abc=def"
----
["abc=def"]
""

field
"abc= def "
----
["abc= def "]
""

field
abc=" def "
----
[abc= " def "]
""

field
abc= " def "
----
[abc= " def "]
""

field
abc="def=ghi"
----
[abc= "def=ghi"]
""

field
abc= "def=ghi"
----
[abc= "def=ghi"]
""

field
abc=def,ghi
----
[abc=def ghi]
""

field
abc=def, ghi
----
[abc=def ghi]
""

field
abc="def
----
error: unterminated quoted string

field
all,"abc
----
Expand Down Expand Up @@ -352,5 +436,71 @@ rulebasedscanner.ScannedInput{
Linenos: {3, 5},
}

file
#

a "b=c" # c d e

d e="f" # b c

f a="e=c" c= a f= b, c # d b

#
----
rulebasedscanner.ScannedInput{
Lines: {
{
Input: "a \"b=c\" # c d e",
Tokens: {
{
{Value:"a", Quoted:false},
},
{
{Value:"b=c", Quoted:true},
},
},
},
{
Input: "d e=\"f\" # b c",
Tokens: {
{
{Value:"d", Quoted:false},
},
{
{Value:"e=", Quoted:false},
{Value:"f", Quoted:true},
},
},
},
{
Input: "f a=\"e=c\" c= a f= b, c # d b",
Tokens: {
{
{Value:"f", Quoted:false},
},
{
{Value:"a=", Quoted:false},
{Value:"e=c", Quoted:true},
},
{
{Value:"c=", Quoted:false},
{Value:"a", Quoted:false},
},
{
{Value:"f=", Quoted:false},
{Value:"b", Quoted:false},
{Value:"c", Quoted:false},
},
},
},
},
Linenos: {3, 5, 7},
}

file
d a e="f # b c
----
error: line 1: unterminated quoted string


subtest end
16 changes: 14 additions & 2 deletions pkg/sql/pgwire/hba/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -157,10 +157,22 @@ func parseHbaLine(inputLine rulebasedscanner.Line) (entry Entry, err error) {

// Parse remaining arguments.
for fieldIdx++; fieldIdx < len(line); fieldIdx++ {
for _, tok := range line[fieldIdx] {
for tokenIdx := 0; tokenIdx < len(line[fieldIdx]); tokenIdx++ {
tok := line[fieldIdx][tokenIdx]
kv := strings.SplitN(tok.Value, "=", 2)
// 1. Handle the case where the option does not have equal operator.
// 2. Handle the case where token ends with equals operator and next token
// having the value for option is absent.
optionsError := errors.Newf("authentication option not in name=value format: %s", tok.Value)
if len(kv) != 2 {
return entry, errors.Newf("authentication option not in name=value format: %s", tok.Value)
return entry, optionsError
}
if len(kv[1]) == 0 {
if (tokenIdx + 1) == len(line[fieldIdx]) {
return entry, optionsError
}
kv[1], tok.Quoted = rulebasedscanner.Join(line[fieldIdx][tokenIdx+1:], ", "), true
tokenIdx = len(line[fieldIdx])
}
entry.Options = append(entry.Options, [2]string{kv[0], kv[1]})
entry.OptionQuotes = append(entry.OptionQuotes, tok.Quoted)
Expand Down
Loading

0 comments on commit 8c52619

Please sign in to comment.