Merge pull request #1039 from stevvooe/filter-syntax

filters: clean up implementation
This commit is contained in:
Phil Estes 2017-06-21 12:44:10 -04:00 committed by GitHub
commit ad0c8a04f0
4 changed files with 49 additions and 33 deletions

View File

@ -56,6 +56,12 @@ func TestFilters(t *testing.T) {
Name: "bazo", Name: "bazo",
Other: "abc", Other: "abc",
}, },
{
Name: "compound",
Labels: map[string]string{
"foo": "omg_asdf.asdf-qwer",
},
},
} }
var corpus []interface{} var corpus []interface{}
@ -103,6 +109,7 @@ func TestFilters(t *testing.T) {
expected: []interface{}{ expected: []interface{}{
corpus[0], corpus[0],
corpus[2], corpus[2],
corpus[8],
}, },
}, },
{ {
@ -112,6 +119,13 @@ func TestFilters(t *testing.T) {
corpus[0], corpus[0],
}, },
}, },
{
name: "LabelValuePunctuated",
input: "labels.foo==omg_asdf.asdf-qwer",
expected: []interface{}{
corpus[8],
},
},
{ {
name: "Name", name: "Name",
input: "name==bar", input: "name==bar",
@ -130,6 +144,7 @@ func TestFilters(t *testing.T) {
corpus[5], corpus[5],
corpus[6], corpus[6],
corpus[7], corpus[7],
corpus[8],
}, },
}, },
{ {

View File

@ -72,7 +72,7 @@ loop:
switch tok { switch tok {
case ',': case ',':
pos, tok, _ := p.scanner.scan() pos, tok, _ := p.scanner.scan()
if tok != tokenSelectorSeparator { if tok != tokenSeparator {
return nil, p.mkerr(pos, "expected a separator") return nil, p.mkerr(pos, "expected a separator")
} }
@ -85,7 +85,7 @@ loop:
case tokenEOF: case tokenEOF:
break loop break loop
default: default:
panic("unconsumed input") return nil, p.mkerr(p.scanner.ppos, "unexpected input: %v", string(tok))
} }
} }
@ -99,7 +99,7 @@ func (p *parser) selector() (selector, error) {
} }
switch p.scanner.peek() { switch p.scanner.peek() {
case tokenSelectorSeparator, tokenEOF: case tokenSeparator, tokenEOF:
return selector{ return selector{
fieldpath: fieldpath, fieldpath: fieldpath,
operator: operatorPresent, operator: operatorPresent,
@ -140,7 +140,7 @@ loop:
switch tok { switch tok {
case '.': case '.':
pos, tok, _ := p.scanner.scan() // consume separator pos, tok, _ := p.scanner.scan() // consume separator
if tok != tokenFieldSeparator { if tok != tokenSeparator {
return nil, p.mkerr(pos, "expected a field separator (`.`)") return nil, p.mkerr(pos, "expected a field separator (`.`)")
} }

View File

@ -11,9 +11,8 @@ const (
tokenQuoted tokenQuoted
tokenValue tokenValue
tokenField tokenField
tokenFieldSeparator tokenSeparator
tokenOperator tokenOperator
tokenSelectorSeparator
tokenIllegal tokenIllegal
) )
@ -29,12 +28,10 @@ func (t token) String() string {
return "Value" return "Value"
case tokenField: case tokenField:
return "Field" return "Field"
case tokenSeparator:
return "Separator"
case tokenOperator: case tokenOperator:
return "Operator" return "Operator"
case tokenFieldSeparator:
return "FieldSeparator"
case tokenSelectorSeparator:
return "SelectorSeparator"
case tokenIllegal: case tokenIllegal:
return "Illegal" return "Illegal"
} }
@ -102,12 +99,10 @@ chomp:
case ch == tokenEOF: case ch == tokenEOF:
case ch == tokenIllegal: case ch == tokenIllegal:
case isQuoteRune(ch): case isQuoteRune(ch):
s.scanString(ch) s.scanQuoted(ch)
return pos, tokenQuoted, s.input[pos:s.ppos] return pos, tokenQuoted, s.input[pos:s.ppos]
case ch == ',': case isSeparatorRune(ch):
return pos, tokenSelectorSeparator, s.input[pos:s.ppos] return pos, tokenSeparator, s.input[pos:s.ppos]
case ch == '.':
return pos, tokenFieldSeparator, s.input[pos:s.ppos]
case isOperatorRune(ch): case isOperatorRune(ch):
s.scanOperator() s.scanOperator()
s.value = true s.value = true
@ -119,12 +114,6 @@ chomp:
goto chomp goto chomp
case s.value: case s.value:
s.scanValue() s.scanValue()
// TODO(stevvooe): We can get rid of the value flag by by having a
// scanUnquoted that accumulates characters. If it is a legal field,
// then we return a field token. The parser can then treat fields as
// values. This will allow the default case here to just scan value or
// field.
s.value = false s.value = false
return pos, tokenValue, s.input[pos:s.ppos] return pos, tokenValue, s.input[pos:s.ppos]
case isFieldRune(ch): case isFieldRune(ch):
@ -167,7 +156,7 @@ func (s *scanner) scanValue() {
} }
} }
func (s *scanner) scanString(quote rune) { func (s *scanner) scanQuoted(quote rune) {
ch := s.next() // read character after quote ch := s.next() // read character after quote
for ch != quote { for ch != quote {
if ch == '\n' || ch < 0 { if ch == '\n' || ch < 0 {

View File

@ -38,7 +38,7 @@ func TestScanner(t *testing.T) {
{pos: 0, token: tokenField, text: "name"}, {pos: 0, token: tokenField, text: "name"},
{pos: 4, token: tokenOperator, text: "=="}, {pos: 4, token: tokenOperator, text: "=="},
{pos: 6, token: tokenValue, text: "value"}, {pos: 6, token: tokenValue, text: "value"},
{pos: 11, token: tokenSelectorSeparator, text: ","}, {pos: 11, token: tokenSeparator, text: ","},
{pos: 12, token: tokenField, text: "foo"}, {pos: 12, token: tokenField, text: "foo"},
{pos: 15, token: tokenOperator, text: "!="}, {pos: 15, token: tokenOperator, text: "!="},
{pos: 17, token: tokenValue, text: "bar"}, {pos: 17, token: tokenValue, text: "bar"},
@ -52,15 +52,15 @@ func TestScanner(t *testing.T) {
{pos: 0, token: tokenField, text: "name"}, {pos: 0, token: tokenField, text: "name"},
{pos: 4, token: tokenOperator, text: "=="}, {pos: 4, token: tokenOperator, text: "=="},
{pos: 6, token: tokenValue, text: "value"}, {pos: 6, token: tokenValue, text: "value"},
{pos: 11, token: tokenSelectorSeparator, text: ","}, {pos: 11, token: tokenSeparator, text: ","},
{pos: 12, token: tokenField, text: "labels"}, {pos: 12, token: tokenField, text: "labels"},
{pos: 18, token: tokenFieldSeparator, text: "."}, {pos: 18, token: tokenSeparator, text: "."},
{pos: 19, token: tokenField, text: "foo"}, {pos: 19, token: tokenField, text: "foo"},
{pos: 22, token: tokenOperator, text: "="}, {pos: 22, token: tokenOperator, text: "="},
{pos: 23, token: tokenValue, text: "value"}, {pos: 23, token: tokenValue, text: "value"},
{pos: 28, token: tokenSelectorSeparator, text: ","}, {pos: 28, token: tokenSeparator, text: ","},
{pos: 29, token: tokenField, text: "other"}, {pos: 29, token: tokenField, text: "other"},
{pos: 34, token: tokenFieldSeparator, text: "."}, {pos: 34, token: tokenSeparator, text: "."},
{pos: 35, token: tokenField, text: "bar"}, {pos: 35, token: tokenField, text: "bar"},
{pos: 38, token: tokenOperator, text: "~="}, {pos: 38, token: tokenOperator, text: "~="},
{pos: 40, token: tokenValue, text: "match"}, {pos: 40, token: tokenValue, text: "match"},
@ -74,7 +74,7 @@ func TestScanner(t *testing.T) {
{pos: 0, token: tokenField, text: "name"}, {pos: 0, token: tokenField, text: "name"},
{pos: 4, token: tokenOperator, text: "~="}, {pos: 4, token: tokenOperator, text: "~="},
{pos: 6, token: tokenValue, text: "[abc]+"}, {pos: 6, token: tokenValue, text: "[abc]+"},
{pos: 12, token: tokenSelectorSeparator, text: ","}, {pos: 12, token: tokenSeparator, text: ","},
{pos: 13, token: tokenField, text: "foo"}, {pos: 13, token: tokenField, text: "foo"},
{pos: 16, token: tokenOperator, text: "="}, {pos: 16, token: tokenOperator, text: "="},
{pos: 17, token: tokenValue, text: "test"}, {pos: 17, token: tokenValue, text: "test"},
@ -88,7 +88,7 @@ func TestScanner(t *testing.T) {
{pos: 0, token: tokenField, text: "name"}, {pos: 0, token: tokenField, text: "name"},
{pos: 4, token: tokenOperator, text: "~="}, {pos: 4, token: tokenOperator, text: "~="},
{pos: 6, token: tokenValue, text: "[abc]\\+"}, {pos: 6, token: tokenValue, text: "[abc]\\+"},
{pos: 13, token: tokenSelectorSeparator, text: ","}, {pos: 13, token: tokenSeparator, text: ","},
{pos: 14, token: tokenField, text: "foo"}, {pos: 14, token: tokenField, text: "foo"},
{pos: 17, token: tokenOperator, text: "="}, {pos: 17, token: tokenOperator, text: "="},
{pos: 18, token: tokenValue, text: "test"}, {pos: 18, token: tokenValue, text: "test"},
@ -102,9 +102,9 @@ func TestScanner(t *testing.T) {
{pos: 0, token: tokenField, text: "name"}, {pos: 0, token: tokenField, text: "name"},
{pos: 4, token: tokenOperator, text: "~="}, {pos: 4, token: tokenOperator, text: "~="},
{pos: 6, token: tokenValue, text: "牛"}, {pos: 6, token: tokenValue, text: "牛"},
{pos: 9, token: tokenSelectorSeparator, text: ","}, {pos: 9, token: tokenSeparator, text: ","},
{pos: 10, token: tokenField, text: "labels"}, {pos: 10, token: tokenField, text: "labels"},
{pos: 16, token: tokenFieldSeparator, text: "."}, {pos: 16, token: tokenSeparator, text: "."},
{pos: 17, token: tokenField, text: "moo"}, {pos: 17, token: tokenField, text: "moo"},
{pos: 20, token: tokenOperator, text: "="}, {pos: 20, token: tokenOperator, text: "="},
{pos: 21, token: tokenValue, text: "true"}, {pos: 21, token: tokenValue, text: "true"},
@ -141,6 +141,18 @@ func TestScanner(t *testing.T) {
{pos: 13, token: tokenEOF}, {pos: 13, token: tokenEOF},
}, },
}, },
{
name: "ValuesPunctuated",
input: "compound.labels==punctuated_value.foo-bar",
expected: []tokenResult{
{pos: 0, token: tokenField, text: "compound"},
{pos: 8, token: tokenSeparator, text: "."},
{pos: 9, token: tokenField, text: "labels"},
{pos: 15, token: tokenOperator, text: "=="},
{pos: 17, token: tokenValue, text: "punctuated_value.foo-bar"},
{pos: 41, token: tokenEOF},
},
},
{ {
name: "PartialInput", name: "PartialInput",
input: "interrupted=", input: "interrupted=",
@ -166,7 +178,7 @@ func TestScanner(t *testing.T) {
input: `"leading quote".postquote==value`, input: `"leading quote".postquote==value`,
expected: []tokenResult{ expected: []tokenResult{
{pos: 0, token: tokenQuoted, text: "\"leading quote\""}, {pos: 0, token: tokenQuoted, text: "\"leading quote\""},
{pos: 15, token: tokenFieldSeparator, text: "."}, {pos: 15, token: tokenSeparator, text: "."},
{pos: 16, token: tokenField, text: "postquote"}, {pos: 16, token: tokenField, text: "postquote"},
{pos: 25, token: tokenOperator, text: "=="}, {pos: 25, token: tokenOperator, text: "=="},
{pos: 27, token: tokenValue, text: "value"}, {pos: 27, token: tokenValue, text: "value"},
@ -179,7 +191,7 @@ func TestScanner(t *testing.T) {
expected: []tokenResult{ expected: []tokenResult{
{pos: 0, token: tokenField, text: "input"}, {pos: 0, token: tokenField, text: "input"},
{pos: 5, token: tokenOperator, text: "=="}, {pos: 5, token: tokenOperator, text: "=="},
{pos: 7, token: tokenSelectorSeparator, text: ","}, {pos: 7, token: tokenSeparator, text: ","},
{pos: 8, token: tokenValue, text: "id?=ff"}, {pos: 8, token: tokenValue, text: "id?=ff"},
{pos: 14, token: tokenEOF}, {pos: 14, token: tokenEOF},
}, },