watch: First pass at query parsing

This commit is contained in:
Armon Dadgar 2014-08-20 11:19:43 -07:00
parent d72158b71b
commit bd89ff61ae
2 changed files with 266 additions and 0 deletions

160
watch/watch.go Normal file
View File

@@ -0,0 +1,160 @@
package watch
import (
"fmt"
"strings"
)
// WatchPlan is the parsed version of a watch specification. A watch provides
// the details of a query, which generates a view into the Consul data store.
// This view is watched for changes and a handler is invoked to take any
// appropriate actions.
type WatchPlan struct {
	// Datacenter is the optional "datacenter:..." parameter value.
	Datacenter string
	// Token is the optional "token:..." parameter value.
	Token string
	// Type is the required "type:..." parameter value; Parse rejects
	// queries that do not specify it.
	Type string
}
// Parse takes a watch query and compiles it into a WatchPlan or an error
func Parse(query string) (*WatchPlan, error) {
	toks, err := tokenize(query)
	if err != nil {
		return nil, fmt.Errorf("Failed to parse: %v", err)
	}

	// Fold the token stream into a parameter -> values mapping
	kv := collapse(toks)

	// The watch type is mandatory, so extract and validate it first
	plan := new(WatchPlan)
	if err := assignValue(kv, "type", &plan.Type); err != nil {
		return nil, err
	}
	if plan.Type == "" {
		return nil, fmt.Errorf("Watch type must be specified")
	}

	// The remaining parameters are optional
	if err := assignValue(kv, "datacenter", &plan.Datacenter); err != nil {
		return nil, err
	}
	if err := assignValue(kv, "token", &plan.Token); err != nil {
		return nil, err
	}
	return plan, nil
}
// assignValue is used to extract a value ensuring it is only
// defined once
func assignValue(params map[string][]string, name string, out *string) error {
	vals, ok := params[name]
	if !ok {
		// Absent parameters are not an error; *out is left untouched
		return nil
	}
	if len(vals) != 1 {
		return fmt.Errorf("Multiple definitions of %s", name)
	}

	// Consume the parameter so leftover entries can be detected later
	*out = vals[0]
	delete(params, name)
	return nil
}
// token is used to represent a "datacenter:foobar" pair, where
// datacenter is the param and foobar is the value
type token struct {
	// param is the name on the left of the ':' delimiter.
	param string
	// val is the (possibly quoted) value on the right of the delimiter.
	val string
}
// GoString implements fmt.GoStringer so "%#v" on a *token shows the
// struct contents instead of a pointer address. Formatting the
// dereferenced value (*t) avoids infinite recursion: GoString is
// declared on the pointer receiver only, so the value does not
// satisfy fmt.GoStringer itself.
func (t *token) GoString() string {
	return fmt.Sprintf("%#v", *t)
}
// tokenize splits a query string into a slice of tokens
func tokenize(query string) ([]*token, error) {
	var out []*token
	for i := 0; i < len(query); i++ {
		// Skip any whitespace separating tokens
		switch query[i] {
		case ' ', '\t', '\n':
			continue
		}

		// Scan the param:value pair starting at this position
		tok, consumed, err := readToken(query[i:])
		if err != nil {
			return nil, err
		}
		out = append(out, tok)

		// Jump past the characters the token consumed
		i += consumed
	}
	return out, nil
}
// readToken is used to read a single token
func readToken(query string) (*token, int, error) {
	// Scan the parameter name up through the ':' delimiter
	param, n, err := readParameter(query)
	if err != nil {
		return nil, 0, err
	}

	// Scan the value that follows the delimiter
	val, m, err := readValue(query[n:])
	if err != nil {
		return nil, 0, err
	}

	// Report the total number of bytes consumed by both parts
	return &token{param: param, val: val}, n + m, nil
}
// readParameter scans for the next parameter name, which runs up to a
// ':' delimiter. It returns the name and the number of bytes consumed
// (including the delimiter), or an error if the delimiter is absent or
// the name is empty.
func readParameter(query string) (string, int, error) {
	// Locate the delimiter with the standard library rather than a
	// manual byte loop; behavior is identical.
	idx := strings.IndexByte(query, ':')
	if idx == -1 {
		return "", 0, fmt.Errorf("Parameter delimiter not found")
	}
	if idx == 0 {
		return "", 0, fmt.Errorf("Missing parameter name")
	}
	// +1 so the caller resumes scanning after the ':'
	return query[:idx], idx + 1, nil
}
// readValue is used to scan for the next value. A value is either a
// bare word terminated by whitespace or end-of-input, or a single- or
// double-quoted string (quotes are stripped from the result). Returns
// the value and the number of bytes consumed.
func readValue(query string) (string, int, error) {
	// Guard against a parameter with no value at all (e.g. "type:"),
	// which would otherwise panic on the index below.
	if len(query) == 0 {
		return "", 0, fmt.Errorf("Missing value")
	}

	// Handle quoted values
	if query[0] == '\'' || query[0] == '"' {
		quoteChar := query[0:1]
		endChar := strings.Index(query[1:], quoteChar)
		if endChar == -1 {
			return "", 0, fmt.Errorf("Missing end of quotation")
		}
		// endChar is relative to query[1:], so the closing quote sits
		// at index endChar+1; consume through it (endChar+2 bytes).
		return query[1 : endChar+1], endChar + 2, nil
	}

	// Unquoted: the value runs until the next whitespace character
	endChar := strings.IndexAny(query, " \t\n")
	if endChar == -1 {
		// No trailing whitespace; consume the remainder of the input
		return query, len(query), nil
	}
	return query[:endChar], endChar, nil
}
// collapse is used to collapse a token stream into a map
// of parameter name to list of values.
func collapse(tokens []*token) map[string][]string {
	out := make(map[string][]string)
	for _, tok := range tokens {
		// append on the map entry preserves the order in which the
		// values appeared in the query
		out[tok.param] = append(out[tok.param], tok.val)
	}
	return out
}

106
watch/watch_test.go Normal file
View File

@@ -0,0 +1,106 @@
package watch
import (
"fmt"
"reflect"
"testing"
)
// TestTokenize exercises the lexer over empty input, plain pairs,
// quoted values (both quote styles), and each error path.
func TestTokenize(t *testing.T) {
	cases := []struct {
		in  string
		out []*token
		err error
	}{
		{"", nil, nil},
		{
			"foo:bar bar:baz zip:zap",
			[]*token{
				&token{"foo", "bar"},
				&token{"bar", "baz"},
				&token{"zip", "zap"},
			},
			nil,
		},
		{
			"foo:\"long input here\" after:this",
			[]*token{
				&token{"foo", "long input here"},
				&token{"after", "this"},
			},
			nil,
		},
		{
			"foo:'long input here' after:this",
			[]*token{
				&token{"foo", "long input here"},
				&token{"after", "this"},
			},
			nil,
		},
		{"foo:'long input here after:this", nil, fmt.Errorf("Missing end of quotation")},
		{"foo", nil, fmt.Errorf("Parameter delimiter not found")},
		{":val", nil, fmt.Errorf("Missing parameter name")},
	}
	for _, tc := range cases {
		tokens, err := tokenize(tc.in)
		if err != nil && tc.err == nil {
			t.Fatalf("%s: err: %v", tc.in, err)
		} else if tc.err != nil && (err == nil || err.Error() != tc.err.Error()) {
			t.Fatalf("%s: bad err: %v", tc.in, err)
		}
		if !reflect.DeepEqual(tokens, tc.out) {
			t.Fatalf("%s: bad: %#v %#v", tc.in, tokens, tc.out)
		}
	}
}
// TestCollapse verifies that repeated parameters are gathered into a
// single map entry, preserving their order of appearance.
func TestCollapse(t *testing.T) {
	tokens, err := tokenize("type:key key:foo key:bar")
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	out := collapse(tokens)
	expect := map[string][]string{
		"type": {"key"},
		"key":  {"foo", "bar"},
	}
	if !reflect.DeepEqual(out, expect) {
		t.Fatalf("bad: %#v", out)
	}
}
// TestParseBasic checks that every recognized parameter lands on the
// matching WatchPlan field.
func TestParseBasic(t *testing.T) {
	plan, err := Parse("type:key datacenter:dc2 token:12345")
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	if plan.Datacenter != "dc2" {
		t.Fatalf("Bad: %#v", plan)
	}
	if plan.Token != "12345" {
		t.Fatalf("Bad: %#v", plan)
	}
	if plan.Type != "key" {
		t.Fatalf("Bad: %#v", plan)
	}
}