From cb61245078a0e8f14e359ed20e0582a695645a08 Mon Sep 17 00:00:00 2001
From: Miklos Vajna
Date: Sun, 14 Feb 2021 16:03:51 +0100
Subject: Add ability to search by arbitrary metadata

Example:

~/git/git-bug/git-bug ls --metadata github-url=https://github.com/author/myproject/issues/42

or

~/git/git-bug/git-bug ls metadata:github-url:\"https://github.com/author/myproject/issues/42\"

Fixes the cmdline part of .
---
 query/lexer.go       | 58 ++++++++++++++++++++++++++++++++++++++++++++--------
 query/lexer_test.go  |  8 ++++++++
 query/parser.go      | 15 ++++++++++++++
 query/parser_test.go |  5 +++++
 query/query.go       |  7 +++++++
 5 files changed, 85 insertions(+), 8 deletions(-)

(limited to 'query')

diff --git a/query/lexer.go b/query/lexer.go
index 5ca700c7..45f657df 100644
--- a/query/lexer.go
+++ b/query/lexer.go
@@ -11,16 +11,20 @@ type tokenKind int
 const (
 	_ tokenKind = iota
 	tokenKindKV
+	tokenKindKVV
 	tokenKindSearch
 )
 
 type token struct {
 	kind tokenKind
 
-	// KV
+	// KV and KVV
 	qualifier string
 	value     string
 
+	// KVV only
+	subQualifier string
+
 	// Search
 	term string
 }
@@ -33,6 +37,15 @@ func newTokenKV(qualifier, value string) token {
 	}
 }
 
+func newTokenKVV(qualifier, subQualifier, value string) token {
+	return token{
+		kind:         tokenKindKVV,
+		qualifier:    qualifier,
+		subQualifier: subQualifier,
+		value:        value,
+	}
+}
+
 func newTokenSearch(term string) token {
 	return token{
 		kind: tokenKindSearch,
@@ -50,7 +63,23 @@ func tokenize(query string) ([]token, error) {
 
 	var tokens []token
 	for _, field := range fields {
-		split := strings.Split(field, ":")
+		// Split using ':' as separator, but separators inside '"' don't count.
+		quoted := false
+		split := strings.FieldsFunc(field, func(r rune) bool {
+			if r == '"' {
+				quoted = !quoted
+			}
+			return !quoted && r == ':'
+		})
+		if strings.HasPrefix(field, ":") {
+			split = append([]string{""}, split...)
+		}
+		if strings.HasSuffix(field, ":") {
+			split = append(split, "")
+		}
+		if quoted {
+			return nil, fmt.Errorf("can't tokenize \"%s\": unmatched quote", field)
+		}
 
 		// full text search
 		if len(split) == 1 {
@@ -58,18 +87,31 @@ func tokenize(query string) ([]token, error) {
 			continue
 		}
 
-		if len(split) != 2 {
-			return nil, fmt.Errorf("can't tokenize \"%s\"", field)
+		if len(split) > 3 {
+			return nil, fmt.Errorf("can't tokenize \"%s\": too many separators", field)
 		}
 
 		if len(split[0]) == 0 {
 			return nil, fmt.Errorf("can't tokenize \"%s\": empty qualifier", field)
 		}
-		if len(split[1]) == 0 {
-			return nil, fmt.Errorf("empty value for qualifier \"%s\"", split[0])
-		}
-		tokens = append(tokens, newTokenKV(split[0], removeQuote(split[1])))
+		if len(split) == 2 {
+			if len(split[1]) == 0 {
+				return nil, fmt.Errorf("empty value for qualifier \"%s\"", split[0])
+			}
+
+			tokens = append(tokens, newTokenKV(split[0], removeQuote(split[1])))
+		} else {
+			if len(split[1]) == 0 {
+				return nil, fmt.Errorf("empty sub-qualifier for qualifier \"%s\"", split[0])
+			}
+
+			if len(split[2]) == 0 {
+				return nil, fmt.Errorf("empty value for qualifier \"%s:%s\"", split[0], split[1])
+			}
+
+			tokens = append(tokens, newTokenKVV(split[0], removeQuote(split[1]), removeQuote(split[2])))
+		}
 	}
 
 	return tokens, nil
 }
diff --git a/query/lexer_test.go b/query/lexer_test.go
index 59f17dec..4ffb35a0 100644
--- a/query/lexer_test.go
+++ b/query/lexer_test.go
@@ -37,6 +37,14 @@ func TestTokenize(t *testing.T) {
 		{`key:'value value`, nil},
 		{`key:value value'`, nil},
 
+		// sub-qualifier positive testing
+		{`key:subkey:"value:value"`, []token{newTokenKVV("key", "subkey", "value:value")}},
+
+		// sub-qualifier negative testing
+		{`key:subkey:value:value`, nil},
+		{`key:subkey:`, nil},
+		{`key:subkey:"value`, nil},
+
 		// full text search
 		{"search", []token{newTokenSearch("search")}},
 		{"search more terms", []token{
diff --git a/query/parser.go b/query/parser.go
index 762a47e5..e820c629 100644
--- a/query/parser.go
+++ b/query/parser.go
@@ -67,6 +67,21 @@ func Parse(query string) (*Query, error) {
 		default:
 			return nil, fmt.Errorf("unknown qualifier \"%s\"", t.qualifier)
 		}
+
+		case tokenKindKVV:
+			switch t.qualifier {
+			case "metadata":
+				if len(t.subQualifier) == 0 {
+					return nil, fmt.Errorf("empty sub-qualifier for qualifier \"metadata\"")
+				}
+				var pair StringPair
+				pair.Key = t.subQualifier
+				pair.Value = t.value
+				q.Metadata = append(q.Metadata, pair)
+
+			default:
+				return nil, fmt.Errorf("unknown qualifier \"%s:%s\"", t.qualifier, t.subQualifier)
+			}
 		}
 	}
 	return q, nil
diff --git a/query/parser_test.go b/query/parser_test.go
index 87dd870a..6d91d6cc 100644
--- a/query/parser_test.go
+++ b/query/parser_test.go
@@ -84,6 +84,11 @@ func TestParse(t *testing.T) {
 			OrderDirection: OrderDescending,
 		},
 		},
+
+		// Metadata
+		{`metadata:key:"https://www.example.com/"`, &Query{
+			Filters: Filters{Metadata: []StringPair{{"key", "https://www.example.com/"}}},
+		}},
 	}
 
 	for _, tc := range tests {
diff --git a/query/query.go b/query/query.go
index 816d6414..3a2321cf 100644
--- a/query/query.go
+++ b/query/query.go
@@ -23,10 +23,17 @@ func NewQuery() *Query {
 
 type Search []string
 
+// StringPair is a key-value pair, used when filtering based on metadata.
+type StringPair struct {
+	Key   string
+	Value string
+}
+
 // Filters is a collection of Filter that implement a complex filter
 type Filters struct {
 	Status      []bug.Status
 	Author      []string
+	Metadata    []StringPair
 	Actor       []string
 	Participant []string
 	Label       []string
--
cgit
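A minimal, self-contained sketch of the quote-aware splitting technique that the lexer change above relies on; it is not part of the patch, and splitOnColons is an illustrative name only. strings.FieldsFunc splits on ':' outside double quotes, the HasPrefix/HasSuffix fixups restore the empty leading/trailing fields that FieldsFunc drops, and a quote still open at the end of the field is reported as an error:

package main

import (
	"fmt"
	"strings"
)

// splitOnColons splits a query field on ':' separators, treating anything
// between double quotes as opaque. Illustrative only; the real logic lives
// in query/lexer.go above.
func splitOnColons(field string) ([]string, error) {
	quoted := false
	split := strings.FieldsFunc(field, func(r rune) bool {
		if r == '"' {
			quoted = !quoted
		}
		return !quoted && r == ':'
	})
	// FieldsFunc drops empty fields, so put back the empty qualifier/value
	// implied by a leading or trailing ':'.
	if strings.HasPrefix(field, ":") {
		split = append([]string{""}, split...)
	}
	if strings.HasSuffix(field, ":") {
		split = append(split, "")
	}
	// quoted is still true here if a '"' was opened but never closed.
	if quoted {
		return nil, fmt.Errorf("can't tokenize %q: unmatched quote", field)
	}
	return split, nil
}

func main() {
	got, err := splitOnColons(`metadata:github-url:"https://github.com/author/myproject/issues/42"`)
	fmt.Println(got, err)
	// Prints: [metadata github-url "https://github.com/author/myproject/issues/42"] <nil>
	// (the surrounding quotes are stripped later by removeQuote in the lexer)
}

One caveat of this approach: the predicate keeps state across calls, while the strings.FieldsFunc documentation makes no guarantee about the order in which the predicate is invoked, so the technique relies on the standard library's current left-to-right behaviour.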