| author | Miklos Vajna <vmiklos@collabora.com> | 2021-02-14 16:03:51 +0100 |
|---|---|---|
| committer | Miklos Vajna <vmiklos@collabora.com> | 2021-02-21 14:15:50 +0100 |
| commit | cb61245078a0e8f14e359ed20e0582a695645a08 (patch) | |
| tree | aa0e8f4ea3d8a5eb2f7ac3746875b0854c1714bb /query/lexer.go | |
| parent | 956f98b676ab44d19ed522061c9520a32aab1a3c (diff) | |
| download | git-bug-cb61245078a0e8f14e359ed20e0582a695645a08.tar.gz | |
Add ability to search by arbitrary metadata
Example:
~/git/git-bug/git-bug ls --metadata github-url=https://github.com/author/myproject/issues/42
or
~/git/git-bug/git-bug ls metadata:github-url:\"https://github.com/author/myproject/issues/42\"
Fixes the cmdline part of <https://github.com/MichaelMure/git-bug/issues/567>.
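
For reference, here is a minimal sketch of what the quoted form above is expected to produce. This test is not part of the commit; it assumes it lives inside the `query` package (since `tokenize`, `token` and `tokenKindKVV` are unexported, see the diff below) and that `removeQuote` strips the surrounding double quotes from the value, as it already does for plain key:value pairs.

```go
package query

import "testing"

// Hypothetical test sketch (not in the commit): checks that the quoted
// metadata query from the example tokenizes into a single KVV token.
func TestTokenizeMetadataKVV(t *testing.T) {
	input := `metadata:github-url:"https://github.com/author/myproject/issues/42"`

	tokens, err := tokenize(input)
	if err != nil {
		t.Fatalf("tokenize(%q) failed: %v", input, err)
	}
	if len(tokens) != 1 {
		t.Fatalf("expected 1 token, got %d", len(tokens))
	}

	tok := tokens[0]
	if tok.kind != tokenKindKVV ||
		tok.qualifier != "metadata" ||
		tok.subQualifier != "github-url" ||
		tok.value != "https://github.com/author/myproject/issues/42" {
		t.Errorf("unexpected token: %+v", tok)
	}
}
```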
Diffstat (limited to 'query/lexer.go')
| -rw-r--r-- | query/lexer.go | 58 |

1 file changed, 50 insertions(+), 8 deletions(-)
diff --git a/query/lexer.go b/query/lexer.go
index 5ca700c7..45f657df 100644
--- a/query/lexer.go
+++ b/query/lexer.go
@@ -11,16 +11,20 @@ type tokenKind int
 const (
     _ tokenKind = iota
     tokenKindKV
+    tokenKindKVV
     tokenKindSearch
 )
 
 type token struct {
     kind tokenKind
 
-    // KV
+    // KV and KVV
     qualifier string
     value     string
 
+    // KVV only
+    subQualifier string
+
     // Search
     term string
 }
@@ -33,6 +37,15 @@ func newTokenKV(qualifier, value string) token {
     }
 }
 
+func newTokenKVV(qualifier, subQualifier, value string) token {
+    return token{
+        kind:         tokenKindKVV,
+        qualifier:    qualifier,
+        subQualifier: subQualifier,
+        value:        value,
+    }
+}
+
 func newTokenSearch(term string) token {
     return token{
         kind: tokenKindSearch,
@@ -50,7 +63,23 @@ func tokenize(query string) ([]token, error) {
 
     var tokens []token
     for _, field := range fields {
-        split := strings.Split(field, ":")
+        // Split using ':' as separator, but separators inside '"' don't count.
+        quoted := false
+        split := strings.FieldsFunc(field, func(r rune) bool {
+            if r == '"' {
+                quoted = !quoted
+            }
+            return !quoted && r == ':'
+        })
+        if (strings.HasPrefix(field, ":")) {
+            split = append([]string{""}, split...)
+        }
+        if (strings.HasSuffix(field, ":")) {
+            split = append(split, "")
+        }
+        if (quoted) {
+            return nil, fmt.Errorf("can't tokenize \"%s\": unmatched quote", field)
+        }
 
         // full text search
         if len(split) == 1 {
@@ -58,18 +87,31 @@ func tokenize(query string) ([]token, error) {
             continue
         }
 
-        if len(split) != 2 {
-            return nil, fmt.Errorf("can't tokenize \"%s\"", field)
+        if len(split) > 3 {
+            return nil, fmt.Errorf("can't tokenize \"%s\": too many separators", field)
         }
 
         if len(split[0]) == 0 {
             return nil, fmt.Errorf("can't tokenize \"%s\": empty qualifier", field)
         }
 
-        if len(split[1]) == 0 {
-            return nil, fmt.Errorf("empty value for qualifier \"%s\"", split[0])
-        }
-        tokens = append(tokens, newTokenKV(split[0], removeQuote(split[1])))
+        if len(split) == 2 {
+            if len(split[1]) == 0 {
+                return nil, fmt.Errorf("empty value for qualifier \"%s\"", split[0])
+            }
+
+            tokens = append(tokens, newTokenKV(split[0], removeQuote(split[1])))
+        } else {
+            if len(split[1]) == 0 {
+                return nil, fmt.Errorf("empty sub-qualifier for qualifier \"%s\"", split[0])
+            }
+
+            if len(split[2]) == 0 {
+                return nil, fmt.Errorf("empty value for qualifier \"%s:%s\"", split[0], split[1])
+            }
+
+            tokens = append(tokens, newTokenKVV(split[0], removeQuote(split[1]), removeQuote(split[2])))
+        }
     }
     return tokens, nil
 }
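
The core of the change is the quote-aware split in `tokenize`: `strings.Split(field, ":")` is replaced by `strings.FieldsFunc` with a closure that toggles a `quoted` flag on every `"` it sees, so colons inside a quoted value (such as the one in `https://...`) no longer act as separators. The standalone sketch below is illustrative only, not part of the commit; it applies the same technique to the field from the commit message example:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	field := `metadata:github-url:"https://github.com/author/myproject/issues/42"`

	// Split on ':' like the patched tokenize does, but ignore separators
	// that appear inside double quotes by toggling a flag on each '"'.
	quoted := false
	split := strings.FieldsFunc(field, func(r rune) bool {
		if r == '"' {
			quoted = !quoted
		}
		return !quoted && r == ':'
	})

	// Prints: ["metadata" "github-url" "\"https://github.com/author/myproject/issues/42\""]
	fmt.Printf("%q\n", split)
	fmt.Println("unmatched quote:", quoted) // false for well-formed input
}
```

Note that `strings.FieldsFunc` drops empty fields, which is why the patch re-adds an empty leading or trailing element when the field starts or ends with `:`; and if `quoted` is still true after the scan, the field contained an unmatched quote and `tokenize` reports an error.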