path: root/query
author     Michael Muré <batolettre@gmail.com>       2020-08-20 12:00:34 +0200
committer  Mike Goldin <mike.goldin@protonmail.ch>   2020-11-17 08:09:51 -0500
commit     b285c57dc62caac2c1f09e74eeece406b0e7cc00 (patch)
tree       aa2f15c8e3a250c8bd8d8f4a7b11a5c9819453a8 /query
parent     902997f53771babb2f9ea1bb6288c2ec295c4c9e (diff)
download   git-bug-b285c57dc62caac2c1f09e74eeece406b0e7cc00.tar.gz
query: expand the tokenizer/parser to parse arbitrary search terms
Diffstat (limited to 'query')
-rw-r--r--  query/lexer.go        43
-rw-r--r--  query/lexer_test.go   33
-rw-r--r--  query/parser.go       73
-rw-r--r--  query/parser_test.go  14
-rw-r--r--  query/query.go         3
5 files changed, 116 insertions, 50 deletions
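
A minimal usage sketch (not part of the commit, assuming the package is imported from github.com/MichaelMure/git-bug/query, matching the import path seen in query.go below): after this change, bare words and quoted phrases in a query string end up in Query.Search, while qualifier:value pairs keep populating the filters.

package main

import (
	"fmt"

	"github.com/MichaelMure/git-bug/query"
)

func main() {
	// "crash" and "more terms" are free search terms; the rest are filters.
	q, err := query.Parse(`status:open author:"René Descartes" crash "more terms"`)
	if err != nil {
		panic(err)
	}
	fmt.Println(q.Search)      // [crash more terms]
	fmt.Println(q.Author)      // [René Descartes]
	fmt.Println(len(q.Status)) // 1
}
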
diff --git a/query/lexer.go b/query/lexer.go
index ca67d641..5ca700c7 100644
--- a/query/lexer.go
+++ b/query/lexer.go
@@ -6,9 +6,38 @@ import (
"unicode"
)
+type tokenKind int
+
+const (
+ _ tokenKind = iota
+ tokenKindKV
+ tokenKindSearch
+)
+
type token struct {
+ kind tokenKind
+
+ // KV
qualifier string
value string
+
+ // Search
+ term string
+}
+
+func newTokenKV(qualifier, value string) token {
+ return token{
+ kind: tokenKindKV,
+ qualifier: qualifier,
+ value: value,
+ }
+}
+
+func newTokenSearch(term string) token {
+ return token{
+ kind: tokenKindSearch,
+ term: term,
+ }
}
// tokenize parses and breaks an input into tokens ready to be
@@ -22,6 +51,13 @@ func tokenize(query string) ([]token, error) {
var tokens []token
for _, field := range fields {
split := strings.Split(field, ":")
+
+ // full text search
+ if len(split) == 1 {
+ tokens = append(tokens, newTokenSearch(removeQuote(field)))
+ continue
+ }
+
if len(split) != 2 {
return nil, fmt.Errorf("can't tokenize \"%s\"", field)
}
@@ -33,14 +69,13 @@ func tokenize(query string) ([]token, error) {
return nil, fmt.Errorf("empty value for qualifier \"%s\"", split[0])
}
- tokens = append(tokens, token{
- qualifier: split[0],
- value: removeQuote(split[1]),
- })
+ tokens = append(tokens, newTokenKV(split[0], removeQuote(split[1])))
}
return tokens, nil
}
+// split the query into chunks by splitting on whitespaces but respecting
+// quotes
func splitQuery(query string) ([]string, error) {
lastQuote := rune(0)
inQuote := false
diff --git a/query/lexer_test.go b/query/lexer_test.go
index 36b9ba10..59f17dec 100644
--- a/query/lexer_test.go
+++ b/query/lexer_test.go
@@ -11,32 +11,43 @@ func TestTokenize(t *testing.T) {
input string
tokens []token
}{
- {"gibberish", nil},
{"status:", nil},
{":value", nil},
- {"status:open", []token{{"status", "open"}}},
- {"status:closed", []token{{"status", "closed"}}},
+ {"status:open", []token{newTokenKV("status", "open")}},
+ {"status:closed", []token{newTokenKV("status", "closed")}},
- {"author:rene", []token{{"author", "rene"}}},
- {`author:"René Descartes"`, []token{{"author", "René Descartes"}}},
+ {"author:rene", []token{newTokenKV("author", "rene")}},
+ {`author:"René Descartes"`, []token{newTokenKV("author", "René Descartes")}},
{
`status:open status:closed author:rene author:"René Descartes"`,
[]token{
- {"status", "open"},
- {"status", "closed"},
- {"author", "rene"},
- {"author", "René Descartes"},
+ newTokenKV("status", "open"),
+ newTokenKV("status", "closed"),
+ newTokenKV("author", "rene"),
+ newTokenKV("author", "René Descartes"),
},
},
// quotes
- {`key:"value value"`, []token{{"key", "value value"}}},
- {`key:'value value'`, []token{{"key", "value value"}}},
+ {`key:"value value"`, []token{newTokenKV("key", "value value")}},
+ {`key:'value value'`, []token{newTokenKV("key", "value value")}},
// unmatched quotes
{`key:'value value`, nil},
{`key:value value'`, nil},
+
+ // full text search
+ {"search", []token{newTokenSearch("search")}},
+ {"search more terms", []token{
+ newTokenSearch("search"),
+ newTokenSearch("more"),
+ newTokenSearch("terms"),
+ }},
+ {"search \"more terms\"", []token{
+ newTokenSearch("search"),
+ newTokenSearch("more terms"),
+ }},
}
for _, tc := range tests {
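
For illustration, a hypothetical extra test (not in the commit) spelling out how a mixed query is tokenized; the testify require package is an assumption here and may differ from the assertions lexer_test.go actually uses.

package query

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestTokenizeMixedSketch(t *testing.T) {
	// qualifier:value pairs become KV tokens, everything else becomes
	// full-text search tokens, with quotes allowing spaces inside a term.
	tokens, err := tokenize(`status:open "full text" term`)
	require.NoError(t, err)
	require.Equal(t, []token{
		newTokenKV("status", "open"),
		newTokenSearch("full text"),
		newTokenSearch("term"),
	}, tokens)
}
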
diff --git a/query/parser.go b/query/parser.go
index a379f750..762a47e5 100644
--- a/query/parser.go
+++ b/query/parser.go
@@ -24,42 +24,49 @@ func Parse(query string) (*Query, error) {
sortingDone := false
for _, t := range tokens {
- switch t.qualifier {
- case "status", "state":
- status, err := bug.StatusFromString(t.value)
- if err != nil {
- return nil, err
- }
- q.Status = append(q.Status, status)
- case "author":
- q.Author = append(q.Author, t.value)
- case "actor":
- q.Actor = append(q.Actor, t.value)
- case "participant":
- q.Participant = append(q.Participant, t.value)
- case "label":
- q.Label = append(q.Label, t.value)
- case "title":
- q.Title = append(q.Title, t.value)
- case "no":
- switch t.value {
+ switch t.kind {
+ case tokenKindSearch:
+ q.Search = append(q.Search, t.term)
+ break
+
+ case tokenKindKV:
+ switch t.qualifier {
+ case "status", "state":
+ status, err := bug.StatusFromString(t.value)
+ if err != nil {
+ return nil, err
+ }
+ q.Status = append(q.Status, status)
+ case "author":
+ q.Author = append(q.Author, t.value)
+ case "actor":
+ q.Actor = append(q.Actor, t.value)
+ case "participant":
+ q.Participant = append(q.Participant, t.value)
case "label":
- q.NoLabel = true
+ q.Label = append(q.Label, t.value)
+ case "title":
+ q.Title = append(q.Title, t.value)
+ case "no":
+ switch t.value {
+ case "label":
+ q.NoLabel = true
+ default:
+ return nil, fmt.Errorf("unknown \"no\" filter \"%s\"", t.value)
+ }
+ case "sort":
+ if sortingDone {
+ return nil, fmt.Errorf("multiple sorting")
+ }
+ err = parseSorting(q, t.value)
+ if err != nil {
+ return nil, err
+ }
+ sortingDone = true
+
default:
- return nil, fmt.Errorf("unknown \"no\" filter \"%s\"", t.value)
+ return nil, fmt.Errorf("unknown qualifier \"%s\"", t.qualifier)
}
- case "sort":
- if sortingDone {
- return nil, fmt.Errorf("multiple sorting")
- }
- err = parseSorting(q, t.value)
- if err != nil {
- return nil, err
- }
- sortingDone = true
-
- default:
- return nil, fmt.Errorf("unknown qualifier \"%s\"", t.qualifier)
}
}
return q, nil
diff --git a/query/parser_test.go b/query/parser_test.go
index 6a509adb..87dd870a 100644
--- a/query/parser_test.go
+++ b/query/parser_test.go
@@ -13,7 +13,7 @@ func TestParse(t *testing.T) {
input string
output *Query
}{
- {"gibberish", nil},
+ // KV
{"status:", nil},
{":value", nil},
@@ -62,8 +62,18 @@ func TestParse(t *testing.T) {
}},
{"sort:unknown", nil},
- {`status:open author:"René Descartes" participant:leonhard label:hello label:"Good first issue" sort:edit-desc`,
+ // Search
+ {"search", &Query{
+ Search: []string{"search"},
+ }},
+ {"search \"more terms\"", &Query{
+ Search: []string{"search", "more terms"},
+ }},
+
+ // Complex
+ {`status:open author:"René Descartes" search participant:leonhard label:hello label:"Good first issue" sort:edit-desc "more terms"`,
&Query{
+ Search: []string{"search", "more terms"},
Filters: Filters{
Status: []bug.Status{bug.OpenStatus},
Author: []string{"René Descartes"},
diff --git a/query/query.go b/query/query.go
index a499ad38..816d6414 100644
--- a/query/query.go
+++ b/query/query.go
@@ -7,6 +7,7 @@ import "github.com/MichaelMure/git-bug/bug"
// manually. This query doesn't do anything by itself and need to be interpreted
// for the specific domain of application.
type Query struct {
+ Search
Filters
OrderBy
OrderDirection
@@ -20,6 +21,8 @@ func NewQuery() *Query {
}
}
+type Search []string
+
// Filters is a collection of Filter that implement a complex filter
type Filters struct {
Status []bug.Status