author | Michael Muré <batolettre@gmail.com> | 2020-08-20 12:00:34 +0200
---|---|---
committer | Mike Goldin <mike.goldin@protonmail.ch> | 2020-11-17 08:09:51 -0500
commit | b285c57dc62caac2c1f09e74eeece406b0e7cc00 (patch) |
tree | aa2f15c8e3a250c8bd8d8f4a7b11a5c9819453a8 /query/lexer_test.go |
parent | 902997f53771babb2f9ea1bb6288c2ec295c4c9e (diff) |
download | git-bug-b285c57dc62caac2c1f09e74eeece406b0e7cc00.tar.gz |
query: expand the tokenizer/parser to parse arbitrary search terms
Diffstat (limited to 'query/lexer_test.go')
-rw-r--r-- | query/lexer_test.go | 33
1 file changed, 22 insertions, 11 deletions
```diff
diff --git a/query/lexer_test.go b/query/lexer_test.go
index 36b9ba10..59f17dec 100644
--- a/query/lexer_test.go
+++ b/query/lexer_test.go
@@ -11,32 +11,43 @@ func TestTokenize(t *testing.T) {
 		input  string
 		tokens []token
 	}{
-		{"gibberish", nil},
 		{"status:", nil},
 		{":value", nil},
 
-		{"status:open", []token{{"status", "open"}}},
-		{"status:closed", []token{{"status", "closed"}}},
+		{"status:open", []token{newTokenKV("status", "open")}},
+		{"status:closed", []token{newTokenKV("status", "closed")}},
 
-		{"author:rene", []token{{"author", "rene"}}},
-		{`author:"René Descartes"`, []token{{"author", "René Descartes"}}},
+		{"author:rene", []token{newTokenKV("author", "rene")}},
+		{`author:"René Descartes"`, []token{newTokenKV("author", "René Descartes")}},
 
 		{
 			`status:open status:closed author:rene author:"René Descartes"`,
 			[]token{
-				{"status", "open"},
-				{"status", "closed"},
-				{"author", "rene"},
-				{"author", "René Descartes"},
+				newTokenKV("status", "open"),
+				newTokenKV("status", "closed"),
+				newTokenKV("author", "rene"),
+				newTokenKV("author", "René Descartes"),
			},
 		},
 
 		// quotes
-		{`key:"value value"`, []token{{"key", "value value"}}},
-		{`key:'value value'`, []token{{"key", "value value"}}},
+		{`key:"value value"`, []token{newTokenKV("key", "value value")}},
+		{`key:'value value'`, []token{newTokenKV("key", "value value")}},
 
 		// unmatched quotes
 		{`key:'value value`, nil},
 		{`key:value value'`, nil},
+
+		// full text search
+		{"search", []token{newTokenSearch("search")}},
+		{"search more terms", []token{
+			newTokenSearch("search"),
+			newTokenSearch("more"),
+			newTokenSearch("terms"),
+		}},
+		{"search \"more terms\"", []token{
+			newTokenSearch("search"),
+			newTokenSearch("more terms"),
+		}},
 	}
 
 	for _, tc := range tests {
```
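The updated tests build expected tokens through `newTokenKV` and `newTokenSearch` instead of plain struct literals, which suggests the token type now carries a kind discriminator so the lexer can emit either a qualifier:value pair or a bare full-text search term. Below is a minimal sketch of what such a token type and its constructors could look like; the field and constant names are illustrative assumptions, not copied from git-bug's actual `query/token.go`.

```go
// Package query: a hedged sketch of a token type supporting both
// key/value qualifiers and full-text search terms. Names are assumptions
// for illustration only.
package query

type tokenKind int

const (
	tokenKindKV     tokenKind = iota // a qualifier:value pair, e.g. status:open
	tokenKindSearch                  // a bare or quoted full-text search term
)

type token struct {
	kind      tokenKind
	qualifier string // set for KV tokens
	value     string // set for KV tokens
	term      string // set for search tokens
}

// newTokenKV builds a key/value token such as the one expected for "status:open".
func newTokenKV(qualifier, value string) token {
	return token{kind: tokenKindKV, qualifier: qualifier, value: value}
}

// newTokenSearch builds a full-text search token for a bare or quoted term.
func newTokenSearch(term string) token {
	return token{kind: tokenKindSearch, term: term}
}
```

With constructors along these lines, the test cases stay compact while a single tokenize loop can classify anything without a `key:` prefix as a search term, which is consistent with the removal of the `{"gibberish", nil}` case above: arbitrary words are no longer rejected but tokenized as search input.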