aboutsummaryrefslogblamecommitdiffstats
path: root/query/lexer_test.go
blob: 4ffb35a034bdc17370e7a38f6bd727df49e70d6b (plain) (tree)
1
2
3
4
5
6
7
8
9
10
11
12
13












                                            


                                 

                                                                           
 

                                                                                               



                                                                                         



                                                                        

                          

                         

                                                                                 


                                          
 







                                                                                                   










                                                              












                                                          
package query

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestTokenize checks the query lexer against a table of inputs:
// key:value qualifiers (bare and quoted), sub-qualifiers, full-text
// search terms, and malformed inputs. A nil expected token slice
// means tokenize must return an error.
func TestTokenize(t *testing.T) {
	tests := []struct {
		input  string
		tokens []token // nil => tokenize is expected to fail
	}{
		{"status:", nil},
		{":value", nil},

		{"status:open", []token{newTokenKV("status", "open")}},
		{"status:closed", []token{newTokenKV("status", "closed")}},

		{"author:rene", []token{newTokenKV("author", "rene")}},
		{`author:"René Descartes"`, []token{newTokenKV("author", "René Descartes")}},

		{
			`status:open status:closed author:rene author:"René Descartes"`,
			[]token{
				newTokenKV("status", "open"),
				newTokenKV("status", "closed"),
				newTokenKV("author", "rene"),
				newTokenKV("author", "René Descartes"),
			},
		},

		// quotes
		{`key:"value value"`, []token{newTokenKV("key", "value value")}},
		{`key:'value value'`, []token{newTokenKV("key", "value value")}},
		// unmatched quotes
		{`key:'value value`, nil},
		{`key:value value'`, nil},

		// sub-qualifier positive testing
		{`key:subkey:"value:value"`, []token{newTokenKVV("key", "subkey", "value:value")}},

		// sub-qualifier negative testing
		{`key:subkey:value:value`, nil},
		{`key:subkey:`, nil},
		{`key:subkey:"value`, nil},

		// full text search
		{"search", []token{newTokenSearch("search")}},
		{"search more terms", []token{
			newTokenSearch("search"),
			newTokenSearch("more"),
			newTokenSearch("terms"),
		}},
		{"search \"more terms\"", []token{
			newTokenSearch("search"),
			newTokenSearch("more terms"),
		}},
	}

	for _, tc := range tests {
		// Run each case as a named subtest so a failure pinpoints the
		// offending input and individual cases can be re-run with -run.
		t.Run(tc.input, func(t *testing.T) {
			tokens, err := tokenize(tc.input)
			if tc.tokens == nil {
				assert.Error(t, err)
				assert.Nil(t, tokens)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tc.tokens, tokens)
			}
		})
	}
}