diff options
Diffstat (limited to 'spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js')
-rw-r--r-- | spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js | 8 |
1 file changed, 8 insertions, 0 deletions
diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
index 4f9f546cbb5..d8eb75ec000 100644
--- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
@@ -7,6 +7,7 @@ describe('Filtered Search Tokenizer', () => {
   describe('processTokens', () => {
     it('returns for input containing only search value', () => {
       const results = FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys);
+
       expect(results.searchToken).toBe('searchTerm');
       expect(results.tokens.length).toBe(0);
       expect(results.lastToken).toBe(results.searchToken);
@@ -15,6 +16,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input containing only tokens', () => {
       const results = FilteredSearchTokenizer
         .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none', allowedKeys);
+
       expect(results.searchToken).toBe('');
       expect(results.tokens.length).toBe(4);
       expect(results.tokens[3]).toBe(results.lastToken);
@@ -39,6 +41,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input starting with search value and ending with tokens', () => {
       const results = FilteredSearchTokenizer
         .processTokens('searchTerm anotherSearchTerm milestone:none', allowedKeys);
+
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(1);
       expect(results.tokens[0]).toBe(results.lastToken);
@@ -83,6 +86,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input containing search value in between tokens', () => {
       const results = FilteredSearchTokenizer
         .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing', allowedKeys);
+
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(3);
       expect(results.tokens[2]).toBe(results.lastToken);
@@ -102,6 +106,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns search value for invalid tokens', () => {
       const results = FilteredSearchTokenizer.processTokens('fake:token', allowedKeys);
+
       expect(results.lastToken).toBe('fake:token');
       expect(results.searchToken).toBe('fake:token');
       expect(results.tokens.length).toEqual(0);
@@ -109,6 +114,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns search value and token for mix of valid and invalid tokens', () => {
       const results = FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys);
+
       expect(results.tokens.length).toEqual(1);
       expect(results.tokens[0].key).toBe('label');
       expect(results.tokens[0].value).toBe('real');
@@ -119,12 +125,14 @@ describe('Filtered Search Tokenizer', () => {
     it('returns search value for invalid symbols', () => {
       const results = FilteredSearchTokenizer.processTokens('std::includes', allowedKeys);
+
       expect(results.lastToken).toBe('std::includes');
       expect(results.searchToken).toBe('std::includes');
     });
 
     it('removes duplicated values', () => {
       const results = FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys);
+
       expect(results.tokens.length).toBe(1);
       expect(results.tokens[0].key).toBe('label');
       expect(results.tokens[0].value).toBe('foo');
     });