Commit 89b50aa6 authored by Clement Ho

Fix karma specs

parent f644cac6
@@ -50,8 +50,8 @@ class DropdownUtils {
return updatedItem;
}
static filterHint(options, item) {
const { input, allowedKeys } = options;
static filterHint(config, item) {
const { input, allowedKeys } = config;
const updatedItem = item;
const searchInput = gl.DropdownUtils.getSearchQuery(input);
const { lastToken, tokens } =
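Note: filterHint now receives a config object ({ input, allowedKeys }) instead of the raw input element. A minimal sketch of the new call shape, mirroring the updated Dropdown Utils spec further down (the fixture element and the hint item are illustrative):

    const input = document.getElementById('test');
    const allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
    const updatedItem = gl.DropdownUtils.filterHint({ input, allowedKeys }, { hint: 'label' });
    // updatedItem.droplab_hidden reflects whether the 'label' hint matches the current input value.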
@@ -453,7 +453,8 @@ class FilteredSearchManager {
this.saveCurrentSearchQuery();
const { tokens, searchToken }
= this.tokenizer.processTokens(searchQuery, this.filteredSearchTokenKeys.get());
= this.tokenizer.processTokens(searchQuery, this.filteredSearchTokenKeys.getKeys());
const currentState = gl.utils.getParameterByName('state') || 'opened';
paths.push(`state=${currentState}`);
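Note: the manager now passes its token keys to the tokenizer via getKeys() rather than get(). A hedged sketch of the updated call, assuming getKeys() returns the plain array of token key definitions that the specs below pass around as allowedKeys:

    const allowedKeys = this.filteredSearchTokenKeys.getKeys();
    const { tokens, searchToken } =
      this.tokenizer.processTokens(searchQuery, allowedKeys);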
@@ -54,10 +54,12 @@ class FilteredSearchTokenKeysWithWeights extends gl.FilteredSearchTokenKeys {
const alternativeTokenKeys = FilteredSearchTokenKeysWithWeights.getAlternatives();
const tokenKeysWithAlternative = tokenKeys.concat(alternativeTokenKeys);
console.log(tokenKeysWithAlternative)
return tokenKeysWithAlternative.find((tokenKey) => {
let tokenKeyParam = tokenKey.key;
if (tokenKey.param) {
if (tokenKey.param !== 'weight') {
tokenKeyParam += `_${tokenKey.param}`;
}
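Note: the added guard keeps the weight token key out of the `${key}_${param}` lookup, so it still resolves to the bare 'weight' query parameter even though its param changes from '' to 'weight' later in this commit. A minimal sketch of the lookup (the 'milestone_title' example is illustrative):

    // For most token keys the lookup target is `${key}_${param}`, e.g. 'milestone_title';
    // the weight key stays as plain 'weight' rather than becoming 'weight_weight'.
    let tokenKeyParam = tokenKey.key;
    if (tokenKey.param && tokenKey.param !== 'weight') {
      tokenKeyParam += `_${tokenKey.param}`;
    }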
@@ -2,6 +2,8 @@ import Vue from 'vue';
import eventHub from '~/filtered_search/event_hub';
import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content';
require('~/filtered_search/filtered_search_token_keys');
const createComponent = (propsData) => {
const Component = Vue.extend(RecentSearchesDropdownContent);
@@ -17,12 +19,14 @@ const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim();
describe('RecentSearchesDropdownContent', () => {
const propsDataWithoutItems = {
items: [],
allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
};
const propsDataWithItems = {
items: [
'foo',
'author:@root label:~foo bar',
],
allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
};
let vm;
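Note: the recent searches dropdown content component now takes allowedKeys alongside items. A minimal usage sketch with the createComponent helper defined earlier in this spec (the items value is illustrative):

    vm = createComponent({
      items: ['author:@root label:~foo bar'],
      allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
    });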
@@ -12,7 +12,7 @@ describe('Dropdown User', () => {
spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {});
spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {});
dropdownUser = new gl.DropdownUser();
dropdownUser = new gl.DropdownUser(null, null, null, gl.FilteredSearchTokenKeys);
});
it('should not return the double quote found in value', () => {
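Note: DropdownUser is now constructed with the token keys as an explicit fourth argument; the spec stubs the first three arguments with null. A hedged sketch of the intent, assuming the first three arguments are the usual droplab/dropdown/input wiring (those names are assumptions) and that callers can inject an alternative token key set:

    // Sketch only: token keys are injected rather than read from a single global default,
    // so a caller could pass an extended set (such as the weights variant) instead.
    const dropdownUser = new gl.DropdownUser(droplab, dropdownElement, filterInput, gl.FilteredSearchTokenKeys);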
@@ -122,6 +122,7 @@ describe('Dropdown Utils', () => {
describe('filterHint', () => {
let input;
let allowedKeys;
beforeEach(() => {
setFixtures(`
@@ -133,30 +134,38 @@
`);
input = document.getElementById('test');
allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
});
function config() {
return {
input,
allowedKeys,
}
}
it('should filter', () => {
input.value = 'l';
let updatedItem = gl.DropdownUtils.filterHint(input, {
let updatedItem = gl.DropdownUtils.filterHint(config(), {
hint: 'label',
});
expect(updatedItem.droplab_hidden).toBe(false);
input.value = 'o';
updatedItem = gl.DropdownUtils.filterHint(input, {
updatedItem = gl.DropdownUtils.filterHint(config(), {
hint: 'label',
});
expect(updatedItem.droplab_hidden).toBe(true);
});
it('should return droplab_hidden false when item has no hint', () => {
const updatedItem = gl.DropdownUtils.filterHint(input, {}, '');
const updatedItem = gl.DropdownUtils.filterHint(config(), {}, '');
expect(updatedItem.droplab_hidden).toBe(false);
});
it('should allow multiple if item.type is array', () => {
input.value = 'label:~first la';
const updatedItem = gl.DropdownUtils.filterHint(input, {
const updatedItem = gl.DropdownUtils.filterHint(config(), {
hint: 'label',
type: 'array',
});
@@ -165,12 +174,12 @@
it('should prevent multiple if item.type is not array', () => {
input.value = 'milestone:~first mile';
let updatedItem = gl.DropdownUtils.filterHint(input, {
let updatedItem = gl.DropdownUtils.filterHint(config(), {
hint: 'milestone',
});
expect(updatedItem.droplab_hidden).toBe(true);
updatedItem = gl.DropdownUtils.filterHint(input, {
updatedItem = gl.DropdownUtils.filterHint(config(), {
hint: 'milestone',
type: 'string',
});
@@ -81,6 +81,7 @@ describe('Filtered Search Manager', () => {
expect(RecentSearchesService.isAvailable).toHaveBeenCalled();
expect(recentSearchesStoreSrc.default).toHaveBeenCalledWith({
isLocalStorageAvailable,
allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
});
});
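Note: the recent searches store is now constructed with the allowed keys in addition to the localStorage flag. A hedged sketch, assuming the module's default export (spied on above as recentSearchesStoreSrc.default) is the store constructor:

    new RecentSearchesStore({
      isLocalStorageAvailable,
      allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
    });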
@@ -6,8 +6,9 @@ require('~/filtered_search/filtered_search_token_keys_with_weights');
const weightTokenKey = {
key: 'weight',
type: 'string',
param: '',
param: 'weight',
symbol: '',
icon: 'balance-scale',
};
describe('get', () => {
@@ -3,9 +3,11 @@ require('~/filtered_search/filtered_search_token_keys');
require('~/filtered_search/filtered_search_tokenizer');
describe('Filtered Search Tokenizer', () => {
let allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
describe('processTokens', () => {
it('returns for input containing only search value', () => {
const results = gl.FilteredSearchTokenizer.processTokens('searchTerm');
const results = gl.FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys);
expect(results.searchToken).toBe('searchTerm');
expect(results.tokens.length).toBe(0);
expect(results.lastToken).toBe(results.searchToken);
@@ -13,7 +15,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input containing only tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none');
.processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none', allowedKeys);
expect(results.searchToken).toBe('');
expect(results.tokens.length).toBe(4);
expect(results.tokens[3]).toBe(results.lastToken);
@@ -37,7 +39,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input starting with search value and ending with tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('searchTerm anotherSearchTerm milestone:none');
.processTokens('searchTerm anotherSearchTerm milestone:none', allowedKeys);
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
expect(results.tokens.length).toBe(1);
expect(results.tokens[0]).toBe(results.lastToken);
@@ -48,7 +50,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input starting with tokens and ending with search value', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('assignee:@user searchTerm');
.processTokens('assignee:@user searchTerm', allowedKeys);
expect(results.searchToken).toBe('searchTerm');
expect(results.tokens.length).toBe(1);
@@ -60,7 +62,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input containing search value wrapped between tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none');
.processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none', allowedKeys);
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
expect(results.tokens.length).toBe(3);
@@ -81,7 +83,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input containing search value in between tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing');
.processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing', allowedKeys);
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
expect(results.tokens.length).toBe(3);
expect(results.tokens[2]).toBe(results.lastToken);
@@ -100,14 +102,14 @@ describe('Filtered Search Tokenizer', () => {
});
it('returns search value for invalid tokens', () => {
const results = gl.FilteredSearchTokenizer.processTokens('fake:token');
const results = gl.FilteredSearchTokenizer.processTokens('fake:token', allowedKeys);
expect(results.lastToken).toBe('fake:token');
expect(results.searchToken).toBe('fake:token');
expect(results.tokens.length).toEqual(0);
});
it('returns search value and token for mix of valid and invalid tokens', () => {
const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token');
const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys);
expect(results.tokens.length).toEqual(1);
expect(results.tokens[0].key).toBe('label');
expect(results.tokens[0].value).toBe('real');
@@ -117,13 +119,13 @@ describe('Filtered Search Tokenizer', () => {
});
it('returns search value for invalid symbols', () => {
const results = gl.FilteredSearchTokenizer.processTokens('std::includes');
const results = gl.FilteredSearchTokenizer.processTokens('std::includes', allowedKeys);
expect(results.lastToken).toBe('std::includes');
expect(results.searchToken).toBe('std::includes');
});
it('removes duplicated values', () => {
const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo');
const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys);
expect(results.tokens.length).toBe(1);
expect(results.tokens[0].key).toBe('label');
expect(results.tokens[0].value).toBe('foo');
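Note: every processTokens call in this spec now passes allowedKeys as a second argument. A compact sketch of the new signature and the result shape asserted above (the query string is illustrative):

    const allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
    const results = gl.FilteredSearchTokenizer.processTokens('author:@root fix the bug', allowedKeys);
    // results.searchToken     -> 'fix the bug'
    // results.tokens[0].key   -> 'author'
    // results.tokens[0].value -> 'root' (the '@' symbol is stripped, as with label:~foo above)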