Commit e197f27f authored by Clement Ho

Refactor and use regex for string processing

parent 0e40c952
@@ -3,31 +3,13 @@
/* global droplabFilter */
(() => {
const dropdownData = [{
icon: 'fa-pencil',
hint: 'author:',
tag: '<author>',
}, {
icon: 'fa-user',
hint: 'assignee:',
tag: '<assignee>',
}, {
icon: 'fa-clock-o',
hint: 'milestone:',
tag: '<milestone>',
}, {
icon: 'fa-tag',
hint: 'label:',
tag: '<label>',
}];
class DropdownHint extends gl.FilteredSearchDropdown {
constructor(droplab, dropdown, input) {
super(droplab, dropdown, input);
this.config = {
droplabFilter: {
template: 'hint',
filterFunction: gl.DropdownUtils.filterMethod,
filterFunction: gl.DropdownUtils.filterHint,
},
};
}
@@ -43,8 +25,7 @@
const tag = selected.querySelector('.js-filter-tag').innerText.trim();
if (tag.length) {
gl.FilteredSearchDropdownManager
.addWordToInput(this.getSelectedTextWithoutEscaping(token));
gl.FilteredSearchDropdownManager.addWordToInput(token);
}
this.dismissDropdown();
this.dispatchInputEvent();
@@ -52,24 +33,27 @@
}
}
getSelectedTextWithoutEscaping(selectedToken) {
const lastWord = this.input.value.split(' ').last();
const lastWordIndex = selectedToken.indexOf(lastWord);
return lastWordIndex === -1 ? selectedToken : selectedToken.slice(lastWord.length);
}
renderContent() {
this.droplab.changeHookList(this.hookId, this.dropdown, [droplabFilter], this.config);
// Clone dropdownData to prevent it from being
// changed due to pass by reference
const data = [];
dropdownData.forEach((item) => {
data.push(Object.assign({}, item));
});
const dropdownData = [{
icon: 'fa-pencil',
hint: 'author:',
tag: '<author>',
}, {
icon: 'fa-user',
hint: 'assignee:',
tag: '<assignee>',
}, {
icon: 'fa-clock-o',
hint: 'milestone:',
tag: '<milestone>',
}, {
icon: 'fa-tag',
hint: 'label:',
tag: '<label>',
}];
this.droplab.setData(this.hookId, data);
this.droplab.changeHookList(this.hookId, this.dropdown, [droplabFilter], this.config);
this.droplab.setData(this.hookId, dropdownData);
}
init() {
......
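The hunk above moves dropdownData from module scope into renderContent(), which removes the need for the Object.assign clone. A minimal illustration of the underlying pass-by-reference issue, using hypothetical names rather than the actual droplab internals:

// A filter pass marks items in place (e.g. droplab_hidden), so a shared
// module-level array would carry stale flags into the next render; building
// the array inside renderContent() starts each render with clean objects.
const sharedData = [{ hint: 'author:' }];

function fakeFilterPass(items) {
  items.forEach((item) => { item.droplab_hidden = true; }); // mutates the caller's objects
}

fakeFilterPass(sharedData);
console.log(sharedData[0].droplab_hidden); // => true: the shared copy is now dirty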
@@ -37,13 +37,10 @@
}
getSearchInput() {
const query = this.input.value;
const { value } = gl.FilteredSearchTokenizer.getLastTokenObject(query);
const valueWithoutColon = value.slice(1);
const hasPrefix = valueWithoutColon[0] === '@';
const valueWithoutPrefix = valueWithoutColon.slice(1);
const query = this.input.value.trim();
const { lastToken } = gl.FilteredSearchTokenizer.processTokens(query);
return hasPrefix ? valueWithoutPrefix : valueWithoutColon;
return lastToken.value || '';
}
init() {
......
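As a rough illustration of the new getSearchInput() behaviour (sketchGetSearchInput is a hypothetical helper name, assuming the lastToken shapes produced by processTokens):

// lastToken is an object when the trailing token has a value, otherwise it is
// the plain search string; only the object form contributes a .value here.
function sketchGetSearchInput(lastToken) {
  return lastToken.value || '';
}

console.log(sketchGetSearchInput({ key: 'author', value: 'roo', symbol: '@' })); // => 'roo'
console.log(sketchGetSearchInput('searchTerm')); // => '' (a bare search term has no .value)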
@@ -22,30 +22,32 @@
static filterWithSymbol(filterSymbol, item, query) {
const updatedItem = item;
const { value } = gl.FilteredSearchTokenizer.getLastTokenObject(query);
const valueWithoutColon = value.slice(1).toLowerCase();
const prefix = valueWithoutColon[0];
const valueWithoutPrefix = valueWithoutColon.slice(1);
const { lastToken, searchToken } = gl.FilteredSearchTokenizer.processTokens(query);
if (lastToken !== searchToken) {
const value = lastToken.value.toLowerCase();
const title = updatedItem.title.toLowerCase();
// Eg. filterSymbol = ~ for labels
const matchWithoutPrefix =
prefix === filterSymbol && title.indexOf(valueWithoutPrefix) !== -1;
const match = title.indexOf(valueWithoutColon) !== -1;
const matchWithoutSymbol = lastToken.symbol === filterSymbol && title.indexOf(value) !== -1;
const match = title.indexOf(`${lastToken.symbol}${value}`) !== -1;
updatedItem.droplab_hidden = !match && !matchWithoutSymbol;
} else {
updatedItem.droplab_hidden = false;
}
updatedItem.droplab_hidden = !match && !matchWithoutPrefix;
return updatedItem;
}
static filterMethod(item, query) {
static filterHint(item, query) {
const updatedItem = item;
const { value } = gl.FilteredSearchTokenizer.getLastTokenObject(query);
const { lastToken } = gl.FilteredSearchTokenizer.processTokens(query);
if (value === '') {
if (!lastToken) {
updatedItem.droplab_hidden = false;
} else {
updatedItem.droplab_hidden = updatedItem.hint.indexOf(value) === -1;
updatedItem.droplab_hidden = updatedItem.hint.indexOf(lastToken.toLowerCase()) === -1;
}
return updatedItem;
......
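A hedged, standalone sketch of the new symbol-aware filtering (sketchFilterWithSymbol is a hypothetical name; lastToken and searchToken stand in for gl.FilteredSearchTokenizer.processTokens output):

function sketchFilterWithSymbol(filterSymbol, item, lastToken, searchToken) {
  const result = Object.assign({}, item);
  if (lastToken !== searchToken) {
    const value = lastToken.value.toLowerCase();
    const title = result.title.toLowerCase();
    // Match the bare value when the typed symbol equals this dropdown's symbol
    // (e.g. '~' for labels), or match the symbol-prefixed value directly.
    const matchWithoutSymbol = lastToken.symbol === filterSymbol && title.indexOf(value) !== -1;
    const match = title.indexOf(`${lastToken.symbol}${value}`) !== -1;
    result.droplab_hidden = !match && !matchWithoutSymbol;
  } else {
    result.droplab_hidden = false; // plain search text: show every item
  }
  return result;
}

console.log(sketchFilterWithSymbol('@', { title: '@root' },
  { key: 'author', symbol: '@', value: 'roo' }, ''));
// => { title: '@root', droplab_hidden: false }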
@@ -29,7 +29,7 @@
itemClicked(e, getValueFunction) {
const { selected } = e.detail;
if (selected.tagName === 'LI') {
if (selected.tagName === 'LI' && selected.innerHTML) {
const dataValueSet = gl.DropdownUtils.setDataValueIfSelected(selected);
if (!dataValueSet) {
......
@@ -57,17 +57,25 @@
static addWordToInput(word, addSpace = false) {
const input = document.querySelector('.filtered-search');
input.value = input.value.trim();
const value = input.value;
const hasExistingValue = value.length !== 0;
const { lastToken } = gl.FilteredSearchTokenizer.processTokens(value);
const { lastToken, searchToken } = gl.FilteredSearchTokenizer.processTokens(value);
// Find out what part of the token value the user has typed
// and remove it from input before appending the selected token value
if (lastToken !== searchToken) {
const lastTokenString = `${lastToken.symbol}${lastToken.value}`;
if ({}.hasOwnProperty.call(lastToken, 'key')) {
// Spaces inside the token means that the token value will be escaped by quotes
const hasQuotes = lastToken.value.indexOf(' ') !== -1;
const hasQuotes = lastTokenString.indexOf(' ') !== -1;
// Add 2 length to account for the length of the front and back quotes
const lengthToRemove = hasQuotes ? lastToken.value.length + 2 : lastToken.value.length;
const lengthToRemove = hasQuotes ? lastTokenString.length + 2 : lastTokenString.length;
input.value = value.slice(0, -1 * (lengthToRemove));
} else if (searchToken !== '' && word.indexOf(searchToken) !== -1) {
input.value = value.slice(0, -1 * searchToken.length);
}
input.value += hasExistingValue && addSpace ? ` ${word}` : word;
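The token branch of addWordToInput() above strips whatever the user already typed for the token value before appending the selected word. A minimal sketch of that slice, assuming lastToken comes from processTokens (the searchToken branch is omitted):

function sketchReplaceTypedValue(inputValue, lastToken, word) {
  const lastTokenString = `${lastToken.symbol}${lastToken.value}`;
  // A space inside the value means the tokenizer stripped surrounding quotes,
  // so two extra characters have to be removed from the raw input.
  const hasQuotes = lastTokenString.indexOf(' ') !== -1;
  const lengthToRemove = hasQuotes ? lastTokenString.length + 2 : lastTokenString.length;
  return inputValue.slice(0, -lengthToRemove) + word;
}

console.log(sketchReplaceTypedValue('author:roo', { symbol: '', value: 'roo' }, 'root'));
// => 'author:root'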
@@ -129,27 +137,25 @@
const match = gl.FilteredSearchTokenKeys.searchByKey(dropdownName.toLowerCase());
const shouldOpenFilterDropdown = match && this.currentDropdown !== match.key
&& {}.hasOwnProperty.call(this.mapping, match.key);
&& this.mapping[match.key];
const shouldOpenHintDropdown = !match && this.currentDropdown !== 'hint';
if (shouldOpenFilterDropdown || shouldOpenHintDropdown) {
// `hint` is not listed as a tokenKey (since it is not a real `filter`)
const key = match && {}.hasOwnProperty.call(match, 'key') ? match.key : 'hint';
const key = match && match.key ? match.key : 'hint';
this.load(key, firstLoad);
}
gl.droplab = this.droplab;
}
setDropdown() {
const { lastToken } = this.tokenizer.processTokens(this.filteredSearchInput.value);
const { lastToken, searchToken } = this.tokenizer
.processTokens(this.filteredSearchInput.value);
if (typeof lastToken === 'string') {
if (lastToken === searchToken) {
// Token is not fully initialized yet because it has no value
// Eg. token = 'label:'
const { tokenKey } = this.tokenizer.parseToken(lastToken);
this.loadDropdown(tokenKey);
} else if ({}.hasOwnProperty.call(lastToken, 'key')) {
const split = lastToken.split(':');
this.loadDropdown(split.length > 1 ? split[0] : '');
} else if (lastToken) {
// Token has been initialized into an object because it has a value
this.loadDropdown(lastToken.key);
} else {
......
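A short sketch of the dropdown-selection decision in setDropdown() (sketchPickDropdown and loadDropdown are placeholder names; lastToken and searchToken follow the processTokens shapes):

function sketchPickDropdown(lastToken, searchToken, loadDropdown) {
  if (lastToken === searchToken) {
    // Only a key has been typed so far (e.g. 'label:'), so derive it from the text.
    const split = lastToken.split(':');
    loadDropdown(split.length > 1 ? split[0] : '');
  } else if (lastToken) {
    // The token already has a value, so it was parsed into an object with a key.
    loadDropdown(lastToken.key);
  }
}

sketchPickDropdown('label:', 'label:', key => console.log(`load: ${key}`));
// => load: label
sketchPickDropdown({ key: 'author', value: 'roo', symbol: '@' }, '',
  key => console.log(`load: ${key}`));
// => load: author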
@@ -136,21 +136,13 @@
const condition = gl.FilteredSearchTokenKeys
.searchByConditionKeyValue(token.key, token.value.toLowerCase());
const { param } = gl.FilteredSearchTokenKeys.searchByKey(token.key);
const keyParam = param ? `${token.key}_${param}` : token.key;
let tokenPath = '';
let keyParam = token.key;
if (param) {
keyParam += `_${param}`;
}
if (token.wildcard && condition) {
if (condition) {
tokenPath = condition.url;
} else if (token.wildcard) {
// wildcard means that the token does not have a symbol
tokenPath = `${keyParam}=${encodeURIComponent(token.value)}`;
} else {
// Remove the token symbol
tokenPath = `${keyParam}=${encodeURIComponent(token.value.slice(1))}`;
tokenPath = `${keyParam}=${encodeURIComponent(token.value)}`;
}
paths.push(tokenPath);
......
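A minimal sketch of how a parsed token becomes a URL parameter in the simplified branch above (the param value and condition object are hypothetical; the symbol has already been split off by the tokenizer, so there is nothing to strip from the value):

function sketchTokenPath(token, param, condition) {
  const keyParam = param ? `${token.key}_${param}` : token.key;
  // Special condition matches (e.g. 'none') map straight to a predefined URL
  // fragment; everything else becomes key[_param]=value.
  return condition ? condition.url : `${keyParam}=${encodeURIComponent(token.value)}`;
}

console.log(sketchTokenPath({ key: 'author', value: 'root', symbol: '@' }, 'username', null));
// => 'author_username=root'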
(() => {
class FilteredSearchTokenizer {
static parseToken(input) {
const colonIndex = input.indexOf(':');
let tokenKey;
let tokenValue;
let tokenSymbol;
if (colonIndex !== -1) {
tokenKey = input.slice(0, colonIndex).toLowerCase();
tokenValue = input.slice(colonIndex + 1);
tokenSymbol = tokenValue[0];
}
return {
tokenKey,
tokenValue,
tokenSymbol,
};
}
static getLastTokenObject(input) {
const token = FilteredSearchTokenizer.getLastToken(input);
const colonIndex = token.indexOf(':');
const key = colonIndex !== -1 ? token.slice(0, colonIndex) : '';
const value = colonIndex !== -1 ? token.slice(colonIndex) : token;
return {
key,
value,
};
}
static getLastToken(input) {
let completeToken = false;
let completeQuotation = true;
let lastQuotation = '';
let i = input.length;
const doubleQuote = '"';
const singleQuote = '\'';
while (!completeToken && i >= 0) {
const isDoubleQuote = input[i] === doubleQuote;
const isSingleQuote = input[i] === singleQuote;
// If the second quotation is found
if ((lastQuotation === doubleQuote && isDoubleQuote) ||
(lastQuotation === singleQuote && isSingleQuote)) {
completeQuotation = true;
}
// Save the first quotation
if ((isDoubleQuote && lastQuotation === '') ||
(isSingleQuote && lastQuotation === '')) {
lastQuotation = input[i];
completeQuotation = false;
}
if (completeQuotation && input[i] === ' ') {
completeToken = true;
} else {
i -= 1;
}
}
// Adjust by 1 because of empty space
return input.slice(i + 1);
}
static processTokens(input) {
const tokenRegex = /(\w+):([~%@]?)(?:"(.*?)"|'(.*?)'|(\S+))/g;
const tokens = [];
let searchToken = '';
let lastToken = '';
const inputs = input.split(' ');
let searchTerms = '';
let lastQuotation = '';
let incompleteToken = false;
// Iterate through each word (broken up by spaces)
inputs.forEach((i) => {
if (incompleteToken) {
// Continue previous token as it had an escaped
// quote in the beginning
const prevToken = tokens.last();
prevToken.value += ` ${i}`;
// Remove last quotation from the value
const lastQuotationRegex = new RegExp(lastQuotation, 'g');
prevToken.value = prevToken.value.replace(lastQuotationRegex, '');
tokens[tokens.length - 1] = prevToken;
// Check to see if this quotation completes the token value
if (i.indexOf(lastQuotation) !== -1) {
lastToken = tokens.last();
incompleteToken = !incompleteToken;
}
return;
}
const colonIndex = i.indexOf(':');
if (colonIndex !== -1) {
const { tokenKey, tokenValue, tokenSymbol } = gl.FilteredSearchTokenizer.parseToken(i);
const keyMatch = gl.FilteredSearchTokenKeys.searchByKey(tokenKey);
const symbolMatch = gl.FilteredSearchTokenKeys.searchBySymbol(tokenSymbol);
const doubleQuoteOccurrences = tokenValue.split('"').length - 1;
const singleQuoteOccurrences = tokenValue.split('\'').length - 1;
let lastToken = null;
const searchToken = input.replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
let tokenValue = v1 || v2 || v3;
let tokenSymbol = symbol;
const doubleQuoteIndex = tokenValue.indexOf('"');
const singleQuoteIndex = tokenValue.indexOf('\'');
const doubleQuoteExist = doubleQuoteIndex !== -1;
const singleQuoteExist = singleQuoteIndex !== -1;
const doubleQuoteExistOnly = doubleQuoteExist && !singleQuoteExist;
const doubleQuoteIsBeforeSingleQuote =
doubleQuoteExist && singleQuoteExist && doubleQuoteIndex < singleQuoteIndex;
const singleQuoteExistOnly = singleQuoteExist && !doubleQuoteExist;
const singleQuoteIsBeforeDoubleQuote =
doubleQuoteExist && singleQuoteExist && singleQuoteIndex < doubleQuoteIndex;
if ((doubleQuoteExistOnly || doubleQuoteIsBeforeSingleQuote)
&& doubleQuoteOccurrences % 2 !== 0) {
// " is found and is in front of ' (if any)
lastQuotation = '"';
incompleteToken = true;
} else if ((singleQuoteExistOnly || singleQuoteIsBeforeDoubleQuote)
&& singleQuoteOccurrences % 2 !== 0) {
// ' is found and is in front of " (if any)
lastQuotation = '\'';
incompleteToken = true;
if (tokenValue === '~' || tokenValue === '%' || tokenValue === '@') {
tokenSymbol = tokenValue;
tokenValue = '';
}
if (keyMatch && tokenValue.length > 0) {
tokens.push({
key: keyMatch.key,
value: tokenValue,
wildcard: !symbolMatch,
key,
value: tokenValue || '',
symbol: tokenSymbol || '',
});
lastToken = tokens.last();
return;
}
return '';
}).replace(/\s{2,}/g, ' ').trim() || '';
if (tokens.length > 0) {
const last = tokens[tokens.length - 1];
const lastString = `${last.key}:${last.symbol}${last.value}`;
lastToken = input.lastIndexOf(lastString) ===
input.length - lastString.length ? last : searchToken;
} else {
lastToken = searchToken;
}
// Add space for next term
searchTerms += `${i} `;
lastToken = i;
}, this);
searchToken = searchTerms.trim();
return {
tokens,
searchToken,
lastToken,
searchToken,
};
}
}
......
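For reference, a standalone sketch of the regex-driven tokenization that replaces the character-by-character scanning above (sketchProcessTokens is a hypothetical name; the real gl.FilteredSearchTokenizer additionally derives lastToken and handles bare symbols):

const tokenRegex = /(\w+):([~%@]?)(?:"(.*?)"|'(.*?)'|(\S+))/g;

function sketchProcessTokens(input) {
  const tokens = [];
  // Every key:value match is collected and removed; whatever text survives the
  // replacement is the free-form search string. v1/v2/v3 are the double-quoted,
  // single-quoted, and bare value captures respectively.
  const searchToken = input.replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
    tokens.push({ key, value: v1 || v2 || v3 || '', symbol: symbol || '' });
    return '';
  }).replace(/\s{2,}/g, ' ').trim();
  return { tokens, searchToken };
}

console.log(sketchProcessTokens('author:@root label:~"Very Important" searchTerm'));
// => { tokens: [{ key: 'author', value: 'root', symbol: '@' },
//               { key: 'label', value: 'Very Important', symbol: '~' }],
//      searchToken: 'searchTerm' }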
@@ -34,11 +34,6 @@
title: '@root',
};
beforeEach(() => {
spyOn(gl.FilteredSearchTokenizer, 'getLastTokenObject')
.and.callFake(query => ({ value: query }));
});
it('should filter without symbol', () => {
const updatedItem = gl.DropdownUtils.filterWithSymbol('@', item, ':roo');
expect(updatedItem.droplab_hidden).toBe(false);
@@ -49,37 +44,27 @@
expect(updatedItem.droplab_hidden).toBe(false);
});
it('should filter with invalid symbol', () => {
const updatedItem = gl.DropdownUtils.filterWithSymbol('@', item, ':#');
expect(updatedItem.droplab_hidden).toBe(true);
});
it('should filter with colon', () => {
const updatedItem = gl.DropdownUtils.filterWithSymbol('@', item, ':');
expect(updatedItem.droplab_hidden).toBe(false);
});
});
describe('filterMethod', () => {
beforeEach(() => {
spyOn(gl.FilteredSearchTokenizer, 'getLastTokenObject')
.and.callFake(query => ({ value: query }));
});
it('should filter by hint', () => {
let updatedItem = gl.DropdownUtils.filterMethod({
describe('filterHint', () => {
it('should filter', () => {
let updatedItem = gl.DropdownUtils.filterHint({
hint: 'label',
}, 'l');
expect(updatedItem.droplab_hidden).toBe(false);
updatedItem = gl.DropdownUtils.filterMethod({
updatedItem = gl.DropdownUtils.filterHint({
hint: 'label',
}, 'o');
expect(updatedItem.droplab_hidden).toBe(true);
});
it('should return droplab_hidden false when item has no hint', () => {
const updatedItem = gl.DropdownUtils.filterMethod({}, '');
const updatedItem = gl.DropdownUtils.filterHint({}, '');
expect(updatedItem.droplab_hidden).toBe(false);
});
});
......
@@ -21,13 +21,6 @@
});
describe('input has no existing value', () => {
beforeEach(() => {
spyOn(gl.FilteredSearchTokenizer, 'processTokens')
.and.callFake(() => ({
lastToken: {},
}));
});
it('should add word', () => {
gl.FilteredSearchDropdownManager.addWordToInput('firstWord');
expect(getInputValue()).toBe('firstWord');
@@ -61,26 +54,13 @@
value: 'roo',
};
spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.callFake(() => ({
lastToken,
}));
document.querySelector('.filtered-search').value = `${lastToken.key}:${lastToken.value}`;
gl.FilteredSearchDropdownManager.addWordToInput('root');
expect(getInputValue()).toBe('author:root');
});
it('should only add the remaining characters of the word (contains space)', () => {
const lastToken = {
key: 'label',
value: 'test me',
};
spyOn(gl.FilteredSearchTokenizer, 'processTokens').and.callFake(() => ({
lastToken,
}));
document.querySelector('.filtered-search').value = `${lastToken.key}:"${lastToken.value}"`;
document.querySelector('.filtered-search').value = 'label:~"test';
gl.FilteredSearchDropdownManager.addWordToInput('~\'"test me"\'');
expect(getInputValue()).toBe('label:~\'"test me"\'');
});
......
@@ -4,267 +4,100 @@
(() => {
describe('Filtered Search Tokenizer', () => {
describe('parseToken', () => {
it('should return key, value and symbol', () => {
const { tokenKey, tokenValue, tokenSymbol } = gl.FilteredSearchTokenizer
.parseToken('author:@user');
expect(tokenKey).toBe('author');
expect(tokenValue).toBe('@user');
expect(tokenSymbol).toBe('@');
});
it('should return value with spaces', () => {
const { tokenKey, tokenValue, tokenSymbol } = gl.FilteredSearchTokenizer
.parseToken('label:~"test me"');
expect(tokenKey).toBe('label');
expect(tokenValue).toBe('~"test me"');
expect(tokenSymbol).toBe('~');
});
});
describe('getLastTokenObject', () => {
beforeEach(() => {
spyOn(gl.FilteredSearchTokenizer, 'getLastToken').and.callFake(input => input);
});
it('should return key and value', () => {
const { key, value } = gl.FilteredSearchTokenizer.getLastTokenObject('author:@root');
expect(key).toBe('author');
expect(value).toBe(':@root');
});
describe('string without colon', () => {
let lastTokenObject;
beforeEach(() => {
lastTokenObject = gl.FilteredSearchTokenizer.getLastTokenObject('author');
});
it('should return key as an empty string', () => {
expect(lastTokenObject.key).toBe('');
});
it('should return input as value', () => {
expect(lastTokenObject.value).toBe('author');
});
});
});
describe('getLastToken', () => {
it('returns entire string when there is only one word', () => {
const lastToken = gl.FilteredSearchTokenizer.getLastToken('input');
expect(lastToken).toBe('input');
});
it('returns last word when there are multiple words', () => {
const lastToken = gl.FilteredSearchTokenizer.getLastToken('this is a few words');
expect(lastToken).toBe('words');
});
it('returns last token when there are multiple tokens', () => {
const lastToken = gl.FilteredSearchTokenizer
.getLastToken('label:fun author:root milestone:2.0');
expect(lastToken).toBe('milestone:2.0');
});
it('returns last token containing spaces escaped by double quotes', () => {
const lastToken = gl.FilteredSearchTokenizer
.getLastToken('label:fun author:root milestone:2.0 label:~"Feature Proposal"');
expect(lastToken).toBe('label:~"Feature Proposal"');
});
it('returns last token containing spaces escaped by single quotes', () => {
const lastToken = gl.FilteredSearchTokenizer
.getLastToken('label:fun author:root milestone:2.0 label:~\'Feature Proposal\'');
expect(lastToken).toBe('label:~\'Feature Proposal\'');
});
it('returns last token containing special characters', () => {
const lastToken = gl.FilteredSearchTokenizer
.getLastToken('label:fun author:root milestone:2.0 label:~!@#$%^&*()');
expect(lastToken).toBe('label:~!@#$%^&*()');
});
});
describe('processTokens', () => {
describe('input does not contain any tokens', () => {
let results;
beforeEach(() => {
results = gl.FilteredSearchTokenizer.processTokens('searchTerm');
});
it('returns input as searchToken', () => {
it('returns for input containing only search value', () => {
const results = gl.FilteredSearchTokenizer.processTokens('searchTerm');
expect(results.searchToken).toBe('searchTerm');
});
it('returns tokens as an empty array', () => {
expect(results.tokens.length).toBe(0);
});
it('returns lastToken equal to searchToken', () => {
expect(results.lastToken).toBe(results.searchToken);
});
});
describe('input contains only tokens', () => {
let results;
beforeEach(() => {
results = gl.FilteredSearchTokenizer
it('returns for input containing only tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none');
});
it('returns searchToken as an empty string', () => {
expect(results.searchToken).toBe('');
});
it('returns tokens array of size equal to the number of tokens in input', () => {
expect(results.tokens.length).toBe(4);
});
expect(results.tokens[3]).toBe(results.lastToken);
it('returns tokens array that matches the tokens found in input', () => {
expect(results.tokens[0].key).toBe('author');
expect(results.tokens[0].value).toBe('@root');
expect(results.tokens[0].wildcard).toBe(false);
expect(results.tokens[0].value).toBe('root');
expect(results.tokens[0].symbol).toBe('@');
expect(results.tokens[1].key).toBe('label');
expect(results.tokens[1].value).toBe('~Very Important');
expect(results.tokens[1].wildcard).toBe(false);
expect(results.tokens[1].value).toBe('Very Important');
expect(results.tokens[1].symbol).toBe('~');
expect(results.tokens[2].key).toBe('milestone');
expect(results.tokens[2].value).toBe('%v1.0');
expect(results.tokens[2].wildcard).toBe(false);
expect(results.tokens[2].value).toBe('v1.0');
expect(results.tokens[2].symbol).toBe('%');
expect(results.tokens[3].key).toBe('assignee');
expect(results.tokens[3].value).toBe('none');
expect(results.tokens[3].wildcard).toBe(true);
});
it('returns lastToken equal to the last object in the tokens array', () => {
expect(results.tokens[3]).toBe(results.lastToken);
});
expect(results.tokens[3].symbol).toBe('');
});
describe('input starts with search value and ends with tokens', () => {
let results;
beforeEach(() => {
results = gl.FilteredSearchTokenizer
it('returns for input starting with search value and ending with tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('searchTerm anotherSearchTerm milestone:none');
});
it('returns searchToken', () => {
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
});
it('returns correct number of tokens', () => {
expect(results.tokens.length).toBe(1);
});
it('returns correct tokens', () => {
expect(results.tokens[0]).toBe(results.lastToken);
expect(results.tokens[0].key).toBe('milestone');
expect(results.tokens[0].value).toBe('none');
expect(results.tokens[0].wildcard).toBe(true);
});
it('returns lastToken', () => {
expect(results.tokens[0]).toBe(results.lastToken);
});
expect(results.tokens[0].symbol).toBe('');
});
describe('input starts with token and ends with search value', () => {
let results;
beforeEach(() => {
results = gl.FilteredSearchTokenizer
it('returns for input starting with tokens and ending with search value', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('assignee:@user searchTerm');
});
it('returns searchToken', () => {
expect(results.searchToken).toBe('searchTerm');
});
it('returns correct number of tokens', () => {
expect(results.tokens.length).toBe(1);
});
it('returns correct tokens', () => {
expect(results.tokens[0].key).toBe('assignee');
expect(results.tokens[0].value).toBe('@user');
expect(results.tokens[0].wildcard).toBe(false);
});
it('returns lastToken as the searchTerm', () => {
expect(results.tokens[0].value).toBe('user');
expect(results.tokens[0].symbol).toBe('@');
expect(results.lastToken).toBe(results.searchToken);
});
});
describe('input contains search value wrapped between tokens', () => {
let results;
beforeEach(() => {
results = gl.FilteredSearchTokenizer
it('returns for input containing search value wrapped between tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none');
});
it('returns searchToken', () => {
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
});
it('returns correct number of tokens', () => {
expect(results.tokens.length).toBe(3);
});
expect(results.tokens[2]).toBe(results.lastToken);
it('returns tokens array in the order it was processed', () => {
expect(results.tokens[0].key).toBe('author');
expect(results.tokens[0].value).toBe('@root');
expect(results.tokens[0].wildcard).toBe(false);
expect(results.tokens[0].value).toBe('root');
expect(results.tokens[0].symbol).toBe('@');
expect(results.tokens[1].key).toBe('label');
expect(results.tokens[1].value).toBe('~Won\'t fix');
expect(results.tokens[1].wildcard).toBe(false);
expect(results.tokens[1].value).toBe('Won\'t fix');
expect(results.tokens[1].symbol).toBe('~');
expect(results.tokens[2].key).toBe('milestone');
expect(results.tokens[2].value).toBe('none');
expect(results.tokens[2].wildcard).toBe(true);
});
it('returns lastToken', () => {
expect(results.tokens[2]).toBe(results.lastToken);
});
expect(results.tokens[2].symbol).toBe('');
});
describe('input search value is spaced in between tokens', () => {
let results;
beforeEach(() => {
results = gl.FilteredSearchTokenizer
it('returns for input containing search value in between tokens', () => {
const results = gl.FilteredSearchTokenizer
.processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing');
});
it('returns searchToken', () => {
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
});
it('returns correct number of tokens', () => {
expect(results.tokens.length).toBe(3);
});
expect(results.tokens[2]).toBe(results.lastToken);
it('returns tokens array in the order it was processed', () => {
expect(results.tokens[0].key).toBe('author');
expect(results.tokens[0].value).toBe('@root');
expect(results.tokens[0].wildcard).toBe(false);
expect(results.tokens[0].value).toBe('root');
expect(results.tokens[0].symbol).toBe('@');
expect(results.tokens[1].key).toBe('assignee');
expect(results.tokens[1].value).toBe('none');
expect(results.tokens[1].wildcard).toBe(true);
expect(results.tokens[1].symbol).toBe('');
expect(results.tokens[2].key).toBe('label');
expect(results.tokens[2].value).toBe('~Doing');
expect(results.tokens[2].wildcard).toBe(false);
});
it('returns lastToken', () => {
expect(results.tokens[2]).toBe(results.lastToken);
});
expect(results.tokens[2].value).toBe('Doing');
expect(results.tokens[2].symbol).toBe('~');
});
});
});
......