Commit d28595e3 authored by Mike Greiling

Merge branch...

Merge branch '213154-re-submitting-an-epic-filter-on-the-issues-search-bar-prepends-an-before-the-id' into 'master'

Fix failing filtered search when re-submitting epic tokens

Closes #213154

See merge request gitlab-org/gitlab!35205
parents e15045dd 1af00903
...@@ -6,7 +6,7 @@ export default class FilteredSearchTokenizer { ...@@ -6,7 +6,7 @@ export default class FilteredSearchTokenizer {
// Values that start with a double quote must end in a double quote (same for single) // Values that start with a double quote must end in a double quote (same for single)
const tokenRegex = new RegExp( const tokenRegex = new RegExp(
`(${allowedKeys.join('|')}):(=|!=)?([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`, `(${allowedKeys.join('|')}):(=|!=)?([~%@&]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`,
'g', 'g',
); );
const tokens = []; const tokens = [];
...@@ -15,17 +15,19 @@ export default class FilteredSearchTokenizer { ...@@ -15,17 +15,19 @@ export default class FilteredSearchTokenizer {
const searchToken = const searchToken =
input input
.replace(tokenRegex, (match, key, operator, symbol, v1, v2, v3) => { .replace(tokenRegex, (match, key, operator, symbol, v1, v2, v3) => {
const prefixedTokens = ['~', '%', '@', '&'];
const comparisonTokens = ['!=', '='];
let tokenValue = v1 || v2 || v3; let tokenValue = v1 || v2 || v3;
let tokenSymbol = symbol; let tokenSymbol = symbol;
let tokenIndex = ''; let tokenIndex = '';
let tokenOperator = operator; let tokenOperator = operator;
if (tokenValue === '~' || tokenValue === '%' || tokenValue === '@') { if (prefixedTokens.includes(tokenValue)) {
tokenSymbol = tokenValue; tokenSymbol = tokenValue;
tokenValue = ''; tokenValue = '';
} }
if (tokenValue === '!=' || tokenValue === '=') { if (comparisonTokens.includes(tokenValue)) {
tokenOperator = tokenValue; tokenOperator = tokenValue;
tokenValue = ''; tokenValue = '';
} }
......
---
title: Fix failing filtered search when re-submitting epic tokens
merge_request: 35205
author:
type: fixed
import IssuableFilteredSearchTokenKeys from 'ee/filtered_search/issuable_filtered_search_token_keys';
import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer';
// Specs for FilteredSearchTokenizer.processTokens, focused on epic tokens
// whose values are prefixed with the '&' symbol (e.g. `epic:=&36`).
describe('Filtered Search Tokenizer', () => {
  const allowedKeys = IssuableFilteredSearchTokenKeys.getKeys();

  describe('processTokens', () => {
    describe('epic tokens', () => {
      it.each`
        searchQuery     | operator
        ${'epic:=&36'}  | ${'='}
        ${'epic:!=&36'} | ${'!='}
      `('returns for input containing $searchQuery', ({ searchQuery, operator }) => {
        // '&'-prefixed values must be parsed for both comparison operators.
        const { searchToken, tokens } = FilteredSearchTokenizer.processTokens(
          searchQuery,
          allowedKeys,
        );
        const [epicToken] = tokens;

        expect(searchToken).toBe('');
        expect(tokens).toHaveLength(1);
        expect(epicToken.key).toBe('epic');
        expect(epicToken.operator).toBe(operator);
        expect(epicToken.symbol).toBe('&');
        expect(epicToken.value).toBe('36');
      });

      it('returns for input containing string values', () => {
        // A bare string value ('any') carries no symbol prefix.
        const { searchToken, tokens } = FilteredSearchTokenizer.processTokens(
          'epic:=any',
          allowedKeys,
        );
        const [epicToken] = tokens;

        expect(searchToken).toBe('');
        expect(tokens).toHaveLength(1);
        expect(epicToken.key).toBe('epic');
        expect(epicToken.operator).toBe('=');
        expect(epicToken.symbol).toBe('');
        expect(epicToken.value).toBe('any');
      });
    });
  });
});
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment