Commit 8ecc2117 authored by Clement Ho

Refactor validTokenKeys

parent 3d670422
 /* eslint-disable no-param-reassign */
 ((global) => {
-  const validTokenKeys = [{
-    key: 'author',
-    type: 'string',
-    param: 'username',
-    symbol: '@',
-  }, {
-    key: 'assignee',
-    type: 'string',
-    param: 'username',
-    symbol: '@',
-    conditions: [{
-      keyword: 'none',
-      url: 'assignee_id=0',
-    }],
-  }, {
-    key: 'milestone',
-    type: 'string',
-    param: 'title',
-    symbol: '%',
-    conditions: [{
-      keyword: 'none',
-      url: 'milestone_title=No+Milestone',
-    }, {
-      keyword: 'upcoming',
-      url: 'milestone_title=%23upcoming',
-    }],
-  }, {
-    key: 'label',
-    type: 'array',
-    param: 'name[]',
-    symbol: '~',
-    conditions: [{
-      keyword: 'none',
-      url: 'label_name[]=No+Label',
-    }],
-  }];
   function clearSearch(e) {
     e.stopPropagation();
     e.preventDefault();
@@ -66,9 +29,9 @@
       const key = decodeURIComponent(split[0]);
       const value = split[1];
-      // Check if it matches edge conditions listed in validTokenKeys
+      // Check if it matches edge conditions listed in gl.FilteredSearchTokenKeys.get()
       let conditionIndex = 0;
-      const validCondition = validTokenKeys
+      const validCondition = gl.FilteredSearchTokenKeys.get()
         .filter(v => v.conditions && v.conditions.filter((c, index) => {
           if (c.url === p) {
             conditionIndex = index;
@@ -82,7 +45,7 @@
       // Sanitize value since URL converts spaces into +
       // Replace before decode so that we know what was originally + versus the encoded +
       const sanitizedValue = value ? decodeURIComponent(value.replace(/[+]/g, ' ')) : value;
-      const match = validTokenKeys.filter(t => key === `${t.key}_${t.param}`)[0];
+      const match = gl.FilteredSearchTokenKeys.get().filter(t => key === `${t.key}_${t.param}`)[0];
       if (match) {
         const sanitizedKey = key.slice(0, key.indexOf('_'));
@@ -116,7 +79,7 @@
   class FilteredSearchManager {
     constructor() {
-      this.tokenizer = new gl.FilteredSearchTokenizer(validTokenKeys);
+      this.tokenizer = gl.FilteredSearchTokenizer;
       this.bindEvents();
       loadSearchParamsFromURL();
     }
@@ -131,6 +94,7 @@
       document.querySelector('.clear-search').addEventListener('click', clearSearch);
     }
+    // TODO: This is only used for testing, remove when going to PRO
     processInput(e) {
       const input = e.target.value;
       this.tokenizer.processTokens(input);
@@ -155,8 +119,7 @@
       const defaultState = 'opened';
       let currentState = defaultState;
-      const tokens = this.tokenizer.getTokens();
-      const searchToken = this.tokenizer.getSearchToken();
+      const { tokens, searchToken } = this.tokenizer.processTokens(document.querySelector('.filtered-search').value);
       if (stateIndex !== -1) {
         const remaining = currentPath.slice(stateIndex + 6);
@@ -167,7 +130,7 @@
       path += `&state=${currentState}`;
       tokens.forEach((token) => {
-        const match = validTokenKeys.filter(t => t.key === token.key)[0];
+        const match = gl.FilteredSearchTokenKeys.get().filter(t => t.key === token.key)[0];
         let tokenPath = '';
         if (token.wildcard && match.conditions) {
...
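A minimal sketch (not part of the commit) of how the URL-parameter matching above resolves a token key after this refactor. The query fragment 'author_username=root' is an illustrative input; the calls and the key/param/symbol values come from the diff itself.

// Hypothetical input: a query-string fragment such as 'author_username=root'.
const key = 'author_username';
// The manager now reads token keys from the shared class instead of a local const.
const match = gl.FilteredSearchTokenKeys.get()
  .filter(t => key === `${t.key}_${t.param}`)[0];
// match => { key: 'author', type: 'string', param: 'username', symbol: '@' }
const sanitizedKey = key.slice(0, key.indexOf('_')); // 'author'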
/* eslint-disable no-param-reassign */
((global) => {
  class FilteredSearchTokenKeys {
    static get() {
      return [{
        key: 'author',
        type: 'string',
        param: 'username',
        symbol: '@',
      }, {
        key: 'assignee',
        type: 'string',
        param: 'username',
        symbol: '@',
        conditions: [{
          keyword: 'none',
          url: 'assignee_id=0',
        }],
      }, {
        key: 'milestone',
        type: 'string',
        param: 'title',
        symbol: '%',
        conditions: [{
          keyword: 'none',
          url: 'milestone_title=No+Milestone',
        }, {
          keyword: 'upcoming',
          url: 'milestone_title=%23upcoming',
        }],
      }, {
        key: 'label',
        type: 'array',
        param: 'name[]',
        symbol: '~',
        conditions: [{
          keyword: 'none',
          url: 'label_name[]=No+Label',
        }],
      }];
    }
  }

  global.FilteredSearchTokenKeys = FilteredSearchTokenKeys;
})(window.gl || (window.gl = {}));
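A rough usage sketch of the new class, assuming the search keyword `milestone:upcoming` as an illustrative example; the `conditions` data comes directly from the file above, while the lookup itself is only a sketch of how a caller might use it.

// A value typed without a leading symbol (e.g. `milestone:upcoming`) becomes a
// wildcard token; its keyword can be looked up in the `conditions` list to find
// the URL fragment to submit.
const milestone = gl.FilteredSearchTokenKeys.get().filter(t => t.key === 'milestone')[0];
const upcoming = milestone.conditions.filter(c => c.keyword === 'upcoming')[0];
// upcoming.url => 'milestone_title=%23upcoming'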
 /* eslint-disable no-param-reassign */
 ((global) => {
   class FilteredSearchTokenizer {
-    constructor(validTokenKeys) {
-      this.validTokenKeys = validTokenKeys;
-      this.resetTokens();
-    }
-    getTokens() {
-      return this.tokens;
-    }
-    getSearchToken() {
-      return this.searchToken;
-    }
-    resetTokens() {
-      this.tokens = [];
-      this.searchToken = '';
-    }
-    printTokens() {
+    // TODO: Remove when going to pro
+    static printTokens(tokens, searchToken, lastToken) {
       console.log('tokens:');
-      this.tokens.forEach(token => console.log(token));
-      console.log(`search: ${this.searchToken}`);
+      tokens.forEach(token => console.log(token));
+      console.log(`search: ${searchToken}`);
+      console.log('last token:');
+      console.log(lastToken);
     }
-    processTokens(input) {
-      // Re-calculate tokens
-      this.resetTokens();
+    static processTokens(input) {
+      let tokens = [];
+      let searchToken = '';
+      let lastToken = '';
+      const validTokenKeys = gl.FilteredSearchTokenKeys.get();
       const inputs = input.split(' ');
       let searchTerms = '';
@@ -36,16 +23,17 @@
       inputs.forEach((i) => {
         if (incompleteToken) {
-          const prevToken = this.tokens.last();
+          const prevToken = tokens.last();
           prevToken.value += ` ${i}`;
           // Remove last quotation
           const lastQuotationRegex = new RegExp(lastQuotation, 'g');
           prevToken.value = prevToken.value.replace(lastQuotationRegex, '');
-          this.tokens[this.tokens.length - 1] = prevToken;
+          tokens[tokens.length - 1] = prevToken;
           // Check to see if this quotation completes the token value
           if (i.indexOf(lastQuotation)) {
+            lastToken = tokens.last();
             incompleteToken = !incompleteToken;
           }
@@ -59,8 +47,8 @@
         const tokenValue = i.slice(colonIndex + 1);
         const tokenSymbol = tokenValue[0];
         console.log(tokenSymbol)
-        const keyMatch = this.validTokenKeys.filter(v => v.key === tokenKey)[0];
-        const symbolMatch = this.validTokenKeys.filter(v => v.symbol === tokenSymbol)[0];
+        const keyMatch = validTokenKeys.filter(v => v.key === tokenKey)[0];
+        const symbolMatch = validTokenKeys.filter(v => v.symbol === tokenSymbol)[0];
         const doubleQuoteIndex = tokenValue.indexOf('"');
         const singleQuoteIndex = tokenValue.indexOf('\'');
@@ -81,11 +69,12 @@
         }
         if (keyMatch && tokenValue.length > 0) {
-          this.tokens.push({
+          tokens.push({
             key: keyMatch.key,
             value: tokenValue,
             wildcard: symbolMatch ? false : true,
           });
+          lastToken = tokens.last();
           return;
         }
@@ -93,10 +82,19 @@
         // Add space for next term
         searchTerms += `${i} `;
+        lastToken = i;
       }, this);
-      this.searchToken = searchTerms.trim();
-      this.printTokens();
+      searchToken = searchTerms.trim();
+      // TODO: Remove when going to PRO
+      gl.FilteredSearchTokenizer.printTokens(tokens, searchToken, lastToken);
+      return {
+        tokens,
+        searchToken,
+        lastToken,
+      };
     }
   }
...
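A rough usage sketch of the refactored static tokenizer API; the input string and the resulting values are illustrative, inferred from the branches visible in this diff.

// processTokens is now static and returns its results instead of storing them on `this`.
const { tokens, searchToken, lastToken } = gl.FilteredSearchTokenizer
  .processTokens('author:@root fix');
// tokens      => [{ key: 'author', value: '@root', wildcard: false }]
// searchToken => 'fix'
// lastToken   => 'fix'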