nexedi / gitlab-ce

Commit 89b50aa6, authored May 22, 2017 by Clement Ho
Parent: f644cac6

Fix karma specs

Showing 9 changed files with 42 additions and 22 deletions
app/assets/javascripts/filtered_search/dropdown_utils.js (+2, -2)
app/assets/javascripts/filtered_search/filtered_search_manager.js (+2, -1)
app/assets/javascripts/filtered_search/filtered_search_token_keys_with_weights.js (+3, -1)
spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js (+4, -0)
spec/javascripts/filtered_search/dropdown_user_spec.js (+1, -1)
spec/javascripts/filtered_search/dropdown_utils_spec.js (+15, -6)
spec/javascripts/filtered_search/filtered_search_manager_spec.js (+1, -0)
spec/javascripts/filtered_search/filtered_search_token_keys_with_weights_spec.js (+2, -1)
spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js (+12, -10)
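The spec fixes all follow one pattern: the filtered-search helpers now receive the allowed token keys as an argument instead of resolving them internally, so every spec has to pass them explicitly. Below is a standalone sketch of that call shape; the simplified processTokens is illustrative only, and just the argument order mirrors the diff.

// Illustrative stand-in for the updated call shape: the caller supplies the
// allowed token keys rather than the tokenizer looking up a global default.
const allowedKeys = ['author', 'assignee', 'milestone', 'label'];

function processTokens(query, keys) {
  const tokens = [];
  const searchTerms = [];
  query.split(' ').forEach((word) => {
    const separator = word.indexOf(':');
    const key = separator === -1 ? null : word.slice(0, separator);
    if (key && keys.includes(key)) {
      tokens.push({ key, value: word.slice(separator + 1) });
    } else {
      searchTerms.push(word);
    }
  });
  return { tokens, searchToken: searchTerms.join(' ') };
}

console.log(processTokens('author:@root searchTerm', allowedKeys));
// => { tokens: [ { key: 'author', value: '@root' } ], searchToken: 'searchTerm' }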
app/assets/javascripts/filtered_search/dropdown_utils.js

@@ -50,8 +50,8 @@ class DropdownUtils {
     return updatedItem;
   }
 
-  static filterHint(options, item) {
-    const { input, allowedKeys } = options;
+  static filterHint(config, item) {
+    const { input, allowedKeys } = config;
     const updatedItem = item;
     const searchInput = gl.DropdownUtils.getSearchQuery(input);
     const { lastToken, tokens } =
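For reference, the new first argument is a plain config object rather than the input element itself. A minimal sketch of that destructuring follows; the hiding rule in the body is made up for illustration, and only the { input, allowedKeys } shape comes from the diff.

// Illustrative only: shows the { input, allowedKeys } config shape that
// filterHint now destructures. The filtering logic below is a simplification.
function filterHint(config, item) {
  const { input, allowedKeys } = config;
  const updatedItem = Object.assign({}, item);
  const lastWord = input.value.split(' ').pop();
  const matchesHint = Boolean(item.hint) && item.hint.startsWith(lastWord);
  const keyIsAllowed = Boolean(item.hint) && allowedKeys.indexOf(item.hint) !== -1;
  updatedItem.droplab_hidden = !(matchesHint && keyIsAllowed);
  return updatedItem;
}

const input = { value: 'l' };                 // stand-in for the fixture's text input
const allowedKeys = ['label', 'milestone'];
console.log(filterHint({ input, allowedKeys }, { hint: 'label' }).droplab_hidden); // false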
app/assets/javascripts/filtered_search/filtered_search_manager.js

@@ -453,7 +453,8 @@ class FilteredSearchManager {
     this.saveCurrentSearchQuery();
 
-    const { tokens, searchToken } = this.tokenizer.processTokens(searchQuery, this.filteredSearchTokenKeys.get());
+    const { tokens, searchToken }
+      = this.tokenizer.processTokens(searchQuery, this.filteredSearchTokenKeys.getKeys());
     const currentState = gl.utils.getParameterByName('state') || 'opened';
     paths.push(`state=${currentState}`);
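The only behavioural change here is swapping get() for getKeys() on the token-keys object. The hypothetical sketch below only illustrates why a separate accessor might exist; the real gl.FilteredSearchTokenKeys defines more than this, and its exact return values are not shown in this diff.

// Hypothetical stand-in: get() hands back the full token-key definitions,
// getKeys() only the key names. Field names are illustrative.
const tokenKeys = [
  { key: 'author', type: 'string', param: 'username', symbol: '@' },
  { key: 'label', type: 'array', param: 'name[]', symbol: '~' },
];

const FilteredSearchTokenKeys = {
  get: () => tokenKeys,
  getKeys: () => tokenKeys.map(tokenKey => tokenKey.key),
};

console.log(FilteredSearchTokenKeys.getKeys()); // [ 'author', 'label' ]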
app/assets/javascripts/filtered_search/filtered_search_token_keys_with_weights.js

@@ -54,10 +54,12 @@ class FilteredSearchTokenKeysWithWeights extends gl.FilteredSearchTokenKeys {
     const alternativeTokenKeys = FilteredSearchTokenKeysWithWeights.getAlternatives();
     const tokenKeysWithAlternative = tokenKeys.concat(alternativeTokenKeys);
     console.log(tokenKeysWithAlternative)
 
     return tokenKeysWithAlternative.find((tokenKey) => {
       let tokenKeyParam = tokenKey.key;
 
       if (tokenKey.param) {
         if (tokenKey.param !== 'weight') {
           tokenKeyParam += `_${tokenKey.param}`;
         }
spec/javascripts/filtered_search/components/recent_searches_dropdown_content_spec.js

@@ -2,6 +2,8 @@ import Vue from 'vue';
 import eventHub from '~/filtered_search/event_hub';
 import RecentSearchesDropdownContent from '~/filtered_search/components/recent_searches_dropdown_content';
+
+require('~/filtered_search/filtered_search_token_keys');
 
 const createComponent = (propsData) => {
   const Component = Vue.extend(RecentSearchesDropdownContent);

@@ -17,12 +19,14 @@ const trimMarkupWhitespace = text => text.replace(/(\n|\s)+/gm, ' ').trim();
 describe('RecentSearchesDropdownContent', () => {
   const propsDataWithoutItems = {
     items: [],
+    allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
   };
   const propsDataWithItems = {
     items: [
       'foo',
       'author:@root label:~foo bar',
     ],
+    allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
   };
 
   let vm;
spec/javascripts/filtered_search/dropdown_user_spec.js

@@ -12,7 +12,7 @@ describe('Dropdown User', () => {
     spyOn(gl.DropdownUser.prototype, 'getProjectId').and.callFake(() => {});
     spyOn(gl.DropdownUtils, 'getSearchInput').and.callFake(() => {});
 
-    dropdownUser = new gl.DropdownUser();
+    dropdownUser = new gl.DropdownUser(null, null, null, gl.FilteredSearchTokenKeys);
   });
 
   it('should not return the double quote found in value', () => {
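The constructor call is the interesting part: the spec now injects the token keys as the fourth argument rather than letting DropdownUser find them itself. A small sketch of that injection shape follows; the parameter names are guesses, since the diff only shows the spec passing three nulls followed by gl.FilteredSearchTokenKeys.

// Illustrative constructor-injection shape; not GitLab's DropdownUser.
class DropdownUser {
  constructor(droplab, dropdown, input, tokenKeys) {
    // Store the injected keys so the instance never touches a global default.
    this.tokenKeys = tokenKeys;
  }
}

const FilteredSearchTokenKeys = { getKeys: () => ['author', 'assignee'] }; // stand-in
const dropdownUser = new DropdownUser(null, null, null, FilteredSearchTokenKeys);
console.log(dropdownUser.tokenKeys.getKeys()); // [ 'author', 'assignee' ]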
spec/javascripts/filtered_search/dropdown_utils_spec.js

@@ -122,6 +122,7 @@ describe('Dropdown Utils', () => {
   describe('filterHint', () => {
     let input;
+    let allowedKeys;
 
     beforeEach(() => {
       setFixtures(`

@@ -133,30 +134,38 @@ describe('Dropdown Utils', () => {
       `);
 
       input = document.getElementById('test');
+      allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
     });
 
+    function config() {
+      return {
+        input,
+        allowedKeys,
+      }
+    }
+
     it('should filter', () => {
       input.value = 'l';
-      let updatedItem = gl.DropdownUtils.filterHint(input, {
+      let updatedItem = gl.DropdownUtils.filterHint(config(), {
         hint: 'label',
       });
       expect(updatedItem.droplab_hidden).toBe(false);
 
       input.value = 'o';
-      updatedItem = gl.DropdownUtils.filterHint(input, {
+      updatedItem = gl.DropdownUtils.filterHint(config(), {
         hint: 'label',
       });
       expect(updatedItem.droplab_hidden).toBe(true);
     });
 
     it('should return droplab_hidden false when item has no hint', () => {
-      const updatedItem = gl.DropdownUtils.filterHint(input, {}, '');
+      const updatedItem = gl.DropdownUtils.filterHint(config(), {}, '');
       expect(updatedItem.droplab_hidden).toBe(false);
     });
 
     it('should allow multiple if item.type is array', () => {
       input.value = 'label:~first la';
-      const updatedItem = gl.DropdownUtils.filterHint(input, {
+      const updatedItem = gl.DropdownUtils.filterHint(config(), {
         hint: 'label',
         type: 'array',
       });

@@ -165,12 +174,12 @@ describe('Dropdown Utils', () => {
     it('should prevent multiple if item.type is not array', () => {
       input.value = 'milestone:~first mile';
-      let updatedItem = gl.DropdownUtils.filterHint(input, {
+      let updatedItem = gl.DropdownUtils.filterHint(config(), {
         hint: 'milestone',
       });
       expect(updatedItem.droplab_hidden).toBe(true);
 
-      updatedItem = gl.DropdownUtils.filterHint(input, {
+      updatedItem = gl.DropdownUtils.filterHint(config(), {
         hint: 'milestone',
         type: 'string',
       });
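The config() helper is worth calling out: because input and allowedKeys are assigned in beforeEach, the specs build the argument object through a factory at call time instead of capturing a stale reference once. A framework-free sketch of why that matters; the variable names mirror the spec, everything else is illustrative.

// Illustrative: a factory reads the *current* values each time it is called,
// whereas an object literal created up front would freeze the initial ones.
let input;
let allowedKeys;

function config() {
  return { input, allowedKeys };
}

const staleConfig = { input, allowedKeys };   // both still undefined here

// Simulates what beforeEach does before every example runs.
input = { value: 'l' };
allowedKeys = ['label', 'milestone'];

console.log(config().allowedKeys);     // [ 'label', 'milestone' ]
console.log(staleConfig.allowedKeys);  // undefined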
spec/javascripts/filtered_search/filtered_search_manager_spec.js

@@ -81,6 +81,7 @@ describe('Filtered Search Manager', () => {
       expect(RecentSearchesService.isAvailable).toHaveBeenCalled();
       expect(recentSearchesStoreSrc.default).toHaveBeenCalledWith({
         isLocalStorageAvailable,
+        allowedKeys: gl.FilteredSearchTokenKeys.getKeys(),
       });
     });
spec/javascripts/filtered_search/filtered_search_token_keys_with_weights_spec.js

@@ -6,8 +6,9 @@ require('~/filtered_search/filtered_search_token_keys_with_weights');
 const weightTokenKey = {
   key: 'weight',
   type: 'string',
-  param: '',
+  param: 'weight',
   symbol: '',
+  icon: 'balance-scale',
 };
 
 describe('get', () => {
spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js

@@ -3,9 +3,11 @@ require('~/filtered_search/filtered_search_token_keys');
 require('~/filtered_search/filtered_search_tokenizer');
 
 describe('Filtered Search Tokenizer', () => {
+  let allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
+
   describe('processTokens', () => {
     it('returns for input containing only search value', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('searchTerm');
+      const results = gl.FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys);
       expect(results.searchToken).toBe('searchTerm');
       expect(results.tokens.length).toBe(0);
       expect(results.lastToken).toBe(results.searchToken);

@@ -13,7 +15,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input containing only tokens', () => {
       const results = gl.FilteredSearchTokenizer
-        .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none');
+        .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none', allowedKeys);
       expect(results.searchToken).toBe('');
       expect(results.tokens.length).toBe(4);
       expect(results.tokens[3]).toBe(results.lastToken);

@@ -37,7 +39,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input starting with search value and ending with tokens', () => {
       const results = gl.FilteredSearchTokenizer
-        .processTokens('searchTerm anotherSearchTerm milestone:none');
+        .processTokens('searchTerm anotherSearchTerm milestone:none', allowedKeys);
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(1);
       expect(results.tokens[0]).toBe(results.lastToken);

@@ -48,7 +50,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input starting with tokens and ending with search value', () => {
       const results = gl.FilteredSearchTokenizer
-        .processTokens('assignee:@user searchTerm');
+        .processTokens('assignee:@user searchTerm', allowedKeys);
       expect(results.searchToken).toBe('searchTerm');
       expect(results.tokens.length).toBe(1);

@@ -60,7 +62,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input containing search value wrapped between tokens', () => {
       const results = gl.FilteredSearchTokenizer
-        .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none');
+        .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none', allowedKeys);
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(3);

@@ -81,7 +83,7 @@ describe('Filtered Search Tokenizer', () => {
     it('returns for input containing search value in between tokens', () => {
       const results = gl.FilteredSearchTokenizer
-        .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing');
+        .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing', allowedKeys);
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(3);
       expect(results.tokens[2]).toBe(results.lastToken);

@@ -100,14 +102,14 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns search value for invalid tokens', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('fake:token');
+      const results = gl.FilteredSearchTokenizer.processTokens('fake:token', allowedKeys);
       expect(results.lastToken).toBe('fake:token');
       expect(results.searchToken).toBe('fake:token');
       expect(results.tokens.length).toEqual(0);
     });
 
     it('returns search value and token for mix of valid and invalid tokens', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token');
+      const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys);
       expect(results.tokens.length).toEqual(1);
       expect(results.tokens[0].key).toBe('label');
       expect(results.tokens[0].value).toBe('real');

@@ -117,13 +119,13 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns search value for invalid symbols', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('std::includes');
+      const results = gl.FilteredSearchTokenizer.processTokens('std::includes', allowedKeys);
       expect(results.lastToken).toBe('std::includes');
       expect(results.searchToken).toBe('std::includes');
     });
 
     it('removes duplicated values', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo');
+      const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys);
       expect(results.tokens.length).toBe(1);
       expect(results.tokens[0].key).toBe('label');
       expect(results.tokens[0].value).toBe('foo');