Commit 4e375367 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 99ddca0d
<script> <script>
import { mapActions, mapState } from 'vuex'; import { mapActions, mapState } from 'vuex';
import _ from 'underscore'; import _ from 'underscore';
import Icon from '~/vue_shared/components/icon.vue';
import { GlLoadingIcon } from '@gitlab/ui'; import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import Item from './item.vue'; import Item from './item.vue';
export default { export default {
......
<script> <script>
import $ from 'jquery'; import $ from 'jquery';
import { mapActions, mapState } from 'vuex'; import { mapActions, mapState } from 'vuex';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
import { GlLoadingIcon } from '@gitlab/ui'; import { GlLoadingIcon } from '@gitlab/ui';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
export default { export default {
components: { components: {
......
<script> <script>
import { mapActions, mapGetters, mapState } from 'vuex'; import { mapActions, mapGetters, mapState } from 'vuex';
import Icon from '~/vue_shared/components/icon.vue';
import { GlSkeletonLoading } from '@gitlab/ui'; import { GlSkeletonLoading } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import FileRow from '~/vue_shared/components/file_row.vue'; import FileRow from '~/vue_shared/components/file_row.vue';
import NavDropdown from './nav_dropdown.vue'; import NavDropdown from './nav_dropdown.vue';
import FileRowExtra from './file_row_extra.vue'; import FileRowExtra from './file_row_extra.vue';
......
<script> <script>
import { mapActions, mapState } from 'vuex'; import { mapActions, mapState } from 'vuex';
import _ from 'underscore'; import _ from 'underscore';
import { GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale'; import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue'; import Icon from '~/vue_shared/components/icon.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import Item from './item.vue'; import Item from './item.vue';
import TokenedInput from '../shared/tokened_input.vue'; import TokenedInput from '../shared/tokened_input.vue';
......
<script> <script>
import $ from 'jquery'; import $ from 'jquery';
import { mapActions, mapState, mapGetters } from 'vuex';
import flash from '~/flash'; import flash from '~/flash';
import { __, sprintf, s__ } from '~/locale'; import { __, sprintf, s__ } from '~/locale';
import { mapActions, mapState, mapGetters } from 'vuex';
import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue'; import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
import { modalTypes } from '../../constants'; import { modalTypes } from '../../constants';
......
<script> <script>
import { listen } from 'codesandbox-api'; import { listen } from 'codesandbox-api';
import Icon from '~/vue_shared/components/icon.vue';
import { GlLoadingIcon } from '@gitlab/ui'; import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
export default { export default {
components: { components: {
......
<script> <script>
import { __, sprintf } from '~/locale';
import { mapActions } from 'vuex'; import { mapActions } from 'vuex';
import { __, sprintf } from '~/locale';
import FileIcon from '~/vue_shared/components/file_icon.vue'; import FileIcon from '~/vue_shared/components/file_icon.vue';
import Icon from '~/vue_shared/components/icon.vue'; import Icon from '~/vue_shared/components/icon.vue';
......
import $ from 'jquery'; import $ from 'jquery';
import Vue from 'vue'; import Vue from 'vue';
import _ from 'underscore';
import { __, sprintf } from '~/locale'; import { __, sprintf } from '~/locale';
import { visitUrl } from '~/lib/utils/url_utility'; import { visitUrl } from '~/lib/utils/url_utility';
import flash from '~/flash'; import flash from '~/flash';
import _ from 'underscore';
import * as types from './mutation_types'; import * as types from './mutation_types';
import { decorateFiles } from '../lib/files'; import { decorateFiles } from '../lib/files';
import { stageKeys } from '../constants'; import { stageKeys } from '../constants';
......
...@@ -246,7 +246,7 @@ class Commit ...@@ -246,7 +246,7 @@ class Commit
def lazy_author def lazy_author
BatchLoader.for(author_email.downcase).batch do |emails, loader| BatchLoader.for(author_email.downcase).batch do |emails, loader|
users = User.by_any_email(emails).includes(:emails) users = User.by_any_email(emails, confirmed: true).includes(:emails)
emails.each do |email| emails.each do |email|
user = users.find { |u| u.any_email?(email) } user = users.find { |u| u.any_email?(email) }
...@@ -263,8 +263,8 @@ class Commit ...@@ -263,8 +263,8 @@ class Commit
end end
request_cache(:author) { author_email.downcase } request_cache(:author) { author_email.downcase }
def committer def committer(confirmed: true)
@committer ||= User.find_by_any_email(committer_email) @committer ||= User.find_by_any_email(committer_email, confirmed: confirmed)
end end
def parents def parents
......
---
title: Add nonunique indexes to Labels
merge_request: 21230
author:
type: fixed
---
title: Do not attribute unverified commit e-mails to GitLab users
merge_request: 21214
author:
type: fixed
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddLabelProjectGroupPartialIndexes < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
PROJECT_AND_TITLE = [:project_id, :title]
GROUP_AND_TITLE = [:group_id, :title]
def up
add_concurrent_index :labels, PROJECT_AND_TITLE, unique: false, where: "labels.group_id = null"
add_concurrent_index :labels, GROUP_AND_TITLE, unique: false, where: "labels.project_id = null"
end
def down
remove_concurrent_index :labels, PROJECT_AND_TITLE
remove_concurrent_index :labels, GROUP_AND_TITLE
end
end
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2019_12_04_070713) do ActiveRecord::Schema.define(version: 2019_12_04_093410) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm" enable_extension "pg_trgm"
...@@ -2228,6 +2228,8 @@ ActiveRecord::Schema.define(version: 2019_12_04_070713) do ...@@ -2228,6 +2228,8 @@ ActiveRecord::Schema.define(version: 2019_12_04_070713) do
t.integer "group_id" t.integer "group_id"
t.integer "cached_markdown_version" t.integer "cached_markdown_version"
t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true
t.index ["group_id", "title"], name: "index_labels_on_group_id_and_title", where: "(project_id = NULL::integer)"
t.index ["project_id", "title"], name: "index_labels_on_project_id_and_title", where: "(group_id = NULL::integer)"
t.index ["project_id"], name: "index_labels_on_project_id" t.index ["project_id"], name: "index_labels_on_project_id"
t.index ["template"], name: "index_labels_on_template", where: "template" t.index ["template"], name: "index_labels_on_template", where: "template"
t.index ["title"], name: "index_labels_on_title" t.index ["title"], name: "index_labels_on_title"
......
...@@ -61,7 +61,7 @@ describe 'Member autocomplete', :js do ...@@ -61,7 +61,7 @@ describe 'Member autocomplete', :js do
before do before do
allow(User).to receive(:find_by_any_email) allow(User).to receive(:find_by_any_email)
.with(noteable.author_email.downcase).and_return(author) .with(noteable.author_email.downcase, confirmed: true).and_return(author)
visit project_commit_path(project, noteable) visit project_commit_path(project, noteable)
end end
......
...@@ -76,16 +76,23 @@ describe 'User browses commits' do ...@@ -76,16 +76,23 @@ describe 'User browses commits' do
end end
context 'secondary email' do context 'secondary email' do
let(:user) { create(:user) }
it 'finds a commit by a secondary email' do it 'finds a commit by a secondary email' do
user = create(:email, :confirmed, user: user, email: 'dmitriy.zaporozhets@gmail.com')
create(:user) do |user|
create(:email, { user: user, email: 'dmitriy.zaporozhets@gmail.com' })
end
visit(project_commit_path(project, sample_commit.parent_id)) visit(project_commit_path(project, sample_commit.parent_id))
check_author_link(sample_commit.author_email, user) check_author_link(sample_commit.author_email, user)
end end
it 'links to an unverified e-mail address instead of the user' do
create(:email, user: user, email: 'dmitriy.zaporozhets@gmail.com')
visit(project_commit_path(project, sample_commit.parent_id))
check_author_email(sample_commit.author_email)
end
end end
context 'when the blob does not exist' do context 'when the blob does not exist' do
...@@ -263,3 +270,9 @@ def check_author_link(email, author) ...@@ -263,3 +270,9 @@ def check_author_link(email, author)
expect(author_link['href']).to eq(user_path(author)) expect(author_link['href']).to eq(user_path(author))
expect(find('.commit-author-name').text).to eq(author.name) expect(find('.commit-author-name').text).to eq(author.name)
end end
def check_author_email(email)
author_link = find('.commit-author-link')
expect(author_link['href']).to eq("mailto:#{email}")
end
import Vue from 'vue'; import Vue from 'vue';
import mountCompontent from 'spec/helpers/vue_mount_component_helper'; import mountCompontent from 'helpers/vue_mount_component_helper';
import router from '~/ide/ide_router'; import router from '~/ide/ide_router';
import Item from '~/ide/components/branches/item.vue'; import Item from '~/ide/components/branches/item.vue';
import { getTimeago } from '~/lib/utils/datetime_utility'; import { getTimeago } from '~/lib/utils/datetime_utility';
...@@ -30,7 +30,7 @@ describe('IDE branch item', () => { ...@@ -30,7 +30,7 @@ describe('IDE branch item', () => {
it('renders branch name and timeago', () => { it('renders branch name and timeago', () => {
const timeText = getTimeago().format(TEST_BRANCH.committedDate); const timeText = getTimeago().format(TEST_BRANCH.committedDate);
expect(vm.$el).toContainText(TEST_BRANCH.name); expect(vm.$el.textContent).toContain(TEST_BRANCH.name);
expect(vm.$el.querySelector('time')).toHaveText(timeText); expect(vm.$el.querySelector('time')).toHaveText(timeText);
expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null); expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);
}); });
...@@ -39,7 +39,7 @@ describe('IDE branch item', () => { ...@@ -39,7 +39,7 @@ describe('IDE branch item', () => {
const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`) const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`)
.href; .href;
expect(vm.$el).toMatch('a'); expect(vm.$el.textContent).toMatch('a');
expect(vm.$el).toHaveAttr('href', expectedHref); expect(vm.$el).toHaveAttr('href', expectedHref);
}); });
......
import { shallowMount, createLocalVue } from '@vue/test-utils'; import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex'; import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale'; import { __ } from '~/locale';
import List from '~/ide/components/branches/search_list.vue'; import List from '~/ide/components/branches/search_list.vue';
import Item from '~/ide/components/branches/item.vue'; import Item from '~/ide/components/branches/item.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { branches } from '../../mock_data'; import { branches } from '../../mock_data';
const localVue = createLocalVue(); const localVue = createLocalVue();
......
import { shallowMount, createLocalVue } from '@vue/test-utils'; import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex'; import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import List from '~/ide/components/merge_requests/list.vue'; import List from '~/ide/components/merge_requests/list.vue';
import Item from '~/ide/components/merge_requests/item.vue'; import Item from '~/ide/components/merge_requests/item.vue';
import TokenedInput from '~/ide/components/shared/tokened_input.vue'; import TokenedInput from '~/ide/components/shared/tokened_input.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { mergeRequests as mergeRequestsMock } from '../../mock_data'; import { mergeRequests as mergeRequestsMock } from '../../mock_data';
const localVue = createLocalVue(); const localVue = createLocalVue();
......
...@@ -3,7 +3,7 @@ import '~/behaviors/markdown/render_gfm'; ...@@ -3,7 +3,7 @@ import '~/behaviors/markdown/render_gfm';
import { createStore } from '~/ide/stores'; import { createStore } from '~/ide/stores';
import RightPane from '~/ide/components/panes/right.vue'; import RightPane from '~/ide/components/panes/right.vue';
import { rightSidebarViews } from '~/ide/constants'; import { rightSidebarViews } from '~/ide/constants';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper'; import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
describe('IDE right pane', () => { describe('IDE right pane', () => {
let Component; let Component;
...@@ -56,7 +56,7 @@ describe('IDE right pane', () => { ...@@ -56,7 +56,7 @@ describe('IDE right pane', () => {
describe('click', () => { describe('click', () => {
beforeEach(() => { beforeEach(() => {
spyOn(vm, 'open'); jest.spyOn(vm, 'open').mockReturnValue();
}); });
it('sets view to merge request', done => { it('sets view to merge request', done => {
...@@ -74,7 +74,9 @@ describe('IDE right pane', () => { ...@@ -74,7 +74,9 @@ describe('IDE right pane', () => {
describe('live preview', () => { describe('live preview', () => {
it('renders live preview button', done => { it('renders live preview button', done => {
Vue.set(vm.$store.state.entries, 'package.json', { name: 'package.json' }); Vue.set(vm.$store.state.entries, 'package.json', {
name: 'package.json',
});
vm.$store.state.clientsidePreviewEnabled = true; vm.$store.state.clientsidePreviewEnabled = true;
vm.$nextTick(() => { vm.$nextTick(() => {
......
import { shallowMount, createLocalVue } from '@vue/test-utils'; import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex'; import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import List from '~/ide/components/pipelines/list.vue'; import List from '~/ide/components/pipelines/list.vue';
import JobsList from '~/ide/components/jobs/list.vue'; import JobsList from '~/ide/components/jobs/list.vue';
import Tab from '~/vue_shared/components/tabs/tab.vue'; import Tab from '~/vue_shared/components/tabs/tab.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue'; import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import { pipelines } from '../../../../javascripts/ide/mock_data'; import { pipelines } from '../../../../javascripts/ide/mock_data';
const localVue = createLocalVue(); const localVue = createLocalVue();
......
import Vue from 'vue'; import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper'; import mountComponent from 'helpers/vue_mount_component_helper';
import { TEST_HOST } from 'helpers/test_constants';
import ClientsideNavigator from '~/ide/components/preview/navigator.vue'; import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
describe('IDE clientside preview navigator', () => { describe('IDE clientside preview navigator', () => {
...@@ -12,14 +13,9 @@ describe('IDE clientside preview navigator', () => { ...@@ -12,14 +13,9 @@ describe('IDE clientside preview navigator', () => {
}); });
beforeEach(() => { beforeEach(() => {
manager = { manager = { bundlerURL: TEST_HOST, iframe: { src: '' } };
bundlerURL: gl.TEST_HOST,
iframe: { src: '' },
};
vm = mountComponent(Component, { vm = mountComponent(Component, { manager });
manager,
});
}); });
afterEach(() => { afterEach(() => {
...@@ -47,7 +43,7 @@ describe('IDE clientside preview navigator', () => { ...@@ -47,7 +43,7 @@ describe('IDE clientside preview navigator', () => {
it('calls back method when clicking back button', done => { it('calls back method when clicking back button', done => {
vm.navigationStack.push('/test'); vm.navigationStack.push('/test');
vm.navigationStack.push('/test2'); vm.navigationStack.push('/test2');
spyOn(vm, 'back'); jest.spyOn(vm, 'back').mockReturnValue();
vm.$nextTick(() => { vm.$nextTick(() => {
vm.$el.querySelector('.ide-navigator-btn').click(); vm.$el.querySelector('.ide-navigator-btn').click();
...@@ -60,7 +56,7 @@ describe('IDE clientside preview navigator', () => { ...@@ -60,7 +56,7 @@ describe('IDE clientside preview navigator', () => {
it('calls forward method when clicking forward button', done => { it('calls forward method when clicking forward button', done => {
vm.forwardNavigationStack.push('/test'); vm.forwardNavigationStack.push('/test');
spyOn(vm, 'forward'); jest.spyOn(vm, 'forward').mockReturnValue();
vm.$nextTick(() => { vm.$nextTick(() => {
vm.$el.querySelectorAll('.ide-navigator-btn')[1].click(); vm.$el.querySelectorAll('.ide-navigator-btn')[1].click();
...@@ -73,49 +69,35 @@ describe('IDE clientside preview navigator', () => { ...@@ -73,49 +69,35 @@ describe('IDE clientside preview navigator', () => {
describe('onUrlChange', () => { describe('onUrlChange', () => {
it('updates the path', () => { it('updates the path', () => {
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url` });
url: `${gl.TEST_HOST}/url`,
});
expect(vm.path).toBe('/url'); expect(vm.path).toBe('/url');
}); });
it('sets currentBrowsingIndex 0 if not already set', () => { it('sets currentBrowsingIndex 0 if not already set', () => {
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url` });
url: `${gl.TEST_HOST}/url`,
});
expect(vm.currentBrowsingIndex).toBe(0); expect(vm.currentBrowsingIndex).toBe(0);
}); });
it('increases currentBrowsingIndex if path doesnt match', () => { it('increases currentBrowsingIndex if path doesnt match', () => {
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url` });
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url2` });
url: `${gl.TEST_HOST}/url2`,
});
expect(vm.currentBrowsingIndex).toBe(1); expect(vm.currentBrowsingIndex).toBe(1);
}); });
it('does not increase currentBrowsingIndex if path matches', () => { it('does not increase currentBrowsingIndex if path matches', () => {
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url` });
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url` });
url: `${gl.TEST_HOST}/url`,
});
expect(vm.currentBrowsingIndex).toBe(0); expect(vm.currentBrowsingIndex).toBe(0);
}); });
it('pushes path into navigation stack', () => { it('pushes path into navigation stack', () => {
vm.onUrlChange({ vm.onUrlChange({ url: `${TEST_HOST}/url` });
url: `${gl.TEST_HOST}/url`,
});
expect(vm.navigationStack).toEqual(['/url']); expect(vm.navigationStack).toEqual(['/url']);
}); });
...@@ -128,7 +110,7 @@ describe('IDE clientside preview navigator', () => { ...@@ -128,7 +110,7 @@ describe('IDE clientside preview navigator', () => {
vm.navigationStack.push('/test'); vm.navigationStack.push('/test');
vm.navigationStack.push('/test2'); vm.navigationStack.push('/test2');
spyOn(vm, 'visitPath'); jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.back(); vm.back();
}); });
...@@ -152,7 +134,7 @@ describe('IDE clientside preview navigator', () => { ...@@ -152,7 +134,7 @@ describe('IDE clientside preview navigator', () => {
describe('forward', () => { describe('forward', () => {
it('calls visitPath with first entry in forwardNavigationStack', () => { it('calls visitPath with first entry in forwardNavigationStack', () => {
spyOn(vm, 'visitPath'); jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.forwardNavigationStack.push('/test'); vm.forwardNavigationStack.push('/test');
vm.forwardNavigationStack.push('/test2'); vm.forwardNavigationStack.push('/test2');
...@@ -165,7 +147,7 @@ describe('IDE clientside preview navigator', () => { ...@@ -165,7 +147,7 @@ describe('IDE clientside preview navigator', () => {
describe('refresh', () => { describe('refresh', () => {
it('calls refresh with current path', () => { it('calls refresh with current path', () => {
spyOn(vm, 'visitPath'); jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.path = '/test'; vm.path = '/test';
...@@ -179,7 +161,7 @@ describe('IDE clientside preview navigator', () => { ...@@ -179,7 +161,7 @@ describe('IDE clientside preview navigator', () => {
it('updates iframe src with passed in path', () => { it('updates iframe src with passed in path', () => {
vm.visitPath('/testpath'); vm.visitPath('/testpath');
expect(manager.iframe.src).toBe(`${gl.TEST_HOST}/testpath`); expect(manager.iframe.src).toBe(`${TEST_HOST}/testpath`);
}); });
}); });
}); });
import * as pathUtils from 'path';
import { decorateData } from '~/ide/stores/utils';
import state from '~/ide/stores/state';
import commitState from '~/ide/stores/modules/commit/state';
import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
import pipelinesState from '~/ide/stores/modules/pipelines/state';
import branchesState from '~/ide/stores/modules/branches/state';
import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
import paneState from '~/ide/stores/modules/pane/state';
export const resetStore = store => {
const newState = {
...state(),
commit: commitState(),
mergeRequests: mergeRequestsState(),
pipelines: pipelinesState(),
branches: branchesState(),
fileTemplates: fileTemplatesState(),
rightPane: paneState(),
};
store.replaceState(newState);
};
export const file = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
id,
type,
icon: 'icon',
url: 'url',
name,
path: parent ? `${parent.path}/${name}` : name,
parentPath: parent ? parent.path : '',
lastCommit: {},
});
export const createEntriesFromPaths = paths =>
paths
.map(path => ({
name: pathUtils.basename(path),
dir: pathUtils.dirname(path),
ext: pathUtils.extname(path),
}))
.reduce((entries, path, idx) => {
const { name } = path;
const parent = path.dir ? entries[path.dir] : null;
const type = path.ext ? 'blob' : 'tree';
const entry = file(name, (idx + 1).toString(), type, parent);
return {
[entry.path]: entry,
...entries,
};
}, {});
...@@ -31,7 +31,7 @@ describe('IDE router', () => { ...@@ -31,7 +31,7 @@ describe('IDE router', () => {
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`, `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`,
].forEach(route => { ].forEach(route => {
it(`finds project path when route is "${route}"`, () => { it(`finds project path when route is "${route}"`, () => {
spyOn(store, 'dispatch').and.returnValue(new Promise(() => {})); jest.spyOn(store, 'dispatch').mockReturnValue(new Promise(() => {}));
router.push(route); router.push(route);
......
...@@ -45,7 +45,9 @@ describe('IDE store getters', () => { ...@@ -45,7 +45,9 @@ describe('IDE store getters', () => {
localState.currentMergeRequestId = 1; localState.currentMergeRequestId = 1;
localState.projects.abcproject = { localState.projects.abcproject = {
mergeRequests: { mergeRequests: {
1: { mergeId: 1 }, 1: {
mergeId: 1,
},
}, },
}; };
...@@ -62,9 +64,21 @@ describe('IDE store getters', () => { ...@@ -62,9 +64,21 @@ describe('IDE store getters', () => {
describe('allBlobs', () => { describe('allBlobs', () => {
beforeEach(() => { beforeEach(() => {
Object.assign(localState.entries, { Object.assign(localState.entries, {
index: { type: 'blob', name: 'index', lastOpenedAt: 0 }, index: {
app: { type: 'blob', name: 'blob', lastOpenedAt: 0 }, type: 'blob',
folder: { type: 'folder', name: 'folder', lastOpenedAt: 0 }, name: 'index',
lastOpenedAt: 0,
},
app: {
type: 'blob',
name: 'blob',
lastOpenedAt: 0,
},
folder: {
type: 'folder',
name: 'folder',
lastOpenedAt: 0,
},
}); });
}); });
...@@ -174,7 +188,7 @@ describe('IDE store getters', () => { ...@@ -174,7 +188,7 @@ describe('IDE store getters', () => {
}, },
}; };
const localGetters = { const localGetters = {
findBranch: jasmine.createSpy('findBranchSpy'), findBranch: jest.fn(),
}; };
getters.currentBranch(localState, localGetters); getters.currentBranch(localState, localGetters);
...@@ -251,7 +265,9 @@ describe('IDE store getters', () => { ...@@ -251,7 +265,9 @@ describe('IDE store getters', () => {
describe('packageJson', () => { describe('packageJson', () => {
it('returns package.json entry', () => { it('returns package.json entry', () => {
localState.entries['package.json'] = { name: 'package.json' }; localState.entries['package.json'] = {
name: 'package.json',
};
expect(getters.packageJson(localState)).toEqual({ expect(getters.packageJson(localState)).toEqual({
name: 'package.json', name: 'package.json',
...@@ -273,7 +289,9 @@ describe('IDE store getters', () => { ...@@ -273,7 +289,9 @@ describe('IDE store getters', () => {
currentProject: { currentProject: {
default_branch: 'master', default_branch: 'master',
}, },
currentBranch: { can_push: true }, currentBranch: {
can_push: true,
},
}; };
expect(getters.canPushToBranch({}, localGetters)).toBeTruthy(); expect(getters.canPushToBranch({}, localGetters)).toBeTruthy();
...@@ -284,7 +302,9 @@ describe('IDE store getters', () => { ...@@ -284,7 +302,9 @@ describe('IDE store getters', () => {
currentProject: { currentProject: {
default_branch: 'master', default_branch: 'master',
}, },
currentBranch: { can_push: false }, currentBranch: {
can_push: false,
},
}; };
expect(getters.canPushToBranch({}, localGetters)).toBeFalsy(); expect(getters.canPushToBranch({}, localGetters)).toBeFalsy();
......
import MockAdapter from 'axios-mock-adapter'; import MockAdapter from 'axios-mock-adapter';
import testAction from 'spec/helpers/vuex_action_helper'; import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/branches/state'; import state from '~/ide/stores/modules/branches/state';
import * as types from '~/ide/stores/modules/branches/mutation_types'; import * as types from '~/ide/stores/modules/branches/mutation_types';
...@@ -21,12 +21,8 @@ describe('IDE branches actions', () => { ...@@ -21,12 +21,8 @@ describe('IDE branches actions', () => {
beforeEach(() => { beforeEach(() => {
mockedContext = { mockedContext = {
dispatch() {}, dispatch() {},
rootState: { rootState: { currentProjectId: projectData.name_with_namespace },
currentProjectId: projectData.name_with_namespace, rootGetters: { currentProject: projectData },
},
rootGetters: {
currentProject: projectData,
},
state: state(), state: state(),
}; };
...@@ -70,7 +66,7 @@ describe('IDE branches actions', () => { ...@@ -70,7 +66,7 @@ describe('IDE branches actions', () => {
type: 'setErrorMessage', type: 'setErrorMessage',
payload: { payload: {
text: 'Error loading branches.', text: 'Error loading branches.',
action: jasmine.any(Function), action: expect.any(Function),
actionText: 'Please try again', actionText: 'Please try again',
actionPayload: { search: TEST_SEARCH }, actionPayload: { search: TEST_SEARCH },
}, },
...@@ -105,15 +101,12 @@ describe('IDE branches actions', () => { ...@@ -105,15 +101,12 @@ describe('IDE branches actions', () => {
}); });
it('calls API with params', () => { it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough(); const apiSpy = jest.spyOn(axios, 'get');
fetchBranches(mockedContext, { search: TEST_SEARCH }); fetchBranches(mockedContext, { search: TEST_SEARCH });
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), { expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: jasmine.objectContaining({ params: expect.objectContaining({ search: TEST_SEARCH, sort: 'updated_desc' }),
search: TEST_SEARCH,
sort: 'updated_desc',
}),
}); });
}); });
...@@ -126,10 +119,7 @@ describe('IDE branches actions', () => { ...@@ -126,10 +119,7 @@ describe('IDE branches actions', () => {
[ [
{ type: 'requestBranches' }, { type: 'requestBranches' },
{ type: 'resetBranches' }, { type: 'resetBranches' },
{ { type: 'receiveBranchesSuccess', payload: branches },
type: 'receiveBranchesSuccess',
payload: branches,
},
], ],
done, done,
); );
...@@ -150,10 +140,7 @@ describe('IDE branches actions', () => { ...@@ -150,10 +140,7 @@ describe('IDE branches actions', () => {
[ [
{ type: 'requestBranches' }, { type: 'requestBranches' },
{ type: 'resetBranches' }, { type: 'resetBranches' },
{ { type: 'receiveBranchesError', payload: { search: TEST_SEARCH } },
type: 'receiveBranchesError',
payload: { search: TEST_SEARCH },
},
], ],
done, done,
); );
......
...@@ -10,7 +10,7 @@ describe('IDE branches mutations', () => { ...@@ -10,7 +10,7 @@ describe('IDE branches mutations', () => {
mockedState = state(); mockedState = state();
}); });
describe(types.REQUEST_BRANCHES, () => { describe('REQUEST_BRANCHES', () => {
it('sets loading to true', () => { it('sets loading to true', () => {
mutations[types.REQUEST_BRANCHES](mockedState); mutations[types.REQUEST_BRANCHES](mockedState);
...@@ -18,7 +18,7 @@ describe('IDE branches mutations', () => { ...@@ -18,7 +18,7 @@ describe('IDE branches mutations', () => {
}); });
}); });
describe(types.RECEIVE_BRANCHES_ERROR, () => { describe('RECEIVE_BRANCHES_ERROR', () => {
it('sets loading to false', () => { it('sets loading to false', () => {
mutations[types.RECEIVE_BRANCHES_ERROR](mockedState); mutations[types.RECEIVE_BRANCHES_ERROR](mockedState);
...@@ -26,7 +26,7 @@ describe('IDE branches mutations', () => { ...@@ -26,7 +26,7 @@ describe('IDE branches mutations', () => {
}); });
}); });
describe(types.RECEIVE_BRANCHES_SUCCESS, () => { describe('RECEIVE_BRANCHES_SUCCESS', () => {
it('sets branches', () => { it('sets branches', () => {
const expectedBranches = branches.map(branch => ({ const expectedBranches = branches.map(branch => ({
name: branch.name, name: branch.name,
...@@ -39,7 +39,7 @@ describe('IDE branches mutations', () => { ...@@ -39,7 +39,7 @@ describe('IDE branches mutations', () => {
}); });
}); });
describe(types.RESET_BRANCHES, () => { describe('RESET_BRANCHES', () => {
it('clears branches array', () => { it('clears branches array', () => {
mockedState.branches = ['test']; mockedState.branches = ['test'];
......
...@@ -19,9 +19,7 @@ describe('IDE merge requests actions', () => { ...@@ -19,9 +19,7 @@ describe('IDE merge requests actions', () => {
beforeEach(() => { beforeEach(() => {
mockedState = state(); mockedState = state();
mockedRootState = { mockedRootState = { currentProjectId: 7 };
currentProjectId: 7,
};
mock = new MockAdapter(axios); mock = new MockAdapter(axios);
}); });
...@@ -54,7 +52,7 @@ describe('IDE merge requests actions', () => { ...@@ -54,7 +52,7 @@ describe('IDE merge requests actions', () => {
type: 'setErrorMessage', type: 'setErrorMessage',
payload: { payload: {
text: 'Error loading merge requests.', text: 'Error loading merge requests.',
action: jasmine.any(Function), action: expect.any(Function),
actionText: 'Please try again', actionText: 'Please try again',
actionPayload: { type: 'created', search: '' }, actionPayload: { type: 'created', search: '' },
}, },
...@@ -71,12 +69,7 @@ describe('IDE merge requests actions', () => { ...@@ -71,12 +69,7 @@ describe('IDE merge requests actions', () => {
receiveMergeRequestsSuccess, receiveMergeRequestsSuccess,
mergeRequests, mergeRequests,
mockedState, mockedState,
[ [{ type: types.RECEIVE_MERGE_REQUESTS_SUCCESS, payload: mergeRequests }],
{
type: types.RECEIVE_MERGE_REQUESTS_SUCCESS,
payload: mergeRequests,
},
],
[], [],
done, done,
); );
...@@ -94,36 +87,34 @@ describe('IDE merge requests actions', () => { ...@@ -94,36 +87,34 @@ describe('IDE merge requests actions', () => {
}); });
it('calls API with params', () => { it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough(); const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests( fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState }, {
{ type: 'created' }, dispatch() {},
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), { state: mockedState,
params: { rootState: mockedRootState,
scope: 'created-by-me',
state: 'opened',
search: '',
}, },
{ type: 'created' },
);
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: { scope: 'created-by-me', state: 'opened', search: '' },
}); });
}); });
it('calls API with search', () => { it('calls API with search', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough(); const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests( fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState }, {
{ type: 'created', search: 'testing search' }, dispatch() {},
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), { state: mockedState,
params: { rootState: mockedRootState,
scope: 'created-by-me',
state: 'opened',
search: 'testing search',
}, },
{ type: 'created', search: 'testing search' },
);
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: { scope: 'created-by-me', state: 'opened', search: 'testing search' },
}); });
}); });
...@@ -136,10 +127,7 @@ describe('IDE merge requests actions', () => { ...@@ -136,10 +127,7 @@ describe('IDE merge requests actions', () => {
[ [
{ type: 'requestMergeRequests' }, { type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' }, { type: 'resetMergeRequests' },
{ { type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
type: 'receiveMergeRequestsSuccess',
payload: mergeRequests,
},
], ],
done, done,
); );
...@@ -152,21 +140,19 @@ describe('IDE merge requests actions', () => { ...@@ -152,21 +140,19 @@ describe('IDE merge requests actions', () => {
}); });
it('calls API with project', () => { it('calls API with project', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough(); const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests( fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState }, {
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: null, search: 'testing search' }, { type: null, search: 'testing search' },
); );
expect(apiSpy).toHaveBeenCalledWith( expect(apiSpy).toHaveBeenCalledWith(
jasmine.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`), expect.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
{ { params: { state: 'opened', search: 'testing search' } },
params: {
state: 'opened',
search: 'testing search',
},
},
); );
}); });
...@@ -179,10 +165,7 @@ describe('IDE merge requests actions', () => { ...@@ -179,10 +165,7 @@ describe('IDE merge requests actions', () => {
[ [
{ type: 'requestMergeRequests' }, { type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' }, { type: 'resetMergeRequests' },
{ { type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
type: 'receiveMergeRequestsSuccess',
payload: mergeRequests,
},
], ],
done, done,
); );
......
import { TEST_HOST } from 'helpers/test_constants';
import state from '~/ide/stores/modules/merge_requests/state'; import state from '~/ide/stores/modules/merge_requests/state';
import mutations from '~/ide/stores/modules/merge_requests/mutations'; import mutations from '~/ide/stores/modules/merge_requests/mutations';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types'; import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
...@@ -10,7 +11,7 @@ describe('IDE merge requests mutations', () => { ...@@ -10,7 +11,7 @@ describe('IDE merge requests mutations', () => {
mockedState = state(); mockedState = state();
}); });
describe(types.REQUEST_MERGE_REQUESTS, () => { describe('REQUEST_MERGE_REQUESTS', () => {
it('sets loading to true', () => { it('sets loading to true', () => {
mutations[types.REQUEST_MERGE_REQUESTS](mockedState); mutations[types.REQUEST_MERGE_REQUESTS](mockedState);
...@@ -18,7 +19,7 @@ describe('IDE merge requests mutations', () => { ...@@ -18,7 +19,7 @@ describe('IDE merge requests mutations', () => {
}); });
}); });
describe(types.RECEIVE_MERGE_REQUESTS_ERROR, () => { describe('RECEIVE_MERGE_REQUESTS_ERROR', () => {
it('sets loading to false', () => { it('sets loading to false', () => {
mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState); mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState);
...@@ -26,9 +27,9 @@ describe('IDE merge requests mutations', () => { ...@@ -26,9 +27,9 @@ describe('IDE merge requests mutations', () => {
}); });
}); });
describe(types.RECEIVE_MERGE_REQUESTS_SUCCESS, () => { describe('RECEIVE_MERGE_REQUESTS_SUCCESS', () => {
it('sets merge requests', () => { it('sets merge requests', () => {
gon.gitlab_url = gl.TEST_HOST; gon.gitlab_url = TEST_HOST;
mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, mergeRequests); mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, mergeRequests);
expect(mockedState.mergeRequests).toEqual([ expect(mockedState.mergeRequests).toEqual([
...@@ -43,7 +44,7 @@ describe('IDE merge requests mutations', () => { ...@@ -43,7 +44,7 @@ describe('IDE merge requests mutations', () => {
}); });
}); });
describe(types.RESET_MERGE_REQUESTS, () => { describe('RESET_MERGE_REQUESTS', () => {
it('clears merge request array', () => { it('clears merge request array', () => {
mockedState.mergeRequests = ['test']; mockedState.mergeRequests = ['test'];
......
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';
import * as types from '~/ide/stores/modules/pane/mutation_types';
// Unit specs for the IDE right-pane Vuex module actions (toggle/open/close).
describe('IDE pane module actions', () => {
  // Fixture views: a plain one, and one flagged to stay alive when hidden.
  const VIEW = { name: 'test' };
  const VIEW_WITH_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };

  describe('toggleOpen', () => {
    it('dispatches open if closed', done => {
      const expectedDispatches = [{ type: 'open', payload: VIEW }];

      testAction(actions.toggleOpen, VIEW, { isOpen: false }, [], expectedDispatches, done);
    });

    it('dispatches close if opened', done => {
      testAction(actions.toggleOpen, VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
    });
  });

  describe('open', () => {
    it('commits SET_OPEN', done => {
      testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
    });

    it('commits SET_CURRENT_VIEW if view is given', done => {
      const expectedMutations = [
        { type: types.SET_OPEN, payload: true },
        { type: types.SET_CURRENT_VIEW, payload: VIEW.name },
      ];

      testAction(actions.open, VIEW, {}, expectedMutations, [], done);
    });

    it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
      // keepAlive views additionally register themselves so the component
      // is cached instead of destroyed when the pane switches views.
      const expectedMutations = [
        { type: types.SET_OPEN, payload: true },
        { type: types.SET_CURRENT_VIEW, payload: VIEW_WITH_KEEP_ALIVE.name },
        { type: types.KEEP_ALIVE_VIEW, payload: VIEW_WITH_KEEP_ALIVE.name },
      ];

      testAction(actions.open, VIEW_WITH_KEEP_ALIVE, {}, expectedMutations, [], done);
    });
  });

  describe('close', () => {
    it('commits SET_OPEN', done => {
      testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
    });
  });
});
import Visibility from 'visibilityjs'; import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter'; import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import { import {
requestLatestPipeline, requestLatestPipeline,
...@@ -78,7 +79,7 @@ describe('IDE pipelines actions', () => { ...@@ -78,7 +79,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage', type: 'setErrorMessage',
payload: { payload: {
text: 'An error occurred whilst fetching the latest pipeline.', text: 'An error occurred whilst fetching the latest pipeline.',
action: jasmine.any(Function), action: expect.any(Function),
actionText: 'Please try again', actionText: 'Please try again',
actionPayload: null, actionPayload: null,
}, },
...@@ -91,38 +92,28 @@ describe('IDE pipelines actions', () => { ...@@ -91,38 +92,28 @@ describe('IDE pipelines actions', () => {
}); });
describe('receiveLatestPipelineSuccess', () => { describe('receiveLatestPipelineSuccess', () => {
const rootGetters = { const rootGetters = { lastCommit: { id: '123' } };
lastCommit: { id: '123' },
};
let commit; let commit;
beforeEach(() => { beforeEach(() => {
commit = jasmine.createSpy('commit'); commit = jest.fn().mockName('commit');
}); });
it('commits pipeline', () => { it('commits pipeline', () => {
receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines }); receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines });
expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, pipelines[0]);
expect(commit.calls.argsFor(0)).toEqual([
types.RECEIVE_LASTEST_PIPELINE_SUCCESS,
pipelines[0],
]);
}); });
it('commits false when there are no pipelines', () => { it('commits false when there are no pipelines', () => {
receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines: [] }); receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines: [] });
expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false);
expect(commit.calls.argsFor(0)).toEqual([types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false]);
}); });
}); });
describe('fetchLatestPipeline', () => { describe('fetchLatestPipeline', () => {
beforeEach(() => { beforeEach(() => {});
jasmine.clock().install();
});
afterEach(() => { afterEach(() => {
jasmine.clock().uninstall();
stopPipelinePolling(); stopPipelinePolling();
clearEtagPoll(); clearEtagPoll();
}); });
...@@ -135,10 +126,10 @@ describe('IDE pipelines actions', () => { ...@@ -135,10 +126,10 @@ describe('IDE pipelines actions', () => {
}); });
it('dispatches request', done => { it('dispatches request', done => {
spyOn(axios, 'get').and.callThrough(); jest.spyOn(axios, 'get');
spyOn(Visibility, 'hidden').and.returnValue(false); jest.spyOn(Visibility, 'hidden').mockReturnValue(false);
const dispatch = jasmine.createSpy('dispatch'); const dispatch = jest.fn().mockName('dispatch');
const rootGetters = { const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' }, lastCommit: { id: 'abc123def456ghi789jkl' },
currentProject: { path_with_namespace: 'abc/def' }, currentProject: { path_with_namespace: 'abc/def' },
...@@ -146,31 +137,29 @@ describe('IDE pipelines actions', () => { ...@@ -146,31 +137,29 @@ describe('IDE pipelines actions', () => {
fetchLatestPipeline({ dispatch, rootGetters }); fetchLatestPipeline({ dispatch, rootGetters });
expect(dispatch.calls.argsFor(0)).toEqual(['requestLatestPipeline']); expect(dispatch).toHaveBeenCalledWith('requestLatestPipeline');
jasmine.clock().tick(1000); jest.advanceTimersByTime(1000);
new Promise(resolve => requestAnimationFrame(resolve)) new Promise(resolve => requestAnimationFrame(resolve))
.then(() => { .then(() => {
expect(axios.get).toHaveBeenCalled(); expect(axios.get).toHaveBeenCalled();
expect(axios.get.calls.count()).toBe(1); expect(axios.get).toHaveBeenCalledTimes(1);
expect(dispatch).toHaveBeenCalledWith(
expect(dispatch.calls.argsFor(1)).toEqual([
'receiveLatestPipelineSuccess', 'receiveLatestPipelineSuccess',
jasmine.anything(), expect.anything(),
]); );
jasmine.clock().tick(10000); jest.advanceTimersByTime(10000);
}) })
.then(() => new Promise(resolve => requestAnimationFrame(resolve))) .then(() => new Promise(resolve => requestAnimationFrame(resolve)))
.then(() => { .then(() => {
expect(axios.get).toHaveBeenCalled(); expect(axios.get).toHaveBeenCalled();
expect(axios.get.calls.count()).toBe(2); expect(axios.get).toHaveBeenCalledTimes(2);
expect(dispatch).toHaveBeenCalledWith(
expect(dispatch.calls.argsFor(2)).toEqual([
'receiveLatestPipelineSuccess', 'receiveLatestPipelineSuccess',
jasmine.anything(), expect.anything(),
]); );
}) })
.then(done) .then(done)
.catch(done.fail); .catch(done.fail);
...@@ -183,7 +172,7 @@ describe('IDE pipelines actions', () => { ...@@ -183,7 +172,7 @@ describe('IDE pipelines actions', () => {
}); });
it('dispatches error', done => { it('dispatches error', done => {
const dispatch = jasmine.createSpy('dispatch'); const dispatch = jest.fn().mockName('dispatch');
const rootGetters = { const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' }, lastCommit: { id: 'abc123def456ghi789jkl' },
currentProject: { path_with_namespace: 'abc/def' }, currentProject: { path_with_namespace: 'abc/def' },
...@@ -191,14 +180,11 @@ describe('IDE pipelines actions', () => { ...@@ -191,14 +180,11 @@ describe('IDE pipelines actions', () => {
fetchLatestPipeline({ dispatch, rootGetters }); fetchLatestPipeline({ dispatch, rootGetters });
jasmine.clock().tick(1500); jest.advanceTimersByTime(1500);
new Promise(resolve => requestAnimationFrame(resolve)) new Promise(resolve => requestAnimationFrame(resolve))
.then(() => { .then(() => {
expect(dispatch.calls.argsFor(1)).toEqual([ expect(dispatch).toHaveBeenCalledWith('receiveLatestPipelineError', expect.anything());
'receiveLatestPipelineError',
jasmine.anything(),
]);
}) })
.then(done) .then(done)
.catch(done.fail); .catch(done.fail);
...@@ -224,7 +210,7 @@ describe('IDE pipelines actions', () => { ...@@ -224,7 +210,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage', type: 'setErrorMessage',
payload: { payload: {
text: 'An error occurred whilst loading the pipelines jobs.', text: 'An error occurred whilst loading the pipelines jobs.',
action: jasmine.anything(), action: expect.anything(),
actionText: 'Please try again', actionText: 'Please try again',
actionPayload: { id: 1 }, actionPayload: { id: 1 },
}, },
...@@ -249,10 +235,7 @@ describe('IDE pipelines actions', () => { ...@@ -249,10 +235,7 @@ describe('IDE pipelines actions', () => {
}); });
describe('fetchJobs', () => { describe('fetchJobs', () => {
const stage = { const stage = { id: 1, dropdownPath: `${TEST_HOST}/jobs` };
id: 1,
dropdownPath: `${gl.TEST_HOST}/jobs`,
};
describe('success', () => { describe('success', () => {
beforeEach(() => { beforeEach(() => {
...@@ -361,7 +344,7 @@ describe('IDE pipelines actions', () => { ...@@ -361,7 +344,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage', type: 'setErrorMessage',
payload: { payload: {
text: 'An error occurred whilst fetching the job trace.', text: 'An error occurred whilst fetching the job trace.',
action: jasmine.any(Function), action: expect.any(Function),
actionText: 'Please try again', actionText: 'Please try again',
actionPayload: null, actionPayload: null,
}, },
...@@ -387,15 +370,13 @@ describe('IDE pipelines actions', () => { ...@@ -387,15 +370,13 @@ describe('IDE pipelines actions', () => {
describe('fetchJobTrace', () => { describe('fetchJobTrace', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { mockedState.detailJob = { path: `${TEST_HOST}/project/builds` };
path: `${gl.TEST_HOST}/project/builds`,
};
}); });
describe('success', () => { describe('success', () => {
beforeEach(() => { beforeEach(() => {
spyOn(axios, 'get').and.callThrough(); jest.spyOn(axios, 'get');
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' }); mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
}); });
it('dispatches request', done => { it('dispatches request', done => {
...@@ -413,9 +394,12 @@ describe('IDE pipelines actions', () => { ...@@ -413,9 +394,12 @@ describe('IDE pipelines actions', () => {
}); });
it('sends get request to correct URL', () => { it('sends get request to correct URL', () => {
fetchJobTrace({ state: mockedState, dispatch() {} }); fetchJobTrace({
state: mockedState,
expect(axios.get).toHaveBeenCalledWith(`${gl.TEST_HOST}/project/builds/trace`, { dispatch() {},
});
expect(axios.get).toHaveBeenCalledWith(`${TEST_HOST}/project/builds/trace`, {
params: { format: 'json' }, params: { format: 'json' },
}); });
}); });
...@@ -423,7 +407,7 @@ describe('IDE pipelines actions', () => { ...@@ -423,7 +407,7 @@ describe('IDE pipelines actions', () => {
describe('error', () => { describe('error', () => {
beforeEach(() => { beforeEach(() => {
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(500); mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(500);
}); });
it('dispatches error', done => { it('dispatches error', done => {
......
...@@ -10,7 +10,7 @@ describe('IDE pipelines mutations', () => { ...@@ -10,7 +10,7 @@ describe('IDE pipelines mutations', () => {
mockedState = state(); mockedState = state();
}); });
describe(types.REQUEST_LATEST_PIPELINE, () => { describe('REQUEST_LATEST_PIPELINE', () => {
it('sets loading to true', () => { it('sets loading to true', () => {
mutations[types.REQUEST_LATEST_PIPELINE](mockedState); mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
...@@ -18,7 +18,7 @@ describe('IDE pipelines mutations', () => { ...@@ -18,7 +18,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => { describe('RECEIVE_LASTEST_PIPELINE_ERROR', () => {
it('sets loading to false', () => { it('sets loading to false', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState); mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
...@@ -26,7 +26,7 @@ describe('IDE pipelines mutations', () => { ...@@ -26,7 +26,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => { describe('RECEIVE_LASTEST_PIPELINE_SUCCESS', () => {
const itSetsPipelineLoadingStates = () => { const itSetsPipelineLoadingStates = () => {
it('sets has loaded to true', () => { it('sets has loaded to true', () => {
expect(mockedState.hasLoadedPipeline).toBe(true); expect(mockedState.hasLoadedPipeline).toBe(true);
...@@ -52,7 +52,7 @@ describe('IDE pipelines mutations', () => { ...@@ -52,7 +52,7 @@ describe('IDE pipelines mutations', () => {
id: '51', id: '51',
path: 'test', path: 'test',
commit: { id: '123' }, commit: { id: '123' },
details: { status: jasmine.any(Object) }, details: { status: expect.any(Object) },
yamlError: undefined, yamlError: undefined,
}); });
}); });
...@@ -95,12 +95,9 @@ describe('IDE pipelines mutations', () => { ...@@ -95,12 +95,9 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.REQUEST_JOBS, () => { describe('REQUEST_JOBS', () => {
beforeEach(() => { beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({ mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
...stage,
id: i,
}));
}); });
it('sets isLoading on stage', () => { it('sets isLoading on stage', () => {
...@@ -110,12 +107,9 @@ describe('IDE pipelines mutations', () => { ...@@ -110,12 +107,9 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_JOBS_ERROR, () => { describe('RECEIVE_JOBS_ERROR', () => {
beforeEach(() => { beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({ mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
...stage,
id: i,
}));
}); });
it('sets isLoading on stage after error', () => { it('sets isLoading on stage after error', () => {
...@@ -125,29 +119,22 @@ describe('IDE pipelines mutations', () => { ...@@ -125,29 +119,22 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_JOBS_SUCCESS, () => { describe('RECEIVE_JOBS_SUCCESS', () => {
let data; let data;
beforeEach(() => { beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({ mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
...stage,
id: i,
}));
data = { data = { latest_statuses: [...jobs] };
latest_statuses: [...jobs],
};
}); });
it('updates loading', () => { it('updates loading', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data }); mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].isLoading).toBe(false); expect(mockedState.stages[0].isLoading).toBe(false);
}); });
it('sets jobs on stage', () => { it('sets jobs on stage', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data }); mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].jobs.length).toBe(jobs.length); expect(mockedState.stages[0].jobs.length).toBe(jobs.length);
expect(mockedState.stages[0].jobs).toEqual( expect(mockedState.stages[0].jobs).toEqual(
jobs.map(job => ({ jobs.map(job => ({
...@@ -164,13 +151,9 @@ describe('IDE pipelines mutations', () => { ...@@ -164,13 +151,9 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.TOGGLE_STAGE_COLLAPSE, () => { describe('TOGGLE_STAGE_COLLAPSE', () => {
beforeEach(() => { beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({ mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i, isCollapsed: false }));
...stage,
id: i,
isCollapsed: false,
}));
}); });
it('toggles collapsed state', () => { it('toggles collapsed state', () => {
...@@ -184,7 +167,7 @@ describe('IDE pipelines mutations', () => { ...@@ -184,7 +167,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.SET_DETAIL_JOB, () => { describe('SET_DETAIL_JOB', () => {
it('sets detail job', () => { it('sets detail job', () => {
mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]); mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]);
...@@ -192,7 +175,7 @@ describe('IDE pipelines mutations', () => { ...@@ -192,7 +175,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.REQUEST_JOB_TRACE, () => { describe('REQUEST_JOB_TRACE', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { ...jobs[0] }; mockedState.detailJob = { ...jobs[0] };
}); });
...@@ -204,7 +187,7 @@ describe('IDE pipelines mutations', () => { ...@@ -204,7 +187,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_JOB_TRACE_ERROR, () => { describe('RECEIVE_JOB_TRACE_ERROR', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true }; mockedState.detailJob = { ...jobs[0], isLoading: true };
}); });
...@@ -216,14 +199,13 @@ describe('IDE pipelines mutations', () => { ...@@ -216,14 +199,13 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_JOB_TRACE_SUCCESS, () => { describe('RECEIVE_JOB_TRACE_SUCCESS', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true }; mockedState.detailJob = { ...jobs[0], isLoading: true };
}); });
it('sets output on detail job', () => { it('sets output on detail job', () => {
mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' }); mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' });
expect(mockedState.detailJob.output).toBe('html'); expect(mockedState.detailJob.output).toBe('html');
expect(mockedState.detailJob.isLoading).toBe(false); expect(mockedState.detailJob.isLoading).toBe(false);
}); });
......
...@@ -9,10 +9,7 @@ describe('IDE store file mutations', () => { ...@@ -9,10 +9,7 @@ describe('IDE store file mutations', () => {
beforeEach(() => { beforeEach(() => {
localState = state(); localState = state();
localFile = { localFile = { ...file(), type: 'blob' };
...file(),
type: 'blob',
};
localState.entries[localFile.path] = localFile; localState.entries[localFile.path] = localFile;
}); });
...@@ -28,11 +25,7 @@ describe('IDE store file mutations', () => { ...@@ -28,11 +25,7 @@ describe('IDE store file mutations', () => {
}); });
it('sets pending tab as not active', () => { it('sets pending tab as not active', () => {
localState.openFiles.push({ localState.openFiles.push({ ...localFile, pending: true, active: true });
...localFile,
pending: true,
active: true,
});
mutations.SET_FILE_ACTIVE(localState, { mutations.SET_FILE_ACTIVE(localState, {
path: localFile.path, path: localFile.path,
...@@ -132,7 +125,7 @@ describe('IDE store file mutations', () => { ...@@ -132,7 +125,7 @@ describe('IDE store file mutations', () => {
localFile, localFile,
].forEach(f => { ].forEach(f => {
expect(f).toEqual( expect(f).toEqual(
jasmine.objectContaining({ expect.objectContaining({
path, path,
name, name,
raw: null, raw: null,
...@@ -154,10 +147,7 @@ describe('IDE store file mutations', () => { ...@@ -154,10 +147,7 @@ describe('IDE store file mutations', () => {
}); });
it('adds raw data to open pending file', () => { it('adds raw data to open pending file', () => {
localState.openFiles.push({ localState.openFiles.push({ ...localFile, pending: true });
...localFile,
pending: true,
});
mutations.SET_FILE_RAW_DATA(localState, { mutations.SET_FILE_RAW_DATA(localState, {
file: localFile, file: localFile,
...@@ -168,11 +158,7 @@ describe('IDE store file mutations', () => { ...@@ -168,11 +158,7 @@ describe('IDE store file mutations', () => {
}); });
it('does not add raw data to open pending tempFile file', () => { it('does not add raw data to open pending tempFile file', () => {
localState.openFiles.push({ localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
...localFile,
pending: true,
tempFile: true,
});
mutations.SET_FILE_RAW_DATA(localState, { mutations.SET_FILE_RAW_DATA(localState, {
file: localFile, file: localFile,
...@@ -234,7 +220,9 @@ describe('IDE store file mutations', () => { ...@@ -234,7 +220,9 @@ describe('IDE store file mutations', () => {
it('sets file mr change', () => { it('sets file mr change', () => {
mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, { mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, {
file: localFile, file: localFile,
mrChange: { diff: 'ABC' }, mrChange: {
diff: 'ABC',
},
}); });
expect(localFile.mrChange.diff).toBe('ABC'); expect(localFile.mrChange.diff).toBe('ABC');
...@@ -311,12 +299,7 @@ describe('IDE store file mutations', () => { ...@@ -311,12 +299,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path); mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.trees['gitlab-ce/master'].tree).toEqual([ expect(localState.trees['gitlab-ce/master'].tree).toEqual([{ ...localFile, deleted: false }]);
{
...localFile,
deleted: false,
},
]);
}); });
it('adds to parent tree if deleted', () => { it('adds to parent tree if deleted', () => {
...@@ -328,12 +311,7 @@ describe('IDE store file mutations', () => { ...@@ -328,12 +311,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path); mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.entries.parentPath.tree).toEqual([ expect(localState.entries.parentPath.tree).toEqual([{ ...localFile, deleted: false }]);
{
...localFile,
deleted: false,
},
]);
}); });
}); });
...@@ -379,11 +357,7 @@ describe('IDE store file mutations', () => { ...@@ -379,11 +357,7 @@ describe('IDE store file mutations', () => {
let f; let f;
beforeEach(() => { beforeEach(() => {
f = { f = { ...file(), type: 'blob', staged: true };
...file(),
type: 'blob',
staged: true,
};
localState.stagedFiles.push(f); localState.stagedFiles.push(f);
localState.changedFiles.push(f); localState.changedFiles.push(f);
...@@ -422,19 +396,16 @@ describe('IDE store file mutations', () => { ...@@ -422,19 +396,16 @@ describe('IDE store file mutations', () => {
describe('ADD_PENDING_TAB', () => { describe('ADD_PENDING_TAB', () => {
beforeEach(() => { beforeEach(() => {
const f = { const f = { ...file('openFile'), path: 'openFile', active: true, opened: true };
...file('openFile'),
path: 'openFile',
active: true,
opened: true,
};
localState.entries[f.path] = f; localState.entries[f.path] = f;
localState.openFiles.push(f); localState.openFiles.push(f);
}); });
it('adds file into openFiles as pending', () => { it('adds file into openFiles as pending', () => {
mutations.ADD_PENDING_TAB(localState, { file: localFile }); mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].pending).toBe(true); expect(localState.openFiles[0].pending).toBe(true);
...@@ -445,11 +416,15 @@ describe('IDE store file mutations', () => { ...@@ -445,11 +416,15 @@ describe('IDE store file mutations', () => {
const newFile = file('test'); const newFile = file('test');
localState.entries[newFile.path] = newFile; localState.entries[newFile.path] = newFile;
mutations.ADD_PENDING_TAB(localState, { file: localFile }); mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
mutations.ADD_PENDING_TAB(localState, { file: file('test') }); mutations.ADD_PENDING_TAB(localState, {
file: file('test'),
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].name).toBe('test'); expect(localState.openFiles[0].name).toBe('test');
......
...@@ -51,7 +51,9 @@ describe('Multi-file store tree mutations', () => { ...@@ -51,7 +51,9 @@ describe('Multi-file store tree mutations', () => {
}); });
it('keeps loading state', () => { it('keeps loading state', () => {
mutations.CREATE_TREE(localState, { treePath: 'project/master' }); mutations.CREATE_TREE(localState, {
treePath: 'project/master',
});
mutations.SET_DIRECTORY_DATA(localState, { mutations.SET_DIRECTORY_DATA(localState, {
data, data,
treePath: 'project/master', treePath: 'project/master',
......
...@@ -26,15 +26,18 @@ describe('WebIDE utils', () => { ...@@ -26,15 +26,18 @@ describe('WebIDE utils', () => {
entry.deleted = true; entry.deleted = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.deleted); expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.deleted);
}); });
it('renders "addition" icon for temp entries', () => { it('renders "addition" icon for temp entries', () => {
entry.tempFile = true; entry.tempFile = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.addition); expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.addition);
}); });
it('renders "modified" icon for newly-renamed entries', () => { it('renders "modified" icon for newly-renamed entries', () => {
entry.prevPath = 'foo/bar'; entry.prevPath = 'foo/bar';
entry.tempFile = false; entry.tempFile = false;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.modified); expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.modified);
}); });
it('renders "modified" icon even for temp entries if they are newly-renamed', () => { it('renders "modified" icon even for temp entries if they are newly-renamed', () => {
entry.prevPath = 'foo/bar'; entry.prevPath = 'foo/bar';
entry.tempFile = true; entry.tempFile = true;
......
import * as pathUtils from 'path'; export * from '../../frontend/ide/helpers';
import { decorateData } from '~/ide/stores/utils';
import state from '~/ide/stores/state';
import commitState from '~/ide/stores/modules/commit/state';
import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
import pipelinesState from '~/ide/stores/modules/pipelines/state';
import branchesState from '~/ide/stores/modules/branches/state';
import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
import paneState from '~/ide/stores/modules/pane/state';
// Restores the given Vuex store to a pristine IDE state: the root state plus
// a fresh initial state for every registered module (commit, merge requests,
// pipelines, branches, file templates, and the right pane).
export const resetStore = store => {
  store.replaceState({
    ...state(),
    commit: commitState(),
    mergeRequests: mergeRequestsState(),
    pipelines: pipelinesState(),
    branches: branchesState(),
    fileTemplates: fileTemplatesState(),
    rightPane: paneState(),
  });
};
// Builds a decorated file entry fixture for specs. When `parent` (another
// entry) is given, its path prefixes this entry's path; `id` defaults to the
// name and `type` to an empty string.
export const file = (name = 'name', id = name, type = '', parent = null) => {
  const path = parent ? `${parent.path}/${name}` : name;
  const parentPath = parent ? parent.path : '';

  return decorateData({
    id,
    type,
    icon: 'icon',
    url: 'url',
    name,
    path,
    parentPath,
    lastCommit: {},
  });
};
// Turns a list of repo paths into a keyed map of mock entries.
// Entries with an extension become blobs, the rest trees; a path's dirname is
// looked up among the entries created so far to wire up the parent, so parent
// directories must appear in `paths` before their children. On a duplicate
// path the earlier entry is kept, matching the original spread semantics.
export const createEntriesFromPaths = paths => {
  const parsed = paths.map(path => ({
    name: pathUtils.basename(path),
    dir: pathUtils.dirname(path),
    ext: pathUtils.extname(path),
  }));

  let entries = {};

  parsed.forEach(({ name, dir, ext }, idx) => {
    const parent = dir ? entries[dir] : null;
    const entry = file(name, (idx + 1).toString(), ext ? 'blob' : 'tree', parent);

    // New key first, existing entries spread after it: an already-present
    // path wins over the new entry, and reverse insertion order is preserved.
    entries = {
      [entry.path]: entry,
      ...entries,
    };
  });

  return entries;
};
import testAction from 'spec/helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';
import * as types from '~/ide/stores/modules/pane/mutation_types';
// Specs for the IDE right-pane Vuex actions: `toggleOpen` dispatches to
// open/close based on current state, while open/close commit the mutations
// directly. Each case is expressed through `testAction(action, payload,
// state, expectedMutations, expectedDispatches, done)`.
describe('IDE pane module actions', () => {
  const TEST_VIEW = { name: 'test' };
  const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };

  describe('toggleOpen', () => {
    it('dispatches open if closed', done => {
      const expectedDispatches = [{ type: 'open', payload: TEST_VIEW }];

      testAction(actions.toggleOpen, TEST_VIEW, { isOpen: false }, [], expectedDispatches, done);
    });

    it('dispatches close if opened', done => {
      const expectedDispatches = [{ type: 'close' }];

      testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], expectedDispatches, done);
    });
  });

  describe('open', () => {
    it('commits SET_OPEN', done => {
      const expectedMutations = [{ type: types.SET_OPEN, payload: true }];

      testAction(actions.open, null, {}, expectedMutations, [], done);
    });

    it('commits SET_CURRENT_VIEW if view is given', done => {
      const expectedMutations = [
        { type: types.SET_OPEN, payload: true },
        { type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
      ];

      testAction(actions.open, TEST_VIEW, {}, expectedMutations, [], done);
    });

    it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
      const expectedMutations = [
        { type: types.SET_OPEN, payload: true },
        { type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
        { type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
      ];

      testAction(actions.open, TEST_VIEW_KEEP_ALIVE, {}, expectedMutations, [], done);
    });
  });

  describe('close', () => {
    it('commits SET_OPEN', done => {
      const expectedMutations = [{ type: types.SET_OPEN, payload: false }];

      testAction(actions.close, null, {}, expectedMutations, [], done);
    });
  });
});
...@@ -136,7 +136,9 @@ describe Gitlab::Auth::LDAP::Access do ...@@ -136,7 +136,9 @@ describe Gitlab::Auth::LDAP::Access do
context 'without ActiveDirectory enabled' do context 'without ActiveDirectory enabled' do
before do before do
allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:active_directory).and_return(false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:active_directory).and_return(false)
end
end end
it 'returns true' do it 'returns true' do
......
...@@ -58,7 +58,9 @@ describe Gitlab::Auth::LDAP::AuthHash do ...@@ -58,7 +58,9 @@ describe Gitlab::Auth::LDAP::AuthHash do
end end
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:attributes).and_return(attributes) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:attributes).and_return(attributes)
end
end end
it "has the correct username" do it "has the correct username" do
......
...@@ -18,8 +18,9 @@ describe Gitlab::Auth::LDAP::Authentication do ...@@ -18,8 +18,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call # try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:adapter).and_return(adapter) allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_truthy expect(described_class.login(login, password)).to be_truthy
end end
...@@ -27,8 +28,9 @@ describe Gitlab::Auth::LDAP::Authentication do ...@@ -27,8 +28,9 @@ describe Gitlab::Auth::LDAP::Authentication do
it "is false if the user does not exist" do it "is false if the user does not exist" do
# try only to fake the LDAP call # try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:adapter).and_return(adapter) allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey expect(described_class.login(login, password)).to be_falsey
end end
...@@ -38,8 +40,9 @@ describe Gitlab::Auth::LDAP::Authentication do ...@@ -38,8 +40,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call # try only to fake the LDAP call
adapter = double('adapter', bind_as: nil).as_null_object adapter = double('adapter', bind_as: nil).as_null_object
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:adapter).and_return(adapter) allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey expect(described_class.login(login, password)).to be_falsey
end end
......
...@@ -396,7 +396,9 @@ describe Gitlab::Auth::OAuth::User do ...@@ -396,7 +396,9 @@ describe Gitlab::Auth::OAuth::User do
context "and no account for the LDAP user" do context "and no account for the LDAP user" do
context 'dont block on create (LDAP)' do context 'dont block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end end
it do it do
...@@ -408,7 +410,9 @@ describe Gitlab::Auth::OAuth::User do ...@@ -408,7 +410,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do context 'block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end end
it do it do
...@@ -424,7 +428,9 @@ describe Gitlab::Auth::OAuth::User do ...@@ -424,7 +428,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do context 'dont block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end end
it do it do
...@@ -436,7 +442,9 @@ describe Gitlab::Auth::OAuth::User do ...@@ -436,7 +442,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do context 'block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end end
it do it do
...@@ -480,7 +488,9 @@ describe Gitlab::Auth::OAuth::User do ...@@ -480,7 +488,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do context 'dont block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end end
it do it do
...@@ -492,7 +502,9 @@ describe Gitlab::Auth::OAuth::User do ...@@ -492,7 +502,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do context 'block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end end
it do it do
......
...@@ -75,7 +75,9 @@ describe Gitlab::BareRepositoryImport::Importer, :seed_helper do ...@@ -75,7 +75,9 @@ describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
end end
it 'does not schedule an import' do it 'does not schedule an import' do
expect_any_instance_of(Project).not_to receive(:import_schedule) expect_next_instance_of(Project) do |instance|
expect(instance).not_to receive(:import_schedule)
end
importer.create_project_if_needed importer.create_project_if_needed
end end
......
...@@ -9,7 +9,9 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do ...@@ -9,7 +9,9 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
describe '.load_for_project' do describe '.load_for_project' do
it "loads the status" do it "loads the status" do
expect_any_instance_of(described_class).to receive(:load_status) expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:load_status)
end
described_class.load_for_project(project) described_class.load_for_project(project)
end end
......
...@@ -32,7 +32,9 @@ describe Gitlab::Checks::BranchCheck do ...@@ -32,7 +32,9 @@ describe Gitlab::Checks::BranchCheck do
end end
it 'raises an error if the user is not allowed to merge to protected branches' do it 'raises an error if the user is not allowed to merge to protected branches' do
expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true) expect_next_instance_of(Gitlab::Checks::MatchingMergeRequest) do |instance|
expect(instance).to receive(:match?).and_return(true)
end
expect(user_access).to receive(:can_merge_to_branch?).and_return(false) expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
expect(user_access).to receive(:can_push_to_branch?).and_return(false) expect(user_access).to receive(:can_push_to_branch?).and_return(false)
......
...@@ -14,31 +14,41 @@ describe Gitlab::Checks::ChangeAccess do ...@@ -14,31 +14,41 @@ describe Gitlab::Checks::ChangeAccess do
end end
it 'calls pushes checks' do it 'calls pushes checks' do
expect_any_instance_of(Gitlab::Checks::PushCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::PushCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls branches checks' do it 'calls branches checks' do
expect_any_instance_of(Gitlab::Checks::BranchCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::BranchCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls tags checks' do it 'calls tags checks' do
expect_any_instance_of(Gitlab::Checks::TagCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::TagCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls lfs checks' do it 'calls lfs checks' do
expect_any_instance_of(Gitlab::Checks::LfsCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls diff checks' do it 'calls diff checks' do
expect_any_instance_of(Gitlab::Checks::DiffCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::DiffCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
......
...@@ -12,12 +12,16 @@ describe Gitlab::Ci::Build::Credentials::Factory do ...@@ -12,12 +12,16 @@ describe Gitlab::Ci::Build::Credentials::Factory do
end end
before do before do
allow_any_instance_of(described_class).to receive(:providers).and_return([TestProvider]) allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:providers).and_return([TestProvider])
end
end end
context 'when provider is valid' do context 'when provider is valid' do
before do before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(true) allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(true)
end
end end
it 'generates an array of credentials objects' do it 'generates an array of credentials objects' do
...@@ -29,7 +33,9 @@ describe Gitlab::Ci::Build::Credentials::Factory do ...@@ -29,7 +33,9 @@ describe Gitlab::Ci::Build::Credentials::Factory do
context 'when provider is not valid' do context 'when provider is not valid' do
before do before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(false) allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(false)
end
end end
it 'generates an array without specific credential object' do it 'generates an array without specific credential object' do
......
...@@ -15,8 +15,9 @@ describe Gitlab::Ci::Config::External::File::Project do ...@@ -15,8 +15,9 @@ describe Gitlab::Ci::Config::External::File::Project do
before do before do
project.add_developer(user) project.add_developer(user)
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#matching?' do describe '#matching?' do
...@@ -159,8 +160,8 @@ describe Gitlab::Ci::Config::External::File::Project do ...@@ -159,8 +160,8 @@ describe Gitlab::Ci::Config::External::File::Project do
private private
def stub_project_blob(ref, path) def stub_project_blob(ref, path)
allow_any_instance_of(Repository) allow_next_instance_of(Repository) do |instance|
.to receive(:blob_data_at) allow(instance).to receive(:blob_data_at).with(ref, path) { yield }
.with(ref, path) { yield } end
end end
end end
...@@ -21,8 +21,9 @@ describe Gitlab::Ci::Config::External::File::Remote do ...@@ -21,8 +21,9 @@ describe Gitlab::Ci::Config::External::File::Remote do
end end
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#matching?' do describe '#matching?' do
......
...@@ -14,8 +14,9 @@ describe Gitlab::Ci::Config::External::File::Template do ...@@ -14,8 +14,9 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template_file) { described_class.new(params, context) } let(:template_file) { described_class.new(params, context) }
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#matching?' do describe '#matching?' do
......
...@@ -23,8 +23,9 @@ describe Gitlab::Ci::Config::External::Mapper do ...@@ -23,8 +23,9 @@ describe Gitlab::Ci::Config::External::Mapper do
before do before do
stub_full_request(remote_url).to_return(body: file_content) stub_full_request(remote_url).to_return(body: file_content)
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#process' do describe '#process' do
......
...@@ -8,8 +8,9 @@ describe Gitlab::Ci::Config do ...@@ -8,8 +8,9 @@ describe Gitlab::Ci::Config do
set(:user) { create(:user) } set(:user) { create(:user) }
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
let(:config) do let(:config) do
...@@ -358,18 +359,11 @@ describe Gitlab::Ci::Config do ...@@ -358,18 +359,11 @@ describe Gitlab::Ci::Config do
context "when it takes too long to evaluate includes" do context "when it takes too long to evaluate includes" do
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!).and_call_original
.and_call_original allow(instance).to receive(:set_deadline).with(described_class::TIMEOUT_SECONDS).and_call_original
allow(instance).to receive(:execution_expired?).and_return(true)
allow_any_instance_of(Gitlab::Ci::Config::External::Context) end
.to receive(:set_deadline)
.with(described_class::TIMEOUT_SECONDS)
.and_call_original
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:execution_expired?)
.and_return(true)
end end
it 'raises error TimeoutError' do it 'raises error TimeoutError' do
...@@ -384,9 +378,9 @@ describe Gitlab::Ci::Config do ...@@ -384,9 +378,9 @@ describe Gitlab::Ci::Config do
context 'when context expansion timeout is disabled' do context 'when context expansion timeout is disabled' do
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!).and_call_original
.and_call_original end
allow(Feature) allow(Feature)
.to receive(:enabled?) .to receive(:enabled?)
......
...@@ -81,7 +81,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do ...@@ -81,7 +81,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is protected' do context 'when a ref is protected' do
before do before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(true) allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(true)
end
end end
it 'returns protected builds' do it 'returns protected builds' do
...@@ -91,7 +93,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do ...@@ -91,7 +93,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is not protected' do context 'when a ref is not protected' do
before do before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(false) allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(false)
end
end end
it 'returns unprotected builds' do it 'returns unprotected builds' do
......
...@@ -112,8 +112,9 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -112,8 +112,9 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end end
it 'calls get_chunk only once' do it 'calls get_chunk only once' do
expect_any_instance_of(Gitlab::Ci::Trace::ChunkedIO) expect_next_instance_of(Gitlab::Ci::Trace::ChunkedIO) do |instance|
.to receive(:current_chunk).once.and_call_original expect(instance).to receive(:current_chunk).once.and_call_original
end
chunked_io.each_line { |line| } chunked_io.each_line { |line| }
end end
......
...@@ -9,7 +9,9 @@ shared_examples 'base stage' do ...@@ -9,7 +9,9 @@ shared_examples 'base stage' do
before do before do
allow(stage).to receive(:project_median).and_return(1.12) allow(stage).to receive(:project_median).and_return(1.12)
allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:event_result).and_return({}) allow_next_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher) do |instance|
allow(instance).to receive(:event_result).and_return({})
end
end end
it 'has the median data value' do it 'has the median data value' do
......
...@@ -17,7 +17,9 @@ describe Gitlab::CycleAnalytics::UsageData do ...@@ -17,7 +17,9 @@ describe Gitlab::CycleAnalytics::UsageData do
projects.each_with_index do |project, time| projects.each_with_index do |project, time|
issue = create(:issue, project: project, created_at: (time + 1).hour.ago) issue = create(:issue, project: project, created_at: (time + 1).hour.ago)
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue]) allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
allow(instance).to receive(:issues).and_return([issue])
end
milestone = create(:milestone, project: project) milestone = create(:milestone, project: project)
mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}")
......
...@@ -10,17 +10,25 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do ...@@ -10,17 +10,25 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
describe '#diff_files' do describe '#diff_files' do
it 'does not highlight binary files' do it 'does not highlight binary files' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(false) allow_next_instance_of(Gitlab::Diff::File) do |instance|
allow(instance).to receive(:text?).and_return(false)
end
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines) expect_next_instance_of(Gitlab::Diff::File) do |instance|
expect(instance).not_to receive(:highlighted_diff_lines)
end
diff_files diff_files
end end
it 'does not highlight files marked as undiffable in .gitattributes' do it 'does not highlight files marked as undiffable in .gitattributes' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false) allow_next_instance_of(Gitlab::Diff::File) do |instance|
allow(instance).to receive(:diffable?).and_return(false)
end
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines) expect_next_instance_of(Gitlab::Diff::File) do |instance|
expect(instance).not_to receive(:highlighted_diff_lines)
end
diff_files diff_files
end end
......
...@@ -95,7 +95,9 @@ describe Gitlab::Email::Handler::CreateMergeRequestHandler do ...@@ -95,7 +95,9 @@ describe Gitlab::Email::Handler::CreateMergeRequestHandler do
context "something is wrong" do context "something is wrong" do
context "when the merge request could not be saved" do context "when the merge request could not be saved" do
before do before do
allow_any_instance_of(MergeRequest).to receive(:save).and_return(false) allow_next_instance_of(MergeRequest) do |instance|
allow(instance).to receive(:save).and_return(false)
end
end end
it "raises an InvalidMergeRequestError" do it "raises an InvalidMergeRequestError" do
......
...@@ -38,8 +38,9 @@ describe Gitlab::EtagCaching::Middleware do ...@@ -38,8 +38,9 @@ describe Gitlab::EtagCaching::Middleware do
end end
it 'generates ETag' do it 'generates ETag' do
expect_any_instance_of(Gitlab::EtagCaching::Store) expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
.to receive(:touch).and_return('123') expect(instance).to receive(:touch).and_return('123')
end
middleware.call(build_request(path, if_none_match)) middleware.call(build_request(path, if_none_match))
end end
...@@ -177,9 +178,9 @@ describe Gitlab::EtagCaching::Middleware do ...@@ -177,9 +178,9 @@ describe Gitlab::EtagCaching::Middleware do
'SCRIPT_NAME' => '/relative-gitlab' 'SCRIPT_NAME' => '/relative-gitlab'
} }
expect_any_instance_of(Gitlab::EtagCaching::Store) expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
.to receive(:get).with("/relative-gitlab#{enabled_path}") expect(instance).to receive(:get).with("/relative-gitlab#{enabled_path}").and_return(nil)
.and_return(nil) end
middleware.call(env) middleware.call(env)
end end
...@@ -190,8 +191,9 @@ describe Gitlab::EtagCaching::Middleware do ...@@ -190,8 +191,9 @@ describe Gitlab::EtagCaching::Middleware do
end end
def mock_value_in_store(value) def mock_value_in_store(value)
allow_any_instance_of(Gitlab::EtagCaching::Store) allow_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
.to receive(:get).and_return(value) allow(instance).to receive(:get).and_return(value)
end
end end
def build_request(path, if_none_match) def build_request(path, if_none_match)
......
...@@ -158,7 +158,9 @@ describe Gitlab::Experimentation do ...@@ -158,7 +158,9 @@ describe Gitlab::Experimentation do
context 'the user is part of the control group' do context 'the user is part of the control group' do
before do before do
allow_any_instance_of(described_class).to receive(:experiment_enabled?).with(:test_experiment).and_return(false) allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
end
end end
it 'pushes the right parameters to gon' do it 'pushes the right parameters to gon' do
......
...@@ -20,6 +20,8 @@ describe Gitlab::FogbugzImport::Client do ...@@ -20,6 +20,8 @@ describe Gitlab::FogbugzImport::Client do
end end
def stub_api(users) def stub_api(users)
allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listPeople).and_return(users) allow_next_instance_of(::Fogbugz::Interface) do |instance|
allow(instance).to receive(:command).with(:listPeople).and_return(users)
end
end end
end end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::FogbugzImport::Importer do
  let(:project) { create(:project_empty_repo) }
  let(:importer) { described_class.new(project) }
  let(:repo) do
    instance_double(Gitlab::FogbugzImport::Repository,
      safe_name: 'vim',
      path: 'vim',
      raw_data: '')
  end

  let(:import_data) { { 'repo' => repo } }
  let(:credentials) do
    {
      'fb_session' => {
        'uri' => 'https://testing.fogbugz.com',
        'token' => 'token'
      }
    }
  end

  # Minimal Fogbugz case payloads: `fOpen` is the flag the importer maps to
  # the imported issue's opened/closed state.
  let(:closed_bug) do
    {
      fOpen: 'false',
      sTitle: 'Closed bug',
      sLatestTextSummary: "",
      dtOpened: Time.now.to_s,
      dtLastUpdated: Time.now.to_s,
      events: { event: [] }
    }.with_indifferent_access
  end

  let(:opened_bug) do
    {
      fOpen: 'true',
      sTitle: 'Opened bug',
      sLatestTextSummary: "",
      dtOpened: Time.now.to_s,
      dtLastUpdated: Time.now.to_s,
      events: { event: [] }
    }.with_indifferent_access
  end

  let(:fogbugz_bugs) { [opened_bug, closed_bug] }

  before do
    project.create_import_data(data: import_data, credentials: credentials)

    # Stub out all network traffic: the Fogbugz API client and the category
    # listing. Uses the `*_next_instance_of` helpers for consistency with the
    # suite-wide migration away from `allow_any_instance_of`.
    allow_next_instance_of(::Fogbugz::Interface) do |instance|
      allow(instance).to receive(:command).with(:listCategories).and_return([])
    end
    allow_next_instance_of(Gitlab::FogbugzImport::Client) do |instance|
      allow(instance).to receive(:cases).and_return(fogbugz_bugs)
    end
  end

  it 'imports bugs' do
    expect { importer.execute }.to change { Issue.count }.by(2)
  end

  it 'imports opened bugs' do
    importer.execute
    issue = Issue.where(project_id: project.id).find_by_title(opened_bug[:sTitle])

    expect(issue.state_id).to eq(Issue.available_states[:opened])
  end

  it 'imports closed bugs' do
    importer.execute
    issue = Issue.where(project_id: project.id).find_by_title(closed_bug[:sTitle])

    expect(issue.state_id).to eq(Issue.available_states[:closed])
  end
end
...@@ -134,7 +134,9 @@ describe Gitlab::Git::Blob, :seed_helper do ...@@ -134,7 +134,9 @@ describe Gitlab::Git::Blob, :seed_helper do
describe '.find with Rugged enabled', :enable_rugged do describe '.find with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.find(repository, SeedRepo::Commit::ID, 'files/images/6049019_460s.jpg') described_class.find(repository, SeedRepo::Commit::ID, 'files/images/6049019_460s.jpg')
end end
......
...@@ -176,7 +176,9 @@ describe Gitlab::Git::Commit, :seed_helper do ...@@ -176,7 +176,9 @@ describe Gitlab::Git::Commit, :seed_helper do
describe '.find with Rugged enabled', :enable_rugged do describe '.find with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.find(repository, SeedRepo::Commit::ID) described_class.find(repository, SeedRepo::Commit::ID)
end end
...@@ -438,7 +440,9 @@ describe Gitlab::Git::Commit, :seed_helper do ...@@ -438,7 +440,9 @@ describe Gitlab::Git::Commit, :seed_helper do
it_should_behave_like '.batch_by_oid' it_should_behave_like '.batch_by_oid'
it 'calls out to the Rugged implementation' do it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.batch_by_oid(repository, [SeedRepo::Commit::ID]) described_class.batch_by_oid(repository, [SeedRepo::Commit::ID])
end end
......
...@@ -145,7 +145,9 @@ describe Gitlab::Git::Tree, :seed_helper do ...@@ -145,7 +145,9 @@ describe Gitlab::Git::Tree, :seed_helper do
describe '.where with Rugged enabled', :enable_rugged do describe '.where with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:lookup).with(SeedRepo::Commit::ID) allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:lookup).with(SeedRepo::Commit::ID)
end
described_class.where(repository, SeedRepo::Commit::ID, 'files', false) described_class.where(repository, SeedRepo::Commit::ID, 'files', false)
end end
......
...@@ -730,7 +730,9 @@ describe Gitlab::GitAccess do ...@@ -730,7 +730,9 @@ describe Gitlab::GitAccess do
it 'checks LFS integrity only for first change' do it 'checks LFS integrity only for first change' do
allow(project).to receive(:lfs_enabled?).and_return(true) allow(project).to receive(:lfs_enabled?).and_return(true)
expect_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).exactly(1).times expect_next_instance_of(Gitlab::Checks::LfsIntegrity) do |instance|
expect(instance).to receive(:objects_missing?).exactly(1).times
end
push_access_check push_access_check
end end
......
...@@ -10,10 +10,11 @@ describe Gitlab::GitalyClient::CleanupService do ...@@ -10,10 +10,11 @@ describe Gitlab::GitalyClient::CleanupService do
describe '#apply_bfg_object_map_stream' do describe '#apply_bfg_object_map_stream' do
it 'sends an apply_bfg_object_map_stream message' do it 'sends an apply_bfg_object_map_stream message' do
expect_any_instance_of(Gitaly::CleanupService::Stub) expect_next_instance_of(Gitaly::CleanupService::Stub) do |instance|
.to receive(:apply_bfg_object_map_stream) expect(instance).to receive(:apply_bfg_object_map_stream)
.with(kind_of(Enumerator), kind_of(Hash)) .with(kind_of(Enumerator), kind_of(Hash))
.and_return([]) .and_return([])
end
client.apply_bfg_object_map_stream(StringIO.new) client.apply_bfg_object_map_stream(StringIO.new)
end end
......
...@@ -55,7 +55,9 @@ describe Gitlab::GitalyClient do ...@@ -55,7 +55,9 @@ describe Gitlab::GitalyClient do
it 'returns an empty string when the storage is not found in the response' do it 'returns an empty string when the storage is not found in the response' do
response = double("response") response = double("response")
allow(response).to receive(:storage_statuses).and_return([]) allow(response).to receive(:storage_statuses).and_return([])
allow_any_instance_of(Gitlab::GitalyClient::ServerService).to receive(:info).and_return(response) allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
allow(instance).to receive(:info).and_return(response)
end
expect(described_class.filesystem_id('default')).to eq(nil) expect(described_class.filesystem_id('default')).to eq(nil)
end end
......
...@@ -144,9 +144,9 @@ describe Gitlab::GithubImport::Importer::DiffNoteImporter do ...@@ -144,9 +144,9 @@ describe Gitlab::GithubImport::Importer::DiffNoteImporter do
describe '#find_merge_request_id' do describe '#find_merge_request_id' do
it 'returns a merge request ID' do it 'returns a merge request ID' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder) expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
.to receive(:database_id) expect(instance).to receive(:database_id).and_return(10)
.and_return(10) end
expect(importer.find_merge_request_id).to eq(10) expect(importer.find_merge_request_id).to eq(10)
end end
......
...@@ -74,9 +74,9 @@ describe Gitlab::GithubImport::Importer::LabelLinksImporter do ...@@ -74,9 +74,9 @@ describe Gitlab::GithubImport::Importer::LabelLinksImporter do
describe '#find_target_id' do describe '#find_target_id' do
it 'returns the ID of the issuable to create the label link for' do it 'returns the ID of the issuable to create the label link for' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder) expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
.to receive(:database_id) expect(instance).to receive(:database_id).and_return(10)
.and_return(10) end
expect(importer.find_target_id).to eq(10) expect(importer.find_target_id).to eq(10)
end end
......
...@@ -50,8 +50,9 @@ describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cac ...@@ -50,8 +50,9 @@ describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cac
describe '#build_labels_cache' do describe '#build_labels_cache' do
it 'builds the labels cache' do it 'builds the labels cache' do
expect_any_instance_of(Gitlab::GithubImport::LabelFinder) expect_next_instance_of(Gitlab::GithubImport::LabelFinder) do |instance|
.to receive(:build_cache) expect(instance).to receive(:build_cache)
end
importer.build_labels_cache importer.build_labels_cache
end end
......
...@@ -80,8 +80,9 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis ...@@ -80,8 +80,9 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
describe '#build_milestones_cache' do describe '#build_milestones_cache' do
it 'builds the milestones cache' do it 'builds the milestones cache' do
expect_any_instance_of(Gitlab::GithubImport::MilestoneFinder) expect_next_instance_of(Gitlab::GithubImport::MilestoneFinder) do |instance|
.to receive(:build_cache) expect(instance).to receive(:build_cache)
end
importer.build_milestones_cache importer.build_milestones_cache
end end
......
...@@ -143,9 +143,9 @@ describe Gitlab::GithubImport::Importer::NoteImporter do ...@@ -143,9 +143,9 @@ describe Gitlab::GithubImport::Importer::NoteImporter do
describe '#find_noteable_id' do describe '#find_noteable_id' do
it 'returns the ID of the noteable' do it 'returns the ID of the noteable' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder) expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
.to receive(:database_id) expect(instance).to receive(:database_id).and_return(10)
.and_return(10) end
expect(importer.find_noteable_id).to eq(10) expect(importer.find_noteable_id).to eq(10)
end end
......
...@@ -9,8 +9,9 @@ describe Gitlab::GithubImport::SequentialImporter do ...@@ -9,8 +9,9 @@ describe Gitlab::GithubImport::SequentialImporter do
project = double(:project, id: 1, repository: repository) project = double(:project, id: 1, repository: repository)
importer = described_class.new(project, token: 'foo') importer = described_class.new(project, token: 'foo')
expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
.to receive(:execute) expect(instance).to receive(:execute)
end
described_class::SEQUENTIAL_IMPORTERS.each do |klass| described_class::SEQUENTIAL_IMPORTERS.each do |klass|
instance = double(:instance) instance = double(:instance)
......
...@@ -21,18 +21,24 @@ describe Gitlab::GitlabImport::Client do ...@@ -21,18 +21,24 @@ describe Gitlab::GitlabImport::Client do
it 'uses membership and simple flags' do it 'uses membership and simple flags' do
stub_request('/api/v4/projects?membership=true&page=1&per_page=100&simple=true') stub_request('/api/v4/projects?membership=true&page=1&per_page=100&simple=true')
expect_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([]) expect_next_instance_of(OAuth2::Response) do |instance|
expect(instance).to receive(:parsed).and_return([])
end
expect(client.projects.to_a).to eq [] expect(client.projects.to_a).to eq []
end end
shared_examples 'pagination params' do shared_examples 'pagination params' do
before do before do
allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([]) allow_next_instance_of(OAuth2::Response) do |instance|
allow(instance).to receive(:parsed).and_return([])
end
end end
it 'allows page_limit param' do it 'allows page_limit param' do
allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return(element_list) allow_next_instance_of(OAuth2::Response) do |instance|
allow(instance).to receive(:parsed).and_return(element_list)
end
expect(client).to receive(:lazy_page_iterator).with(hash_including(page_limit: 2)).and_call_original expect(client).to receive(:lazy_page_iterator).with(hash_including(page_limit: 2)).and_call_original
......
...@@ -109,7 +109,9 @@ describe Gitlab::HttpIO do ...@@ -109,7 +109,9 @@ describe Gitlab::HttpIO do
end end
it 'calls get_chunk only once' do it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original expect_next_instance_of(Net::HTTP) do |instance|
expect(instance).to receive(:request).once.and_call_original
end
http_io.each_line { |line| } http_io.each_line { |line| }
end end
......
...@@ -43,7 +43,9 @@ describe Gitlab::RequestContext do ...@@ -43,7 +43,9 @@ describe Gitlab::RequestContext do
let(:ip) { '192.168.1.11' } let(:ip) { '192.168.1.11' }
before do before do
allow_any_instance_of(Rack::Request).to receive(:ip).and_return(ip) allow_next_instance_of(Rack::Request) do |instance|
allow(instance).to receive(:ip).and_return(ip)
end
described_class.new(app).call(env) described_class.new(app).call(env)
end end
......
...@@ -80,6 +80,17 @@ describe Commit do ...@@ -80,6 +80,17 @@ describe Commit do
expect(commit.author).to eq(user) expect(commit.author).to eq(user)
end end
context 'with a user with an unconfirmed e-mail' do
before do
user = create(:user)
create(:email, user: user, email: commit.author_email)
end
it 'returns no user' do
expect(commit.author).to be_nil
end
end
context 'using eager loading' do context 'using eager loading' do
let!(:alice) { create(:user, email: 'alice@example.com') } let!(:alice) { create(:user, email: 'alice@example.com') }
let!(:bob) { create(:user, email: 'hunter2@example.com') } let!(:bob) { create(:user, email: 'hunter2@example.com') }
...@@ -115,7 +126,7 @@ describe Commit do ...@@ -115,7 +126,7 @@ describe Commit do
let!(:commits) { [alice_commit, bob_commit, eve_commit, jeff_commit] } let!(:commits) { [alice_commit, bob_commit, eve_commit, jeff_commit] }
before do before do
create(:email, user: bob, email: 'bob@example.com') create(:email, :confirmed, user: bob, email: 'bob@example.com')
end end
it 'executes only two SQL queries' do it 'executes only two SQL queries' do
...@@ -179,6 +190,32 @@ describe Commit do ...@@ -179,6 +190,32 @@ describe Commit do
end end
end end
describe '#committer' do
context 'with a confirmed e-mail' do
it 'returns the user' do
user = create(:user, email: commit.committer_email)
expect(commit.committer).to eq(user)
end
end
context 'with an unconfirmed e-mail' do
let(:user) { create(:user) }
before do
create(:email, user: user, email: commit.committer_email)
end
it 'returns no user' do
expect(commit.committer).to be_nil
end
it 'returns the user' do
expect(commit.committer(confirmed: false)).to eq(user)
end
end
end
describe '#to_reference' do describe '#to_reference' do
let(:project) { create(:project, :repository, path: 'sample-project') } let(:project) { create(:project, :repository, path: 'sample-project') }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment