Commit 4e375367 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 99ddca0d
<script>
import { mapActions, mapState } from 'vuex';
import _ from 'underscore';
import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import Item from './item.vue';
export default {
......
<script>
import $ from 'jquery';
import { mapActions, mapState } from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
export default {
components: {
......
<script>
import { mapActions, mapGetters, mapState } from 'vuex';
import { GlSkeletonLoading } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import FileRow from '~/vue_shared/components/file_row.vue';
import NavDropdown from './nav_dropdown.vue';
import FileRowExtra from './file_row_extra.vue';
......
<script>
import { mapActions, mapState } from 'vuex';
import _ from 'underscore';
import { GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
import Item from './item.vue';
import TokenedInput from '../shared/tokened_input.vue';
......
<script>
import $ from 'jquery';
import { mapActions, mapState, mapGetters } from 'vuex';
import flash from '~/flash';
import { __, sprintf, s__ } from '~/locale';
import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
import { modalTypes } from '../../constants';
......
<script>
import { listen } from 'codesandbox-api';
import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
export default {
components: {
......
<script>
import { mapActions } from 'vuex';
import { __, sprintf } from '~/locale';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import Icon from '~/vue_shared/components/icon.vue';
......
import $ from 'jquery';
import Vue from 'vue';
import _ from 'underscore';
import { __, sprintf } from '~/locale';
import { visitUrl } from '~/lib/utils/url_utility';
import flash from '~/flash';
import * as types from './mutation_types';
import { decorateFiles } from '../lib/files';
import { stageKeys } from '../constants';
......
......@@ -246,7 +246,7 @@ class Commit
def lazy_author
BatchLoader.for(author_email.downcase).batch do |emails, loader|
users = User.by_any_email(emails).includes(:emails)
users = User.by_any_email(emails, confirmed: true).includes(:emails)
emails.each do |email|
user = users.find { |u| u.any_email?(email) }
......@@ -263,8 +263,8 @@ class Commit
end
request_cache(:author) { author_email.downcase }
def committer
@committer ||= User.find_by_any_email(committer_email)
def committer(confirmed: true)
@committer ||= User.find_by_any_email(committer_email, confirmed: confirmed)
end
def parents
......
---
title: Add nonunique indexes to Labels
merge_request: 21230
author:
type: fixed
---
title: Do not attribute unverified commit e-mails to GitLab users
merge_request: 21214
author:
type: fixed
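A rough, spec-style sketch of the behaviour this entry describes — illustrative only, not part of the commit. It assumes the `:user` and `:email` factories used in the feature spec further below and exercises the `User.find_by_any_email(email, confirmed: true)` lookup that `Commit#committer` now relies on.
# Illustrative sketch (not from the commit): confirmed addresses resolve
# to a user, unconfirmed ones do not.
describe 'confirmed e-mail lookup' do
  let(:user) { create(:user) }

  it 'resolves a confirmed secondary e-mail to the user' do
    create(:email, :confirmed, user: user, email: 'jane@example.com')

    expect(User.find_by_any_email('jane@example.com', confirmed: true)).to eq(user)
  end

  it 'does not resolve an unconfirmed e-mail' do
    create(:email, user: user, email: 'jane@example.com')

    expect(User.find_by_any_email('jane@example.com', confirmed: true)).to be_nil
  end
end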
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddLabelProjectGroupPartialIndexes < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
PROJECT_AND_TITLE = [:project_id, :title]
GROUP_AND_TITLE = [:group_id, :title]
def up
add_concurrent_index :labels, PROJECT_AND_TITLE, unique: false, where: "labels.group_id = null"
add_concurrent_index :labels, GROUP_AND_TITLE, unique: false, where: "labels.project_id = null"
end
def down
remove_concurrent_index :labels, PROJECT_AND_TITLE
remove_concurrent_index :labels, GROUP_AND_TITLE
end
end
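For context (illustrative only, not call sites from this commit): project labels carry a project_id with no group_id, while group labels carry a group_id with no project_id, which is the split the two partial indexes above are scoped to. The `project` and `group` records below are assumed stand-ins.
# Hypothetical lookups showing the two query shapes the partial indexes target;
# `project` and `group` are placeholder records, not from the commit.
Label.where(group_id: nil, project_id: project.id, title: 'bug')  # project labels
Label.where(project_id: nil, group_id: group.id, title: 'bug')    # group labels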
......@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2019_12_04_070713) do
ActiveRecord::Schema.define(version: 2019_12_04_093410) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
......@@ -2228,6 +2228,8 @@ ActiveRecord::Schema.define(version: 2019_12_04_070713) do
t.integer "group_id"
t.integer "cached_markdown_version"
t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true
t.index ["group_id", "title"], name: "index_labels_on_group_id_and_title", where: "(project_id = NULL::integer)"
t.index ["project_id", "title"], name: "index_labels_on_project_id_and_title", where: "(group_id = NULL::integer)"
t.index ["project_id"], name: "index_labels_on_project_id"
t.index ["template"], name: "index_labels_on_template", where: "template"
t.index ["title"], name: "index_labels_on_title"
......
......@@ -61,7 +61,7 @@ describe 'Member autocomplete', :js do
before do
allow(User).to receive(:find_by_any_email)
.with(noteable.author_email.downcase).and_return(author)
.with(noteable.author_email.downcase, confirmed: true).and_return(author)
visit project_commit_path(project, noteable)
end
......
......@@ -76,16 +76,23 @@ describe 'User browses commits' do
end
context 'secondary email' do
let(:user) { create(:user) }
it 'finds a commit by a secondary email' do
user =
create(:user) do |user|
create(:email, { user: user, email: 'dmitriy.zaporozhets@gmail.com' })
end
create(:email, :confirmed, user: user, email: 'dmitriy.zaporozhets@gmail.com')
visit(project_commit_path(project, sample_commit.parent_id))
check_author_link(sample_commit.author_email, user)
end
it 'links to an unverified e-mail address instead of the user' do
create(:email, user: user, email: 'dmitriy.zaporozhets@gmail.com')
visit(project_commit_path(project, sample_commit.parent_id))
check_author_email(sample_commit.author_email)
end
end
context 'when the blob does not exist' do
......@@ -263,3 +270,9 @@ def check_author_link(email, author)
expect(author_link['href']).to eq(user_path(author))
expect(find('.commit-author-name').text).to eq(author.name)
end
def check_author_email(email)
author_link = find('.commit-author-link')
expect(author_link['href']).to eq("mailto:#{email}")
end
import Vue from 'vue';
import mountCompontent from 'spec/helpers/vue_mount_component_helper';
import mountCompontent from 'helpers/vue_mount_component_helper';
import router from '~/ide/ide_router';
import Item from '~/ide/components/branches/item.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
......@@ -30,7 +30,7 @@ describe('IDE branch item', () => {
it('renders branch name and timeago', () => {
const timeText = getTimeago().format(TEST_BRANCH.committedDate);
expect(vm.$el).toContainText(TEST_BRANCH.name);
expect(vm.$el.textContent).toContain(TEST_BRANCH.name);
expect(vm.$el.querySelector('time')).toHaveText(timeText);
expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);
});
......@@ -39,7 +39,7 @@ describe('IDE branch item', () => {
const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`)
.href;
expect(vm.$el).toMatch('a');
expect(vm.$el.textContent).toMatch('a');
expect(vm.$el).toHaveAttr('href', expectedHref);
});
......
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import List from '~/ide/components/branches/search_list.vue';
import Item from '~/ide/components/branches/item.vue';
import { branches } from '../../mock_data';
const localVue = createLocalVue();
......
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import List from '~/ide/components/merge_requests/list.vue';
import Item from '~/ide/components/merge_requests/item.vue';
import TokenedInput from '~/ide/components/shared/tokened_input.vue';
import { mergeRequests as mergeRequestsMock } from '../../mock_data';
const localVue = createLocalVue();
......
......@@ -3,7 +3,7 @@ import '~/behaviors/markdown/render_gfm';
import { createStore } from '~/ide/stores';
import RightPane from '~/ide/components/panes/right.vue';
import { rightSidebarViews } from '~/ide/constants';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
describe('IDE right pane', () => {
let Component;
......@@ -56,7 +56,7 @@ describe('IDE right pane', () => {
describe('click', () => {
beforeEach(() => {
spyOn(vm, 'open');
jest.spyOn(vm, 'open').mockReturnValue();
});
it('sets view to merge request', done => {
......@@ -74,7 +74,9 @@ describe('IDE right pane', () => {
describe('live preview', () => {
it('renders live preview button', done => {
Vue.set(vm.$store.state.entries, 'package.json', { name: 'package.json' });
Vue.set(vm.$store.state.entries, 'package.json', {
name: 'package.json',
});
vm.$store.state.clientsidePreviewEnabled = true;
vm.$nextTick(() => {
......
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import List from '~/ide/components/pipelines/list.vue';
import JobsList from '~/ide/components/jobs/list.vue';
import Tab from '~/vue_shared/components/tabs/tab.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { pipelines } from '../../../../javascripts/ide/mock_data';
const localVue = createLocalVue();
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import { TEST_HOST } from 'helpers/test_constants';
import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
describe('IDE clientside preview navigator', () => {
......@@ -12,14 +13,9 @@ describe('IDE clientside preview navigator', () => {
});
beforeEach(() => {
manager = {
bundlerURL: gl.TEST_HOST,
iframe: { src: '' },
};
manager = { bundlerURL: TEST_HOST, iframe: { src: '' } };
vm = mountComponent(Component, {
manager,
});
vm = mountComponent(Component, { manager });
});
afterEach(() => {
......@@ -47,7 +43,7 @@ describe('IDE clientside preview navigator', () => {
it('calls back method when clicking back button', done => {
vm.navigationStack.push('/test');
vm.navigationStack.push('/test2');
spyOn(vm, 'back');
jest.spyOn(vm, 'back').mockReturnValue();
vm.$nextTick(() => {
vm.$el.querySelector('.ide-navigator-btn').click();
......@@ -60,7 +56,7 @@ describe('IDE clientside preview navigator', () => {
it('calls forward method when clicking forward button', done => {
vm.forwardNavigationStack.push('/test');
spyOn(vm, 'forward');
jest.spyOn(vm, 'forward').mockReturnValue();
vm.$nextTick(() => {
vm.$el.querySelectorAll('.ide-navigator-btn')[1].click();
......@@ -73,49 +69,35 @@ describe('IDE clientside preview navigator', () => {
describe('onUrlChange', () => {
it('updates the path', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.path).toBe('/url');
});
it('sets currentBrowsingIndex 0 if not already set', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.currentBrowsingIndex).toBe(0);
});
it('increases currentBrowsingIndex if path doesnt match', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
vm.onUrlChange({
url: `${gl.TEST_HOST}/url2`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url2` });
expect(vm.currentBrowsingIndex).toBe(1);
});
it('does not increase currentBrowsingIndex if path matches', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.currentBrowsingIndex).toBe(0);
});
it('pushes path into navigation stack', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.navigationStack).toEqual(['/url']);
});
......@@ -128,7 +110,7 @@ describe('IDE clientside preview navigator', () => {
vm.navigationStack.push('/test');
vm.navigationStack.push('/test2');
spyOn(vm, 'visitPath');
jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.back();
});
......@@ -152,7 +134,7 @@ describe('IDE clientside preview navigator', () => {
describe('forward', () => {
it('calls visitPath with first entry in forwardNavigationStack', () => {
spyOn(vm, 'visitPath');
jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.forwardNavigationStack.push('/test');
vm.forwardNavigationStack.push('/test2');
......@@ -165,7 +147,7 @@ describe('IDE clientside preview navigator', () => {
describe('refresh', () => {
it('calls refresh with current path', () => {
spyOn(vm, 'visitPath');
jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.path = '/test';
......@@ -179,7 +161,7 @@ describe('IDE clientside preview navigator', () => {
it('updates iframe src with passed in path', () => {
vm.visitPath('/testpath');
expect(manager.iframe.src).toBe(`${gl.TEST_HOST}/testpath`);
expect(manager.iframe.src).toBe(`${TEST_HOST}/testpath`);
});
});
});
import * as pathUtils from 'path';
import { decorateData } from '~/ide/stores/utils';
import state from '~/ide/stores/state';
import commitState from '~/ide/stores/modules/commit/state';
import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
import pipelinesState from '~/ide/stores/modules/pipelines/state';
import branchesState from '~/ide/stores/modules/branches/state';
import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
import paneState from '~/ide/stores/modules/pane/state';
export const resetStore = store => {
const newState = {
...state(),
commit: commitState(),
mergeRequests: mergeRequestsState(),
pipelines: pipelinesState(),
branches: branchesState(),
fileTemplates: fileTemplatesState(),
rightPane: paneState(),
};
store.replaceState(newState);
};
export const file = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
id,
type,
icon: 'icon',
url: 'url',
name,
path: parent ? `${parent.path}/${name}` : name,
parentPath: parent ? parent.path : '',
lastCommit: {},
});
export const createEntriesFromPaths = paths =>
paths
.map(path => ({
name: pathUtils.basename(path),
dir: pathUtils.dirname(path),
ext: pathUtils.extname(path),
}))
.reduce((entries, path, idx) => {
const { name } = path;
const parent = path.dir ? entries[path.dir] : null;
const type = path.ext ? 'blob' : 'tree';
const entry = file(name, (idx + 1).toString(), type, parent);
return {
[entry.path]: entry,
...entries,
};
}, {});
......@@ -31,7 +31,7 @@ describe('IDE router', () => {
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`,
].forEach(route => {
it(`finds project path when route is "${route}"`, () => {
spyOn(store, 'dispatch').and.returnValue(new Promise(() => {}));
jest.spyOn(store, 'dispatch').mockReturnValue(new Promise(() => {}));
router.push(route);
......
......@@ -45,7 +45,9 @@ describe('IDE store getters', () => {
localState.currentMergeRequestId = 1;
localState.projects.abcproject = {
mergeRequests: {
1: { mergeId: 1 },
1: {
mergeId: 1,
},
},
};
......@@ -62,9 +64,21 @@ describe('IDE store getters', () => {
describe('allBlobs', () => {
beforeEach(() => {
Object.assign(localState.entries, {
index: { type: 'blob', name: 'index', lastOpenedAt: 0 },
app: { type: 'blob', name: 'blob', lastOpenedAt: 0 },
folder: { type: 'folder', name: 'folder', lastOpenedAt: 0 },
index: {
type: 'blob',
name: 'index',
lastOpenedAt: 0,
},
app: {
type: 'blob',
name: 'blob',
lastOpenedAt: 0,
},
folder: {
type: 'folder',
name: 'folder',
lastOpenedAt: 0,
},
});
});
......@@ -174,7 +188,7 @@ describe('IDE store getters', () => {
},
};
const localGetters = {
findBranch: jasmine.createSpy('findBranchSpy'),
findBranch: jest.fn(),
};
getters.currentBranch(localState, localGetters);
......@@ -251,7 +265,9 @@ describe('IDE store getters', () => {
describe('packageJson', () => {
it('returns package.json entry', () => {
localState.entries['package.json'] = { name: 'package.json' };
localState.entries['package.json'] = {
name: 'package.json',
};
expect(getters.packageJson(localState)).toEqual({
name: 'package.json',
......@@ -273,7 +289,9 @@ describe('IDE store getters', () => {
currentProject: {
default_branch: 'master',
},
currentBranch: { can_push: true },
currentBranch: {
can_push: true,
},
};
expect(getters.canPushToBranch({}, localGetters)).toBeTruthy();
......@@ -284,7 +302,9 @@ describe('IDE store getters', () => {
currentProject: {
default_branch: 'master',
},
currentBranch: { can_push: false },
currentBranch: {
can_push: false,
},
};
expect(getters.canPushToBranch({}, localGetters)).toBeFalsy();
......
import MockAdapter from 'axios-mock-adapter';
import testAction from 'spec/helpers/vuex_action_helper';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/branches/state';
import * as types from '~/ide/stores/modules/branches/mutation_types';
......@@ -21,12 +21,8 @@ describe('IDE branches actions', () => {
beforeEach(() => {
mockedContext = {
dispatch() {},
rootState: {
currentProjectId: projectData.name_with_namespace,
},
rootGetters: {
currentProject: projectData,
},
rootState: { currentProjectId: projectData.name_with_namespace },
rootGetters: { currentProject: projectData },
state: state(),
};
......@@ -70,7 +66,7 @@ describe('IDE branches actions', () => {
type: 'setErrorMessage',
payload: {
text: 'Error loading branches.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: { search: TEST_SEARCH },
},
......@@ -105,15 +101,12 @@ describe('IDE branches actions', () => {
});
it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchBranches(mockedContext, { search: TEST_SEARCH });
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: jasmine.objectContaining({
search: TEST_SEARCH,
sort: 'updated_desc',
}),
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: expect.objectContaining({ search: TEST_SEARCH, sort: 'updated_desc' }),
});
});
......@@ -126,10 +119,7 @@ describe('IDE branches actions', () => {
[
{ type: 'requestBranches' },
{ type: 'resetBranches' },
{
type: 'receiveBranchesSuccess',
payload: branches,
},
{ type: 'receiveBranchesSuccess', payload: branches },
],
done,
);
......@@ -150,10 +140,7 @@ describe('IDE branches actions', () => {
[
{ type: 'requestBranches' },
{ type: 'resetBranches' },
{
type: 'receiveBranchesError',
payload: { search: TEST_SEARCH },
},
{ type: 'receiveBranchesError', payload: { search: TEST_SEARCH } },
],
done,
);
......
......@@ -10,7 +10,7 @@ describe('IDE branches mutations', () => {
mockedState = state();
});
describe(types.REQUEST_BRANCHES, () => {
describe('REQUEST_BRANCHES', () => {
it('sets loading to true', () => {
mutations[types.REQUEST_BRANCHES](mockedState);
......@@ -18,7 +18,7 @@ describe('IDE branches mutations', () => {
});
});
describe(types.RECEIVE_BRANCHES_ERROR, () => {
describe('RECEIVE_BRANCHES_ERROR', () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_BRANCHES_ERROR](mockedState);
......@@ -26,7 +26,7 @@ describe('IDE branches mutations', () => {
});
});
describe(types.RECEIVE_BRANCHES_SUCCESS, () => {
describe('RECEIVE_BRANCHES_SUCCESS', () => {
it('sets branches', () => {
const expectedBranches = branches.map(branch => ({
name: branch.name,
......@@ -39,7 +39,7 @@ describe('IDE branches mutations', () => {
});
});
describe(types.RESET_BRANCHES, () => {
describe('RESET_BRANCHES', () => {
it('clears branches array', () => {
mockedState.branches = ['test'];
......
......@@ -19,9 +19,7 @@ describe('IDE merge requests actions', () => {
beforeEach(() => {
mockedState = state();
mockedRootState = {
currentProjectId: 7,
};
mockedRootState = { currentProjectId: 7 };
mock = new MockAdapter(axios);
});
......@@ -54,7 +52,7 @@ describe('IDE merge requests actions', () => {
type: 'setErrorMessage',
payload: {
text: 'Error loading merge requests.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: { type: 'created', search: '' },
},
......@@ -71,12 +69,7 @@ describe('IDE merge requests actions', () => {
receiveMergeRequestsSuccess,
mergeRequests,
mockedState,
[
{
type: types.RECEIVE_MERGE_REQUESTS_SUCCESS,
payload: mergeRequests,
},
],
[{ type: types.RECEIVE_MERGE_REQUESTS_SUCCESS, payload: mergeRequests }],
[],
done,
);
......@@ -94,36 +87,34 @@ describe('IDE merge requests actions', () => {
});
it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState },
{ type: 'created' },
);
fetchMergeRequests(
{
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: 'created' },
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: {
scope: 'created-by-me',
state: 'opened',
search: '',
},
});
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: { scope: 'created-by-me', state: 'opened', search: '' },
});
});
it('calls API with search', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState },
{ type: 'created', search: 'testing search' },
);
fetchMergeRequests(
{
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: 'created', search: 'testing search' },
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: {
scope: 'created-by-me',
state: 'opened',
search: 'testing search',
},
});
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: { scope: 'created-by-me', state: 'opened', search: 'testing search' },
});
});
......@@ -136,10 +127,7 @@ describe('IDE merge requests actions', () => {
[
{ type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' },
{
type: 'receiveMergeRequestsSuccess',
payload: mergeRequests,
},
{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
],
done,
);
......@@ -152,21 +140,19 @@ describe('IDE merge requests actions', () => {
});
it('calls API with project', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState },
{
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: null, search: 'testing search' },
);
expect(apiSpy).toHaveBeenCalledWith(
jasmine.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
{
params: {
state: 'opened',
search: 'testing search',
},
},
expect.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
{ params: { state: 'opened', search: 'testing search' } },
);
});
......@@ -179,10 +165,7 @@ describe('IDE merge requests actions', () => {
[
{ type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' },
{
type: 'receiveMergeRequestsSuccess',
payload: mergeRequests,
},
{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
],
done,
);
......
import { TEST_HOST } from 'helpers/test_constants';
import state from '~/ide/stores/modules/merge_requests/state';
import mutations from '~/ide/stores/modules/merge_requests/mutations';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
......@@ -10,7 +11,7 @@ describe('IDE merge requests mutations', () => {
mockedState = state();
});
describe(types.REQUEST_MERGE_REQUESTS, () => {
describe('REQUEST_MERGE_REQUESTS', () => {
it('sets loading to true', () => {
mutations[types.REQUEST_MERGE_REQUESTS](mockedState);
......@@ -18,7 +19,7 @@ describe('IDE merge requests mutations', () => {
});
});
describe(types.RECEIVE_MERGE_REQUESTS_ERROR, () => {
describe('RECEIVE_MERGE_REQUESTS_ERROR', () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState);
......@@ -26,9 +27,9 @@ describe('IDE merge requests mutations', () => {
});
});
describe(types.RECEIVE_MERGE_REQUESTS_SUCCESS, () => {
describe('RECEIVE_MERGE_REQUESTS_SUCCESS', () => {
it('sets merge requests', () => {
gon.gitlab_url = gl.TEST_HOST;
gon.gitlab_url = TEST_HOST;
mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, mergeRequests);
expect(mockedState.mergeRequests).toEqual([
......@@ -43,7 +44,7 @@ describe('IDE merge requests mutations', () => {
});
});
describe(types.RESET_MERGE_REQUESTS, () => {
describe('RESET_MERGE_REQUESTS', () => {
it('clears merge request array', () => {
mockedState.mergeRequests = ['test'];
......
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';
import * as types from '~/ide/stores/modules/pane/mutation_types';
describe('IDE pane module actions', () => {
const TEST_VIEW = { name: 'test' };
const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };
describe('toggleOpen', () => {
it('dispatches open if closed', done => {
testAction(
actions.toggleOpen,
TEST_VIEW,
{ isOpen: false },
[],
[{ type: 'open', payload: TEST_VIEW }],
done,
);
});
it('dispatches close if opened', done => {
testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
});
});
describe('open', () => {
it('commits SET_OPEN', done => {
testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
});
it('commits SET_CURRENT_VIEW if view is given', done => {
testAction(
actions.open,
TEST_VIEW,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
],
[],
done,
);
});
it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
testAction(
actions.open,
TEST_VIEW_KEEP_ALIVE,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
{ type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
],
[],
done,
);
});
});
describe('close', () => {
it('commits SET_OPEN', done => {
testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
});
});
});
import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import {
requestLatestPipeline,
......@@ -78,7 +79,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst fetching the latest pipeline.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
......@@ -91,38 +92,28 @@ describe('IDE pipelines actions', () => {
});
describe('receiveLatestPipelineSuccess', () => {
const rootGetters = {
lastCommit: { id: '123' },
};
const rootGetters = { lastCommit: { id: '123' } };
let commit;
beforeEach(() => {
commit = jasmine.createSpy('commit');
commit = jest.fn().mockName('commit');
});
it('commits pipeline', () => {
receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines });
expect(commit.calls.argsFor(0)).toEqual([
types.RECEIVE_LASTEST_PIPELINE_SUCCESS,
pipelines[0],
]);
expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, pipelines[0]);
});
it('commits false when there are no pipelines', () => {
receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines: [] });
expect(commit.calls.argsFor(0)).toEqual([types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false]);
expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false);
});
});
describe('fetchLatestPipeline', () => {
beforeEach(() => {
jasmine.clock().install();
});
beforeEach(() => {});
afterEach(() => {
jasmine.clock().uninstall();
stopPipelinePolling();
clearEtagPoll();
});
......@@ -135,10 +126,10 @@ describe('IDE pipelines actions', () => {
});
it('dispatches request', done => {
spyOn(axios, 'get').and.callThrough();
spyOn(Visibility, 'hidden').and.returnValue(false);
jest.spyOn(axios, 'get');
jest.spyOn(Visibility, 'hidden').mockReturnValue(false);
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' },
currentProject: { path_with_namespace: 'abc/def' },
......@@ -146,31 +137,29 @@ describe('IDE pipelines actions', () => {
fetchLatestPipeline({ dispatch, rootGetters });
expect(dispatch.calls.argsFor(0)).toEqual(['requestLatestPipeline']);
expect(dispatch).toHaveBeenCalledWith('requestLatestPipeline');
jasmine.clock().tick(1000);
jest.advanceTimersByTime(1000);
new Promise(resolve => requestAnimationFrame(resolve))
.then(() => {
expect(axios.get).toHaveBeenCalled();
expect(axios.get.calls.count()).toBe(1);
expect(axios.get).toHaveBeenCalledTimes(1);
expect(dispatch.calls.argsFor(1)).toEqual([
'receiveLatestPipelineSuccess',
jasmine.anything(),
]);
expect(dispatch).toHaveBeenCalledWith(
'receiveLatestPipelineSuccess',
expect.anything(),
);
jasmine.clock().tick(10000);
jest.advanceTimersByTime(10000);
})
.then(() => new Promise(resolve => requestAnimationFrame(resolve)))
.then(() => {
expect(axios.get).toHaveBeenCalled();
expect(axios.get.calls.count()).toBe(2);
expect(axios.get).toHaveBeenCalledTimes(2);
expect(dispatch.calls.argsFor(2)).toEqual([
'receiveLatestPipelineSuccess',
jasmine.anything(),
]);
expect(dispatch).toHaveBeenCalledWith(
'receiveLatestPipelineSuccess',
expect.anything(),
);
})
.then(done)
.catch(done.fail);
......@@ -183,7 +172,7 @@ describe('IDE pipelines actions', () => {
});
it('dispatches error', done => {
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' },
currentProject: { path_with_namespace: 'abc/def' },
......@@ -191,14 +180,11 @@ describe('IDE pipelines actions', () => {
fetchLatestPipeline({ dispatch, rootGetters });
jasmine.clock().tick(1500);
jest.advanceTimersByTime(1500);
new Promise(resolve => requestAnimationFrame(resolve))
.then(() => {
expect(dispatch.calls.argsFor(1)).toEqual([
'receiveLatestPipelineError',
jasmine.anything(),
]);
expect(dispatch).toHaveBeenCalledWith('receiveLatestPipelineError', expect.anything());
})
.then(done)
.catch(done.fail);
......@@ -224,7 +210,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst loading the pipelines jobs.',
action: jasmine.anything(),
action: expect.anything(),
actionText: 'Please try again',
actionPayload: { id: 1 },
},
......@@ -249,10 +235,7 @@ describe('IDE pipelines actions', () => {
});
describe('fetchJobs', () => {
const stage = {
id: 1,
dropdownPath: `${gl.TEST_HOST}/jobs`,
};
const stage = { id: 1, dropdownPath: `${TEST_HOST}/jobs` };
describe('success', () => {
beforeEach(() => {
......@@ -361,7 +344,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst fetching the job trace.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
......@@ -387,15 +370,13 @@ describe('IDE pipelines actions', () => {
describe('fetchJobTrace', () => {
beforeEach(() => {
mockedState.detailJob = {
path: `${gl.TEST_HOST}/project/builds`,
};
mockedState.detailJob = { path: `${TEST_HOST}/project/builds` };
});
describe('success', () => {
beforeEach(() => {
spyOn(axios, 'get').and.callThrough();
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
jest.spyOn(axios, 'get');
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
});
it('dispatches request', done => {
......@@ -413,9 +394,12 @@ describe('IDE pipelines actions', () => {
});
it('sends get request to correct URL', () => {
fetchJobTrace({ state: mockedState, dispatch() {} });
fetchJobTrace({
state: mockedState,
dispatch() {},
});
expect(axios.get).toHaveBeenCalledWith(`${gl.TEST_HOST}/project/builds/trace`, {
params: { format: 'json' },
});
expect(axios.get).toHaveBeenCalledWith(`${TEST_HOST}/project/builds/trace`, {
params: { format: 'json' },
});
});
......@@ -423,7 +407,7 @@ describe('IDE pipelines actions', () => {
describe('error', () => {
beforeEach(() => {
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(500);
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(500);
});
it('dispatches error', done => {
......
......@@ -10,7 +10,7 @@ describe('IDE pipelines mutations', () => {
mockedState = state();
});
describe(types.REQUEST_LATEST_PIPELINE, () => {
describe('REQUEST_LATEST_PIPELINE', () => {
it('sets loading to true', () => {
mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
......@@ -18,7 +18,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => {
describe('RECEIVE_LASTEST_PIPELINE_ERROR', () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
......@@ -26,7 +26,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
describe('RECEIVE_LASTEST_PIPELINE_SUCCESS', () => {
const itSetsPipelineLoadingStates = () => {
it('sets has loaded to true', () => {
expect(mockedState.hasLoadedPipeline).toBe(true);
......@@ -52,7 +52,7 @@ describe('IDE pipelines mutations', () => {
id: '51',
path: 'test',
commit: { id: '123' },
details: { status: jasmine.any(Object) },
details: { status: expect.any(Object) },
yamlError: undefined,
});
});
......@@ -95,12 +95,9 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.REQUEST_JOBS, () => {
describe('REQUEST_JOBS', () => {
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
});
it('sets isLoading on stage', () => {
......@@ -110,12 +107,9 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOBS_ERROR, () => {
describe('RECEIVE_JOBS_ERROR', () => {
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
});
it('sets isLoading on stage after error', () => {
......@@ -125,29 +119,22 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOBS_SUCCESS, () => {
describe('RECEIVE_JOBS_SUCCESS', () => {
let data;
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
data = {
latest_statuses: [...jobs],
};
data = { latest_statuses: [...jobs] };
});
it('updates loading', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].isLoading).toBe(false);
});
it('sets jobs on stage', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].jobs.length).toBe(jobs.length);
expect(mockedState.stages[0].jobs).toEqual(
jobs.map(job => ({
......@@ -164,13 +151,9 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.TOGGLE_STAGE_COLLAPSE, () => {
describe('TOGGLE_STAGE_COLLAPSE', () => {
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
isCollapsed: false,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i, isCollapsed: false }));
});
it('toggles collapsed state', () => {
......@@ -184,7 +167,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.SET_DETAIL_JOB, () => {
describe('SET_DETAIL_JOB', () => {
it('sets detail job', () => {
mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]);
......@@ -192,7 +175,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.REQUEST_JOB_TRACE, () => {
describe('REQUEST_JOB_TRACE', () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0] };
});
......@@ -204,7 +187,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOB_TRACE_ERROR, () => {
describe('RECEIVE_JOB_TRACE_ERROR', () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true };
});
......@@ -216,14 +199,13 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOB_TRACE_SUCCESS, () => {
describe('RECEIVE_JOB_TRACE_SUCCESS', () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true };
});
it('sets output on detail job', () => {
mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' });
expect(mockedState.detailJob.output).toBe('html');
expect(mockedState.detailJob.isLoading).toBe(false);
});
......
......@@ -9,10 +9,7 @@ describe('IDE store file mutations', () => {
beforeEach(() => {
localState = state();
localFile = {
...file(),
type: 'blob',
};
localFile = { ...file(), type: 'blob' };
localState.entries[localFile.path] = localFile;
});
......@@ -28,11 +25,7 @@ describe('IDE store file mutations', () => {
});
it('sets pending tab as not active', () => {
localState.openFiles.push({
...localFile,
pending: true,
active: true,
});
localState.openFiles.push({ ...localFile, pending: true, active: true });
mutations.SET_FILE_ACTIVE(localState, {
path: localFile.path,
......@@ -132,7 +125,7 @@ describe('IDE store file mutations', () => {
localFile,
].forEach(f => {
expect(f).toEqual(
jasmine.objectContaining({
expect.objectContaining({
path,
name,
raw: null,
......@@ -154,10 +147,7 @@ describe('IDE store file mutations', () => {
});
it('adds raw data to open pending file', () => {
localState.openFiles.push({
...localFile,
pending: true,
});
localState.openFiles.push({ ...localFile, pending: true });
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
......@@ -168,11 +158,7 @@ describe('IDE store file mutations', () => {
});
it('does not add raw data to open pending tempFile file', () => {
localState.openFiles.push({
...localFile,
pending: true,
tempFile: true,
});
localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
......@@ -234,7 +220,9 @@ describe('IDE store file mutations', () => {
it('sets file mr change', () => {
mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, {
file: localFile,
mrChange: { diff: 'ABC' },
mrChange: {
diff: 'ABC',
},
});
expect(localFile.mrChange.diff).toBe('ABC');
......@@ -311,12 +299,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.trees['gitlab-ce/master'].tree).toEqual([
{
...localFile,
deleted: false,
},
]);
expect(localState.trees['gitlab-ce/master'].tree).toEqual([{ ...localFile, deleted: false }]);
});
it('adds to parent tree if deleted', () => {
......@@ -328,12 +311,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.entries.parentPath.tree).toEqual([
{
...localFile,
deleted: false,
},
]);
expect(localState.entries.parentPath.tree).toEqual([{ ...localFile, deleted: false }]);
});
});
......@@ -379,11 +357,7 @@ describe('IDE store file mutations', () => {
let f;
beforeEach(() => {
f = {
...file(),
type: 'blob',
staged: true,
};
f = { ...file(), type: 'blob', staged: true };
localState.stagedFiles.push(f);
localState.changedFiles.push(f);
......@@ -422,19 +396,16 @@ describe('IDE store file mutations', () => {
describe('ADD_PENDING_TAB', () => {
beforeEach(() => {
const f = {
...file('openFile'),
path: 'openFile',
active: true,
opened: true,
};
const f = { ...file('openFile'), path: 'openFile', active: true, opened: true };
localState.entries[f.path] = f;
localState.openFiles.push(f);
});
it('adds file into openFiles as pending', () => {
mutations.ADD_PENDING_TAB(localState, { file: localFile });
mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].pending).toBe(true);
......@@ -445,11 +416,15 @@ describe('IDE store file mutations', () => {
const newFile = file('test');
localState.entries[newFile.path] = newFile;
mutations.ADD_PENDING_TAB(localState, { file: localFile });
mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1);
mutations.ADD_PENDING_TAB(localState, { file: file('test') });
mutations.ADD_PENDING_TAB(localState, {
file: file('test'),
});
expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].name).toBe('test');
......
......@@ -51,7 +51,9 @@ describe('Multi-file store tree mutations', () => {
});
it('keeps loading state', () => {
mutations.CREATE_TREE(localState, { treePath: 'project/master' });
mutations.CREATE_TREE(localState, {
treePath: 'project/master',
});
mutations.SET_DIRECTORY_DATA(localState, {
data,
treePath: 'project/master',
......
......@@ -26,15 +26,18 @@ describe('WebIDE utils', () => {
entry.deleted = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.deleted);
});
it('renders "addition" icon for temp entries', () => {
entry.tempFile = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.addition);
});
it('renders "modified" icon for newly-renamed entries', () => {
entry.prevPath = 'foo/bar';
entry.tempFile = false;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.modified);
});
it('renders "modified" icon even for temp entries if they are newly-renamed', () => {
entry.prevPath = 'foo/bar';
entry.tempFile = true;
......
export * from '../../frontend/ide/helpers';
......@@ -136,7 +136,9 @@ describe Gitlab::Auth::LDAP::Access do
context 'without ActiveDirectory enabled' do
before do
allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:active_directory).and_return(false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:active_directory).and_return(false)
end
end
it 'returns true' do
......
......@@ -58,7 +58,9 @@ describe Gitlab::Auth::LDAP::AuthHash do
end
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:attributes).and_return(attributes)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:attributes).and_return(attributes)
end
end
it "has the correct username" do
......
......@@ -18,8 +18,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class)
.to receive(:adapter).and_return(adapter)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_truthy
end
......@@ -27,8 +28,9 @@ describe Gitlab::Auth::LDAP::Authentication do
it "is false if the user does not exist" do
# try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class)
.to receive(:adapter).and_return(adapter)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey
end
......@@ -38,8 +40,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call
adapter = double('adapter', bind_as: nil).as_null_object
allow_any_instance_of(described_class)
.to receive(:adapter).and_return(adapter)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey
end
......
......@@ -396,7 +396,9 @@ describe Gitlab::Auth::OAuth::User do
context "and no account for the LDAP user" do
context 'dont block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
it do
......@@ -408,7 +410,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
it do
......@@ -424,7 +428,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
it do
......@@ -436,7 +442,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
it do
......@@ -480,7 +488,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
it do
......@@ -492,7 +502,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
it do
......
......@@ -75,7 +75,9 @@ describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
end
it 'does not schedule an import' do
expect_any_instance_of(Project).not_to receive(:import_schedule)
expect_next_instance_of(Project) do |instance|
expect(instance).not_to receive(:import_schedule)
end
importer.create_project_if_needed
end
......
......@@ -9,7 +9,9 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
describe '.load_for_project' do
it "loads the status" do
expect_any_instance_of(described_class).to receive(:load_status)
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:load_status)
end
described_class.load_for_project(project)
end
......
......@@ -32,7 +32,9 @@ describe Gitlab::Checks::BranchCheck do
end
it 'raises an error if the user is not allowed to merge to protected branches' do
expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true)
expect_next_instance_of(Gitlab::Checks::MatchingMergeRequest) do |instance|
expect(instance).to receive(:match?).and_return(true)
end
expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
expect(user_access).to receive(:can_push_to_branch?).and_return(false)
......
......@@ -14,31 +14,41 @@ describe Gitlab::Checks::ChangeAccess do
end
it 'calls pushes checks' do
expect_any_instance_of(Gitlab::Checks::PushCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::PushCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls branches checks' do
expect_any_instance_of(Gitlab::Checks::BranchCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::BranchCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls tags checks' do
expect_any_instance_of(Gitlab::Checks::TagCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::TagCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls lfs checks' do
expect_any_instance_of(Gitlab::Checks::LfsCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls diff checks' do
expect_any_instance_of(Gitlab::Checks::DiffCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::DiffCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
......
......@@ -12,12 +12,16 @@ describe Gitlab::Ci::Build::Credentials::Factory do
end
before do
allow_any_instance_of(described_class).to receive(:providers).and_return([TestProvider])
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:providers).and_return([TestProvider])
end
end
context 'when provider is valid' do
before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(true)
allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(true)
end
end
it 'generates an array of credentials objects' do
......@@ -29,7 +33,9 @@ describe Gitlab::Ci::Build::Credentials::Factory do
context 'when provider is not valid' do
before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(false)
allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(false)
end
end
it 'generates an array without specific credential object' do
......
......@@ -15,8 +15,9 @@ describe Gitlab::Ci::Config::External::File::Project do
before do
project.add_developer(user)
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#matching?' do
......@@ -159,8 +160,8 @@ describe Gitlab::Ci::Config::External::File::Project do
private
def stub_project_blob(ref, path)
allow_any_instance_of(Repository)
.to receive(:blob_data_at)
.with(ref, path) { yield }
allow_next_instance_of(Repository) do |instance|
allow(instance).to receive(:blob_data_at).with(ref, path) { yield }
end
end
end
......@@ -21,8 +21,9 @@ describe Gitlab::Ci::Config::External::File::Remote do
end
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#matching?' do
......
......@@ -14,8 +14,9 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template_file) { described_class.new(params, context) }
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#matching?' do
......
......@@ -23,8 +23,9 @@ describe Gitlab::Ci::Config::External::Mapper do
before do
stub_full_request(remote_url).to_return(body: file_content)
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#process' do
......
......@@ -8,8 +8,9 @@ describe Gitlab::Ci::Config do
set(:user) { create(:user) }
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
let(:config) do
......@@ -358,18 +359,11 @@ describe Gitlab::Ci::Config do
context "when it takes too long to evaluate includes" do
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
.and_call_original
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:set_deadline)
.with(described_class::TIMEOUT_SECONDS)
.and_call_original
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:execution_expired?)
.and_return(true)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!).and_call_original
allow(instance).to receive(:set_deadline).with(described_class::TIMEOUT_SECONDS).and_call_original
allow(instance).to receive(:execution_expired?).and_return(true)
end
end
it 'raises error TimeoutError' do
......@@ -384,9 +378,9 @@ describe Gitlab::Ci::Config do
context 'when context expansion timeout is disabled' do
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
.and_call_original
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!).and_call_original
end
allow(Feature)
.to receive(:enabled?)
......
......@@ -81,7 +81,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is protected' do
before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(true)
allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(true)
end
end
it 'returns protected builds' do
......@@ -91,7 +93,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is not protected' do
before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(false)
allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(false)
end
end
it 'returns unprotected builds' do
......
......@@ -112,8 +112,9 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
it 'calls get_chunk only once' do
expect_any_instance_of(Gitlab::Ci::Trace::ChunkedIO)
.to receive(:current_chunk).once.and_call_original
expect_next_instance_of(Gitlab::Ci::Trace::ChunkedIO) do |instance|
expect(instance).to receive(:current_chunk).once.and_call_original
end
chunked_io.each_line { |line| }
end
......
......@@ -9,7 +9,9 @@ shared_examples 'base stage' do
before do
allow(stage).to receive(:project_median).and_return(1.12)
allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:event_result).and_return({})
allow_next_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher) do |instance|
allow(instance).to receive(:event_result).and_return({})
end
end
it 'has the median data value' do
......
......@@ -17,7 +17,9 @@ describe Gitlab::CycleAnalytics::UsageData do
projects.each_with_index do |project, time|
issue = create(:issue, project: project, created_at: (time + 1).hour.ago)
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
allow(instance).to receive(:issues).and_return([issue])
end
milestone = create(:milestone, project: project)
mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}")
......
......@@ -10,17 +10,25 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
describe '#diff_files' do
it 'does not highlight binary files' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(false)
allow_next_instance_of(Gitlab::Diff::File) do |instance|
allow(instance).to receive(:text?).and_return(false)
end
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
expect_next_instance_of(Gitlab::Diff::File) do |instance|
expect(instance).not_to receive(:highlighted_diff_lines)
end
diff_files
end
it 'does not highlight files marked as undiffable in .gitattributes' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false)
allow_next_instance_of(Gitlab::Diff::File) do |instance|
allow(instance).to receive(:diffable?).and_return(false)
end
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
expect_next_instance_of(Gitlab::Diff::File) do |instance|
expect(instance).not_to receive(:highlighted_diff_lines)
end
diff_files
end
......
......@@ -95,7 +95,9 @@ describe Gitlab::Email::Handler::CreateMergeRequestHandler do
context "something is wrong" do
context "when the merge request could not be saved" do
before do
allow_any_instance_of(MergeRequest).to receive(:save).and_return(false)
allow_next_instance_of(MergeRequest) do |instance|
allow(instance).to receive(:save).and_return(false)
end
end
it "raises an InvalidMergeRequestError" do
......
......@@ -38,8 +38,9 @@ describe Gitlab::EtagCaching::Middleware do
end
it 'generates ETag' do
expect_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:touch).and_return('123')
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
expect(instance).to receive(:touch).and_return('123')
end
middleware.call(build_request(path, if_none_match))
end
......@@ -177,9 +178,9 @@ describe Gitlab::EtagCaching::Middleware do
'SCRIPT_NAME' => '/relative-gitlab'
}
expect_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:get).with("/relative-gitlab#{enabled_path}")
.and_return(nil)
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
expect(instance).to receive(:get).with("/relative-gitlab#{enabled_path}").and_return(nil)
end
middleware.call(env)
end
......@@ -190,8 +191,9 @@ describe Gitlab::EtagCaching::Middleware do
end
def mock_value_in_store(value)
allow_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:get).and_return(value)
allow_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
allow(instance).to receive(:get).and_return(value)
end
end
def build_request(path, if_none_match)
......
......@@ -158,7 +158,9 @@ describe Gitlab::Experimentation do
context 'the user is part of the control group' do
before do
allow_any_instance_of(described_class).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
end
end
it 'pushes the right parameters to gon' do
......
......@@ -20,6 +20,8 @@ describe Gitlab::FogbugzImport::Client do
end
def stub_api(users)
allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listPeople).and_return(users)
allow_next_instance_of(::Fogbugz::Interface) do |instance|
allow(instance).to receive(:command).with(:listPeople).and_return(users)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::FogbugzImport::Importer do
let(:project) { create(:project_empty_repo) }
let(:importer) { described_class.new(project) }
let(:repo) do
instance_double(Gitlab::FogbugzImport::Repository,
safe_name: 'vim',
path: 'vim',
raw_data: '')
end
let(:import_data) { { 'repo' => repo } }
let(:credentials) do
{
'fb_session' => {
'uri' => 'https://testing.fogbugz.com',
'token' => 'token'
}
}
end
let(:closed_bug) do
{
fOpen: 'false',
sTitle: 'Closed bug',
sLatestTextSummary: "",
dtOpened: Time.now.to_s,
dtLastUpdated: Time.now.to_s,
events: { event: [] }
}.with_indifferent_access
end
let(:opened_bug) do
{
fOpen: 'true',
sTitle: 'Opened bug',
sLatestTextSummary: "",
dtOpened: Time.now.to_s,
dtLastUpdated: Time.now.to_s,
events: { event: [] }
}.with_indifferent_access
end
let(:fogbugz_bugs) { [opened_bug, closed_bug] }
before do
project.create_import_data(data: import_data, credentials: credentials)
allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listCategories).and_return([])
allow_any_instance_of(Gitlab::FogbugzImport::Client).to receive(:cases).and_return(fogbugz_bugs)
end
it 'imports bugs' do
expect { importer.execute }.to change { Issue.count }.by(2)
end
it 'imports opened bugs' do
importer.execute
issue = Issue.where(project_id: project.id).find_by_title(opened_bug[:sTitle])
expect(issue.state_id).to eq(Issue.available_states[:opened])
end
it 'imports closed bugs' do
importer.execute
issue = Issue.where(project_id: project.id).find_by_title(closed_bug[:sTitle])
expect(issue.state_id).to eq(Issue.available_states[:closed])
end
end
......@@ -134,7 +134,9 @@ describe Gitlab::Git::Blob, :seed_helper do
describe '.find with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.find(repository, SeedRepo::Commit::ID, 'files/images/6049019_460s.jpg')
end
......
......@@ -176,7 +176,9 @@ describe Gitlab::Git::Commit, :seed_helper do
describe '.find with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.find(repository, SeedRepo::Commit::ID)
end
......@@ -438,7 +440,9 @@ describe Gitlab::Git::Commit, :seed_helper do
it_should_behave_like '.batch_by_oid'
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.batch_by_oid(repository, [SeedRepo::Commit::ID])
end
......
......@@ -145,7 +145,9 @@ describe Gitlab::Git::Tree, :seed_helper do
describe '.where with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:lookup).with(SeedRepo::Commit::ID)
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:lookup).with(SeedRepo::Commit::ID)
end
described_class.where(repository, SeedRepo::Commit::ID, 'files', false)
end
......
......@@ -730,7 +730,9 @@ describe Gitlab::GitAccess do
it 'checks LFS integrity only for first change' do
allow(project).to receive(:lfs_enabled?).and_return(true)
expect_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).exactly(1).times
expect_next_instance_of(Gitlab::Checks::LfsIntegrity) do |instance|
expect(instance).to receive(:objects_missing?).exactly(1).times
end
push_access_check
end
......
......@@ -10,10 +10,11 @@ describe Gitlab::GitalyClient::CleanupService do
describe '#apply_bfg_object_map_stream' do
it 'sends an apply_bfg_object_map_stream message' do
expect_any_instance_of(Gitaly::CleanupService::Stub)
.to receive(:apply_bfg_object_map_stream)
.with(kind_of(Enumerator), kind_of(Hash))
.and_return([])
expect_next_instance_of(Gitaly::CleanupService::Stub) do |instance|
expect(instance).to receive(:apply_bfg_object_map_stream)
.with(kind_of(Enumerator), kind_of(Hash))
.and_return([])
end
client.apply_bfg_object_map_stream(StringIO.new)
end
......
......@@ -55,7 +55,9 @@ describe Gitlab::GitalyClient do
it 'returns an empty string when the storage is not found in the response' do
response = double("response")
allow(response).to receive(:storage_statuses).and_return([])
allow_any_instance_of(Gitlab::GitalyClient::ServerService).to receive(:info).and_return(response)
allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
allow(instance).to receive(:info).and_return(response)
end
expect(described_class.filesystem_id('default')).to eq(nil)
end
......
......@@ -144,9 +144,9 @@ describe Gitlab::GithubImport::Importer::DiffNoteImporter do
describe '#find_merge_request_id' do
it 'returns a merge request ID' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:database_id)
.and_return(10)
expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
expect(instance).to receive(:database_id).and_return(10)
end
expect(importer.find_merge_request_id).to eq(10)
end
......
......@@ -74,9 +74,9 @@ describe Gitlab::GithubImport::Importer::LabelLinksImporter do
describe '#find_target_id' do
it 'returns the ID of the issuable to create the label link for' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:database_id)
.and_return(10)
expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
expect(instance).to receive(:database_id).and_return(10)
end
expect(importer.find_target_id).to eq(10)
end
......
......@@ -50,8 +50,9 @@ describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cac
describe '#build_labels_cache' do
it 'builds the labels cache' do
expect_any_instance_of(Gitlab::GithubImport::LabelFinder)
.to receive(:build_cache)
expect_next_instance_of(Gitlab::GithubImport::LabelFinder) do |instance|
expect(instance).to receive(:build_cache)
end
importer.build_labels_cache
end
......
......@@ -80,8 +80,9 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
describe '#build_milestones_cache' do
it 'builds the milestones cache' do
expect_any_instance_of(Gitlab::GithubImport::MilestoneFinder)
.to receive(:build_cache)
expect_next_instance_of(Gitlab::GithubImport::MilestoneFinder) do |instance|
expect(instance).to receive(:build_cache)
end
importer.build_milestones_cache
end
......
......@@ -143,9 +143,9 @@ describe Gitlab::GithubImport::Importer::NoteImporter do
describe '#find_noteable_id' do
it 'returns the ID of the noteable' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:database_id)
.and_return(10)
expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
expect(instance).to receive(:database_id).and_return(10)
end
expect(importer.find_noteable_id).to eq(10)
end
......
......@@ -9,8 +9,9 @@ describe Gitlab::GithubImport::SequentialImporter do
project = double(:project, id: 1, repository: repository)
importer = described_class.new(project, token: 'foo')
expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
.to receive(:execute)
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute)
end
described_class::SEQUENTIAL_IMPORTERS.each do |klass|
instance = double(:instance)
......
......@@ -21,18 +21,24 @@ describe Gitlab::GitlabImport::Client do
it 'uses membership and simple flags' do
stub_request('/api/v4/projects?membership=true&page=1&per_page=100&simple=true')
expect_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
expect_next_instance_of(OAuth2::Response) do |instance|
expect(instance).to receive(:parsed).and_return([])
end
expect(client.projects.to_a).to eq []
end
shared_examples 'pagination params' do
before do
allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
allow_next_instance_of(OAuth2::Response) do |instance|
allow(instance).to receive(:parsed).and_return([])
end
end
it 'allows page_limit param' do
allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return(element_list)
allow_next_instance_of(OAuth2::Response) do |instance|
allow(instance).to receive(:parsed).and_return(element_list)
end
expect(client).to receive(:lazy_page_iterator).with(hash_including(page_limit: 2)).and_call_original
......
......@@ -109,7 +109,9 @@ describe Gitlab::HttpIO do
end
it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
expect_next_instance_of(Net::HTTP) do |instance|
expect(instance).to receive(:request).once.and_call_original
end
http_io.each_line { |line| }
end
......
......@@ -43,7 +43,9 @@ describe Gitlab::RequestContext do
let(:ip) { '192.168.1.11' }
before do
allow_any_instance_of(Rack::Request).to receive(:ip).and_return(ip)
allow_next_instance_of(Rack::Request) do |instance|
allow(instance).to receive(:ip).and_return(ip)
end
described_class.new(app).call(env)
end
......
......@@ -80,6 +80,17 @@ describe Commit do
expect(commit.author).to eq(user)
end
context 'with a user with an unconfirmed e-mail' do
before do
user = create(:user)
create(:email, user: user, email: commit.author_email)
end
it 'returns no user' do
expect(commit.author).to be_nil
end
end
context 'using eager loading' do
let!(:alice) { create(:user, email: 'alice@example.com') }
let!(:bob) { create(:user, email: 'hunter2@example.com') }
......@@ -115,7 +126,7 @@ describe Commit do
let!(:commits) { [alice_commit, bob_commit, eve_commit, jeff_commit] }
before do
create(:email, user: bob, email: 'bob@example.com')
create(:email, :confirmed, user: bob, email: 'bob@example.com')
end
it 'executes only two SQL queries' do
......@@ -179,6 +190,32 @@ describe Commit do
end
end
describe '#committer' do
context 'with a confirmed e-mail' do
it 'returns the user' do
user = create(:user, email: commit.committer_email)
expect(commit.committer).to eq(user)
end
end
context 'with an unconfirmed e-mail' do
let(:user) { create(:user) }
before do
create(:email, user: user, email: commit.committer_email)
end
it 'returns no user' do
expect(commit.committer).to be_nil
end
it 'returns the user' do
expect(commit.committer(confirmed: false)).to eq(user)
end
end
end
describe '#to_reference' do
let(:project) { create(:project, :repository, path: 'sample-project') }
......