Commit 52b0352f authored by GitLab Bot's avatar GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-06-07

# Conflicts:
#	Gemfile.lock
#	Gemfile.rails5.lock
#	app/views/layouts/devise.html.haml
#	app/views/layouts/devise_empty.html.haml
#	app/views/projects/project_members/index.html.haml
#	spec/requests/api/commits_spec.rb

[ci skip]
parents a4b2d939 7c179bf3
### Description ### Problem to solve
(Include problem, use cases, benefits, and/or goals) ### Further details
(Include use cases, benefits, and/or goals)
### Proposal ### Proposal
### What does success look like, and how can we measure that?
(If no way to measure success, link to an issue that will implement a way to measure this)
### Links / references ### Links / references
/label ~"feature proposal" /label ~"feature proposal"
...@@ -186,6 +186,7 @@ GEM ...@@ -186,6 +186,7 @@ GEM
dropzonejs-rails (0.7.2) dropzonejs-rails (0.7.2)
rails (> 3.1) rails (> 3.1)
ed25519 (1.2.4) ed25519 (1.2.4)
<<<<<<< HEAD
elasticsearch (5.0.3) elasticsearch (5.0.3)
elasticsearch-api (= 5.0.3) elasticsearch-api (= 5.0.3)
elasticsearch-transport (= 5.0.3) elasticsearch-transport (= 5.0.3)
...@@ -199,6 +200,8 @@ GEM ...@@ -199,6 +200,8 @@ GEM
elasticsearch-transport (5.0.3) elasticsearch-transport (5.0.3)
faraday faraday
multi_json multi_json
=======
>>>>>>> upstream/master
email_reply_trimmer (0.1.6) email_reply_trimmer (0.1.6)
email_spec (2.2.0) email_spec (2.2.0)
htmlentities (~> 4.3.3) htmlentities (~> 4.3.3)
...@@ -539,7 +542,10 @@ GEM ...@@ -539,7 +542,10 @@ GEM
mustermann (~> 1.0.0) mustermann (~> 1.0.0)
mysql2 (0.4.10) mysql2 (0.4.10)
net-ldap (0.16.0) net-ldap (0.16.0)
<<<<<<< HEAD
net-ntp (2.1.3) net-ntp (2.1.3)
=======
>>>>>>> upstream/master
net-ssh (5.0.1) net-ssh (5.0.1)
netrc (0.11.0) netrc (0.11.0)
nokogiri (1.8.2) nokogiri (1.8.2)
...@@ -1044,9 +1050,12 @@ DEPENDENCIES ...@@ -1044,9 +1050,12 @@ DEPENDENCIES
doorkeeper-openid_connect (~> 1.3) doorkeeper-openid_connect (~> 1.3)
dropzonejs-rails (~> 0.7.1) dropzonejs-rails (~> 0.7.1)
ed25519 (~> 1.2) ed25519 (~> 1.2)
<<<<<<< HEAD
elasticsearch-api (= 5.0.3) elasticsearch-api (= 5.0.3)
elasticsearch-model (~> 0.1.9) elasticsearch-model (~> 0.1.9)
elasticsearch-rails (~> 0.1.9) elasticsearch-rails (~> 0.1.9)
=======
>>>>>>> upstream/master
email_reply_trimmer (~> 0.1) email_reply_trimmer (~> 0.1)
email_spec (~> 2.2.0) email_spec (~> 2.2.0)
factory_bot_rails (~> 4.8.2) factory_bot_rails (~> 4.8.2)
...@@ -1122,7 +1131,10 @@ DEPENDENCIES ...@@ -1122,7 +1131,10 @@ DEPENDENCIES
mousetrap-rails (~> 1.4.6) mousetrap-rails (~> 1.4.6)
mysql2 (~> 0.4.10) mysql2 (~> 0.4.10)
net-ldap net-ldap
<<<<<<< HEAD
net-ntp net-ntp
=======
>>>>>>> upstream/master
net-ssh (~> 5.0) net-ssh (~> 5.0)
nokogiri (~> 1.8.2) nokogiri (~> 1.8.2)
oauth2 (~> 1.4) oauth2 (~> 1.4)
......
...@@ -188,6 +188,7 @@ GEM ...@@ -188,6 +188,7 @@ GEM
dropzonejs-rails (0.7.4) dropzonejs-rails (0.7.4)
rails (> 3.1) rails (> 3.1)
ed25519 (1.2.4) ed25519 (1.2.4)
<<<<<<< HEAD
elasticsearch (5.0.3) elasticsearch (5.0.3)
elasticsearch-api (= 5.0.3) elasticsearch-api (= 5.0.3)
elasticsearch-transport (= 5.0.3) elasticsearch-transport (= 5.0.3)
...@@ -201,6 +202,8 @@ GEM ...@@ -201,6 +202,8 @@ GEM
elasticsearch-transport (5.0.3) elasticsearch-transport (5.0.3)
faraday faraday
multi_json multi_json
=======
>>>>>>> upstream/master
email_reply_trimmer (0.1.10) email_reply_trimmer (0.1.10)
email_spec (2.2.0) email_spec (2.2.0)
htmlentities (~> 4.3.3) htmlentities (~> 4.3.3)
...@@ -534,7 +537,10 @@ GEM ...@@ -534,7 +537,10 @@ GEM
mustermann (~> 1.0.0) mustermann (~> 1.0.0)
mysql2 (0.4.10) mysql2 (0.4.10)
net-ldap (0.16.1) net-ldap (0.16.1)
<<<<<<< HEAD
net-ntp (2.1.3) net-ntp (2.1.3)
=======
>>>>>>> upstream/master
net-ssh (5.0.1) net-ssh (5.0.1)
netrc (0.11.0) netrc (0.11.0)
nio4r (2.3.1) nio4r (2.3.1)
...@@ -1045,9 +1051,12 @@ DEPENDENCIES ...@@ -1045,9 +1051,12 @@ DEPENDENCIES
doorkeeper-openid_connect (~> 1.3) doorkeeper-openid_connect (~> 1.3)
dropzonejs-rails (~> 0.7.1) dropzonejs-rails (~> 0.7.1)
ed25519 (~> 1.2) ed25519 (~> 1.2)
<<<<<<< HEAD
elasticsearch-api (= 5.0.3) elasticsearch-api (= 5.0.3)
elasticsearch-model (~> 0.1.9) elasticsearch-model (~> 0.1.9)
elasticsearch-rails (~> 0.1.9) elasticsearch-rails (~> 0.1.9)
=======
>>>>>>> upstream/master
email_reply_trimmer (~> 0.1) email_reply_trimmer (~> 0.1)
email_spec (~> 2.2.0) email_spec (~> 2.2.0)
factory_bot_rails (~> 4.8.2) factory_bot_rails (~> 4.8.2)
...@@ -1121,7 +1130,10 @@ DEPENDENCIES ...@@ -1121,7 +1130,10 @@ DEPENDENCIES
mousetrap-rails (~> 1.4.6) mousetrap-rails (~> 1.4.6)
mysql2 (~> 0.4.10) mysql2 (~> 0.4.10)
net-ldap net-ldap
<<<<<<< HEAD
net-ntp net-ntp
=======
>>>>>>> upstream/master
net-ssh (~> 5.0) net-ssh (~> 5.0)
nokogiri (~> 1.8.2) nokogiri (~> 1.8.2)
oauth2 (~> 1.4) oauth2 (~> 1.4)
......
...@@ -11,17 +11,20 @@ export default { ...@@ -11,17 +11,20 @@ export default {
}, },
computed: { computed: {
...mapGetters(['currentMergeRequest']), ...mapGetters(['currentMergeRequest']),
...mapState(['viewer']), ...mapState(['viewer', 'currentMergeRequestId']),
showLatestChangesText() { showLatestChangesText() {
return !this.currentMergeRequest || this.viewer === viewerTypes.diff; return !this.currentMergeRequestId || this.viewer === viewerTypes.diff;
}, },
showMergeRequestText() { showMergeRequestText() {
return this.currentMergeRequest && this.viewer === viewerTypes.mr; return this.currentMergeRequestId && this.viewer === viewerTypes.mr;
},
mergeRequestId() {
return `!${this.currentMergeRequest.iid}`;
}, },
}, },
mounted() { mounted() {
this.$nextTick(() => { this.$nextTick(() => {
this.updateViewer(this.currentMergeRequest ? viewerTypes.mr : viewerTypes.diff); this.updateViewer(this.currentMergeRequestId ? viewerTypes.mr : viewerTypes.diff);
}); });
}, },
methods: { methods: {
...@@ -54,7 +57,11 @@ export default { ...@@ -54,7 +57,11 @@ export default {
</template> </template>
<template v-else-if="showMergeRequestText"> <template v-else-if="showMergeRequestText">
{{ __('Merge request') }} {{ __('Merge request') }}
(<a :href="currentMergeRequest.web_url">!{{ currentMergeRequest.iid }}</a>) (<a
v-if="currentMergeRequest"
:href="currentMergeRequest.web_url"
v-text="mergeRequestId"
></a>)
</template> </template>
</div> </div>
</template> </template>
......
<script> <script>
import $ from 'jquery';
import { mapState, mapGetters } from 'vuex'; import { mapState, mapGetters } from 'vuex';
import ProjectAvatarImage from '~/vue_shared/components/project_avatar/image.vue'; import ProjectAvatarImage from '~/vue_shared/components/project_avatar/image.vue';
import Icon from '~/vue_shared/components/icon.vue'; import Icon from '~/vue_shared/components/icon.vue';
...@@ -13,6 +14,7 @@ import CommitSection from './repo_commit_section.vue'; ...@@ -13,6 +14,7 @@ import CommitSection from './repo_commit_section.vue';
import CommitForm from './commit_sidebar/form.vue'; import CommitForm from './commit_sidebar/form.vue';
import IdeReview from './ide_review.vue'; import IdeReview from './ide_review.vue';
import SuccessMessage from './commit_sidebar/success_message.vue'; import SuccessMessage from './commit_sidebar/success_message.vue';
import MergeRequestDropdown from './merge_requests/dropdown.vue';
import { activityBarViews } from '../constants'; import { activityBarViews } from '../constants';
export default { export default {
...@@ -32,10 +34,12 @@ export default { ...@@ -32,10 +34,12 @@ export default {
CommitForm, CommitForm,
IdeReview, IdeReview,
SuccessMessage, SuccessMessage,
MergeRequestDropdown,
}, },
data() { data() {
return { return {
showTooltip: false, showTooltip: false,
showMergeRequestsDropdown: false,
}; };
}, },
computed: { computed: {
...@@ -46,6 +50,7 @@ export default { ...@@ -46,6 +50,7 @@ export default {
'changedFiles', 'changedFiles',
'stagedFiles', 'stagedFiles',
'lastCommitMsg', 'lastCommitMsg',
'currentMergeRequestId',
]), ]),
...mapGetters(['currentProject', 'someUncommitedChanges']), ...mapGetters(['currentProject', 'someUncommitedChanges']),
showSuccessMessage() { showSuccessMessage() {
...@@ -61,9 +66,39 @@ export default { ...@@ -61,9 +66,39 @@ export default {
watch: { watch: {
currentBranchId() { currentBranchId() {
this.$nextTick(() => { this.$nextTick(() => {
if (!this.$refs.branchId) return;
this.showTooltip = this.$refs.branchId.scrollWidth > this.$refs.branchId.offsetWidth; this.showTooltip = this.$refs.branchId.scrollWidth > this.$refs.branchId.offsetWidth;
}); });
}, },
loading() {
this.$nextTick(() => {
this.addDropdownListeners();
});
},
},
mounted() {
this.addDropdownListeners();
},
beforeDestroy() {
$(this.$refs.mergeRequestDropdown)
.off('show.bs.dropdown')
.off('hide.bs.dropdown');
},
methods: {
addDropdownListeners() {
if (!this.$refs.mergeRequestDropdown) return;
$(this.$refs.mergeRequestDropdown)
.on('show.bs.dropdown', () => {
this.toggleMergeRequestDropdown();
}).on('hide.bs.dropdown', () => {
this.toggleMergeRequestDropdown();
});
},
toggleMergeRequestDropdown() {
this.showMergeRequestsDropdown = !this.showMergeRequestsDropdown;
},
}, },
}; };
</script> </script>
...@@ -88,9 +123,13 @@ export default { ...@@ -88,9 +123,13 @@ export default {
</div> </div>
</template> </template>
<template v-else> <template v-else>
<div class="context-header ide-context-header"> <div
<a class="context-header ide-context-header dropdown"
:href="currentProject.web_url" ref="mergeRequestDropdown"
>
<button
type="button"
data-toggle="dropdown"
> >
<div <div
v-if="currentProject.avatar_url" v-if="currentProject.avatar_url"
...@@ -114,19 +153,41 @@ export default { ...@@ -114,19 +153,41 @@ export default {
<div class="sidebar-context-title"> <div class="sidebar-context-title">
{{ currentProject.name }} {{ currentProject.name }}
</div> </div>
<div <div class="d-flex">
class="sidebar-context-title ide-sidebar-branch-title" <div
ref="branchId" v-if="currentBranchId"
v-tooltip class="sidebar-context-title ide-sidebar-branch-title"
:title="branchTooltipTitle" ref="branchId"
> v-tooltip
<icon :title="branchTooltipTitle"
name="branch" >
css-classes="append-right-5" <icon
/>{{ currentBranchId }} name="branch"
css-classes="append-right-5"
/>{{ currentBranchId }}
</div>
<div
v-if="currentMergeRequestId"
class="sidebar-context-title ide-sidebar-branch-title"
:class="{
'prepend-left-8': currentBranchId
}"
>
<icon
name="git-merge"
css-classes="append-right-5"
/>!{{ currentMergeRequestId }}
</div>
</div> </div>
</div> </div>
</a> <icon
class="ml-auto"
name="chevron-down"
/>
</button>
<merge-request-dropdown
:show="showMergeRequestsDropdown"
/>
</div> </div>
<div class="multi-file-commit-panel-inner-scroll"> <div class="multi-file-commit-panel-inner-scroll">
<component <component
......
...@@ -35,9 +35,7 @@ export default { ...@@ -35,9 +35,7 @@ export default {
}, },
watch: { watch: {
lastCommit() { lastCommit() {
if (!this.isPollingInitialized) { this.initPipelinePolling();
this.initPipelinePolling();
}
}, },
}, },
mounted() { mounted() {
...@@ -47,9 +45,8 @@ export default { ...@@ -47,9 +45,8 @@ export default {
if (this.intervalId) { if (this.intervalId) {
clearInterval(this.intervalId); clearInterval(this.intervalId);
} }
if (this.isPollingInitialized) {
this.stopPipelinePolling(); this.stopPipelinePolling();
}
}, },
methods: { methods: {
...mapActions('pipelines', ['fetchLatestPipeline', 'stopPipelinePolling']), ...mapActions('pipelines', ['fetchLatestPipeline', 'stopPipelinePolling']),
...@@ -59,8 +56,9 @@ export default { ...@@ -59,8 +56,9 @@ export default {
}, 1000); }, 1000);
}, },
initPipelinePolling() { initPipelinePolling() {
this.fetchLatestPipeline(); if (this.lastCommit) {
this.isPollingInitialized = true; this.fetchLatestPipeline();
}
}, },
commitAgeUpdate() { commitAgeUpdate() {
if (this.lastCommit) { if (this.lastCommit) {
......
<script>
import { mapGetters } from 'vuex';
import Tabs from '../../../vue_shared/components/tabs/tabs';
import Tab from '../../../vue_shared/components/tabs/tab.vue';
import List from './list.vue';
export default {
components: {
Tabs,
Tab,
List,
},
props: {
show: {
type: Boolean,
required: true,
},
},
computed: {
...mapGetters('mergeRequests', ['assignedData', 'createdData']),
createdMergeRequestLength() {
return this.createdData.mergeRequests.length;
},
assignedMergeRequestLength() {
return this.assignedData.mergeRequests.length;
},
},
};
</script>
<template>
<div class="dropdown-menu ide-merge-requests-dropdown p-0">
<tabs
v-if="show"
stop-propagation
>
<tab active>
<template slot="title">
{{ __('Created by me') }}
<span class="badge badge-pill">
{{ createdMergeRequestLength }}
</span>
</template>
<list
type="created"
:empty-text="__('You have not created any merge requests')"
/>
</tab>
<tab>
<template slot="title">
{{ __('Assigned to me') }}
<span class="badge badge-pill">
{{ assignedMergeRequestLength }}
</span>
</template>
<list
type="assigned"
:empty-text="__('You do not have any assigned merge requests')"
/>
</tab>
</tabs>
</div>
</template>
<script>
import Icon from '../../../vue_shared/components/icon.vue';
export default {
components: {
Icon,
},
props: {
item: {
type: Object,
required: true,
},
currentId: {
type: String,
required: true,
},
currentProjectId: {
type: String,
required: true,
},
},
computed: {
isActive() {
return (
this.item.iid === parseInt(this.currentId, 10) &&
this.currentProjectId === this.item.projectPathWithNamespace
);
},
pathWithID() {
return `${this.item.projectPathWithNamespace}!${this.item.iid}`;
},
},
methods: {
clickItem() {
this.$emit('click', this.item);
},
},
};
</script>
<template>
<button
type="button"
class="btn-link d-flex align-items-center"
@click="clickItem"
>
<span class="d-flex append-right-default ide-merge-request-current-icon">
<icon
v-if="isActive"
name="mobile-issue-close"
:size="18"
/>
</span>
<span>
<strong>
{{ item.title }}
</strong>
<span class="ide-merge-request-project-path d-block mt-1">
{{ pathWithID }}
</span>
</span>
</button>
</template>
<script>
import { mapActions, mapGetters, mapState } from 'vuex';
import _ from 'underscore';
import LoadingIcon from '../../../vue_shared/components/loading_icon.vue';
import Item from './item.vue';
export default {
components: {
LoadingIcon,
Item,
},
props: {
type: {
type: String,
required: true,
},
emptyText: {
type: String,
required: true,
},
},
data() {
return {
search: '',
};
},
computed: {
...mapGetters('mergeRequests', ['getData']),
...mapState(['currentMergeRequestId', 'currentProjectId']),
data() {
return this.getData(this.type);
},
isLoading() {
return this.data.isLoading;
},
mergeRequests() {
return this.data.mergeRequests;
},
hasMergeRequests() {
return this.mergeRequests.length !== 0;
},
hasNoSearchResults() {
return this.search !== '' && !this.hasMergeRequests;
},
},
watch: {
isLoading: {
handler: 'focusSearch',
},
},
mounted() {
this.loadMergeRequests();
},
methods: {
...mapActions('mergeRequests', ['fetchMergeRequests', 'openMergeRequest']),
loadMergeRequests() {
this.fetchMergeRequests({ type: this.type, search: this.search });
},
viewMergeRequest(item) {
this.openMergeRequest({
projectPath: item.projectPathWithNamespace,
id: item.iid,
});
},
searchMergeRequests: _.debounce(function debounceSearch() {
this.loadMergeRequests();
}, 250),
focusSearch() {
if (!this.isLoading) {
this.$nextTick(() => {
this.$refs.searchInput.focus();
});
}
},
},
};
</script>
<template>
<div>
<div class="dropdown-input mt-3 pb-3 mb-0 border-bottom">
<input
type="search"
class="dropdown-input-field"
:placeholder="__('Search merge requests')"
v-model="search"
@input="searchMergeRequests"
ref="searchInput"
/>
<i
aria-hidden="true"
class="fa fa-search dropdown-input-search"
></i>
</div>
<div class="dropdown-content ide-merge-requests-dropdown-content d-flex">
<loading-icon
class="mt-3 mb-3 align-self-center ml-auto mr-auto"
v-if="isLoading"
size="2"
/>
<ul
v-else
class="mb-3 w-100"
>
<template v-if="hasMergeRequests">
<li
v-for="item in mergeRequests"
:key="item.id"
>
<item
:item="item"
:current-id="currentMergeRequestId"
:current-project-id="currentProjectId"
@click="viewMergeRequest"
/>
</li>
</template>
<li
v-else
class="ide-merge-requests-empty d-flex align-items-center justify-content-center"
>
<template v-if="hasNoSearchResults">
{{ __('No merge requests found') }}
</template>
<template v-else>
{{ emptyText }}
</template>
</li>
</ul>
</div>
</div>
</template>
...@@ -17,9 +17,7 @@ export const getMergeRequestData = ( ...@@ -17,9 +17,7 @@ export const getMergeRequestData = (
mergeRequestId, mergeRequestId,
mergeRequest: data, mergeRequest: data,
}); });
if (!state.currentMergeRequestId) { commit(types.SET_CURRENT_MERGE_REQUEST, mergeRequestId);
commit(types.SET_CURRENT_MERGE_REQUEST, mergeRequestId);
}
resolve(data); resolve(data);
}) })
.catch(() => { .catch(() => {
......
...@@ -13,8 +13,7 @@ export const getProjectData = ({ commit, state }, { namespace, projectId, force ...@@ -13,8 +13,7 @@ export const getProjectData = ({ commit, state }, { namespace, projectId, force
.then(data => { .then(data => {
commit(types.TOGGLE_LOADING, { entry: state }); commit(types.TOGGLE_LOADING, { entry: state });
commit(types.SET_PROJECT, { projectPath: `${namespace}/${projectId}`, project: data }); commit(types.SET_PROJECT, { projectPath: `${namespace}/${projectId}`, project: data });
if (!state.currentProjectId) commit(types.SET_CURRENT_PROJECT, `${namespace}/${projectId}`);
commit(types.SET_CURRENT_PROJECT, `${namespace}/${projectId}`);
resolve(data); resolve(data);
}) })
.catch(() => { .catch(() => {
......
import { __ } from '../../../../locale'; import { __ } from '../../../../locale';
import Api from '../../../../api'; import Api from '../../../../api';
import flash from '../../../../flash'; import flash from '../../../../flash';
import router from '../../../ide_router';
import { scopes } from './constants';
import * as types from './mutation_types'; import * as types from './mutation_types';
import * as rootTypes from '../../mutation_types';
export const requestMergeRequests = ({ commit }) => commit(types.REQUEST_MERGE_REQUESTS); export const requestMergeRequests = ({ commit }, type) =>
export const receiveMergeRequestsError = ({ commit }) => { commit(types.REQUEST_MERGE_REQUESTS, type);
export const receiveMergeRequestsError = ({ commit }, type) => {
flash(__('Error loading merge requests.')); flash(__('Error loading merge requests.'));
commit(types.RECEIVE_MERGE_REQUESTS_ERROR); commit(types.RECEIVE_MERGE_REQUESTS_ERROR, type);
}; };
export const receiveMergeRequestsSuccess = ({ commit }, data) => export const receiveMergeRequestsSuccess = ({ commit }, { type, data }) =>
commit(types.RECEIVE_MERGE_REQUESTS_SUCCESS, data); commit(types.RECEIVE_MERGE_REQUESTS_SUCCESS, { type, data });
export const fetchMergeRequests = ({ dispatch, state: { scope, state } }, search = '') => { export const fetchMergeRequests = ({ dispatch, state: { state } }, { type, search = '' }) => {
dispatch('requestMergeRequests'); const scope = scopes[type];
dispatch('resetMergeRequests'); dispatch('requestMergeRequests', type);
dispatch('resetMergeRequests', type);
Api.mergeRequests({ scope, state, search }) Api.mergeRequests({ scope, state, search })
.then(({ data }) => dispatch('receiveMergeRequestsSuccess', data)) .then(({ data }) => dispatch('receiveMergeRequestsSuccess', { type, data }))
.catch(() => dispatch('receiveMergeRequestsError')); .catch(() => dispatch('receiveMergeRequestsError', type));
}; };
export const resetMergeRequests = ({ commit }) => commit(types.RESET_MERGE_REQUESTS); export const resetMergeRequests = ({ commit }, type) => commit(types.RESET_MERGE_REQUESTS, type);
export const openMergeRequest = ({ commit, dispatch }, { projectPath, id }) => {
commit(rootTypes.CLEAR_PROJECTS, null, { root: true });
commit(rootTypes.SET_CURRENT_MERGE_REQUEST, `${id}`, { root: true });
commit(rootTypes.RESET_OPEN_FILES, null, { root: true });
dispatch('pipelines/stopPipelinePolling', null, { root: true });
dispatch('pipelines/clearEtagPoll', null, { root: true });
dispatch('pipelines/resetLatestPipeline', null, { root: true });
dispatch('setCurrentBranchId', '', { root: true });
router.push(`/project/${projectPath}/merge_requests/${id}`);
};
export default () => {}; export default () => {};
export const scopes = { export const scopes = {
assignedToMe: 'assigned-to-me', assigned: 'assigned-to-me',
createdByMe: 'created-by-me', created: 'created-by-me',
}; };
export const states = { export const states = {
......
export const getData = state => type => state[type];
export const assignedData = state => state.assigned;
export const createdData = state => state.created;
import state from './state'; import state from './state';
import * as actions from './actions'; import * as actions from './actions';
import * as getters from './getters';
import mutations from './mutations'; import mutations from './mutations';
export default { export default {
...@@ -7,4 +8,5 @@ export default { ...@@ -7,4 +8,5 @@ export default {
state: state(), state: state(),
actions, actions,
mutations, mutations,
getters,
}; };
...@@ -2,15 +2,15 @@ ...@@ -2,15 +2,15 @@
import * as types from './mutation_types'; import * as types from './mutation_types';
export default { export default {
[types.REQUEST_MERGE_REQUESTS](state) { [types.REQUEST_MERGE_REQUESTS](state, type) {
state.isLoading = true; state[type].isLoading = true;
}, },
[types.RECEIVE_MERGE_REQUESTS_ERROR](state) { [types.RECEIVE_MERGE_REQUESTS_ERROR](state, type) {
state.isLoading = false; state[type].isLoading = false;
}, },
[types.RECEIVE_MERGE_REQUESTS_SUCCESS](state, data) { [types.RECEIVE_MERGE_REQUESTS_SUCCESS](state, { type, data }) {
state.isLoading = false; state[type].isLoading = false;
state.mergeRequests = data.map(mergeRequest => ({ state[type].mergeRequests = data.map(mergeRequest => ({
id: mergeRequest.id, id: mergeRequest.id,
iid: mergeRequest.iid, iid: mergeRequest.iid,
title: mergeRequest.title, title: mergeRequest.title,
...@@ -20,7 +20,7 @@ export default { ...@@ -20,7 +20,7 @@ export default {
.replace(`/merge_requests/${mergeRequest.iid}`, ''), .replace(`/merge_requests/${mergeRequest.iid}`, ''),
})); }));
}, },
[types.RESET_MERGE_REQUESTS](state) { [types.RESET_MERGE_REQUESTS](state, type) {
state.mergeRequests = []; state[type].mergeRequests = [];
}, },
}; };
import { scopes, states } from './constants'; import { states } from './constants';
export default () => ({ export default () => ({
isLoading: false, created: {
mergeRequests: [], isLoading: false,
scope: scopes.assignedToMe, mergeRequests: [],
},
assigned: {
isLoading: false,
mergeRequests: [],
},
state: states.opened, state: states.opened,
}); });
...@@ -102,4 +102,7 @@ export const fetchJobTrace = ({ dispatch, state }) => { ...@@ -102,4 +102,7 @@ export const fetchJobTrace = ({ dispatch, state }) => {
.catch(() => dispatch('receiveJobTraceError')); .catch(() => dispatch('receiveJobTraceError'));
}; };
export const resetLatestPipeline = ({ commit }) =>
commit(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, null);
export default () => {}; export default () => {};
...@@ -68,3 +68,6 @@ export const TOGGLE_FILE_FINDER = 'TOGGLE_FILE_FINDER'; ...@@ -68,3 +68,6 @@ export const TOGGLE_FILE_FINDER = 'TOGGLE_FILE_FINDER';
export const BURST_UNUSED_SEAL = 'BURST_UNUSED_SEAL'; export const BURST_UNUSED_SEAL = 'BURST_UNUSED_SEAL';
export const SET_RIGHT_PANE = 'SET_RIGHT_PANE'; export const SET_RIGHT_PANE = 'SET_RIGHT_PANE';
export const CLEAR_PROJECTS = 'CLEAR_PROJECTS';
export const RESET_OPEN_FILES = 'RESET_OPEN_FILES';
...@@ -157,6 +157,12 @@ export default { ...@@ -157,6 +157,12 @@ export default {
[types.SET_LINKS](state, links) { [types.SET_LINKS](state, links) {
Object.assign(state, { links }); Object.assign(state, { links });
}, },
[types.CLEAR_PROJECTS](state) {
Object.assign(state, { projects: {}, trees: {} });
},
[types.RESET_OPEN_FILES](state) {
Object.assign(state, { openFiles: [] });
},
...projectMutations, ...projectMutations,
...mergeRequestMutation, ...mergeRequestMutation,
...fileMutations, ...fileMutations,
......
import $ from 'jquery'; import $ from 'jquery';
import stickyMonitor from './lib/utils/sticky'; import { stickyMonitor } from './lib/utils/sticky';
export default (stickyTop) => { export default (stickyTop) => {
stickyMonitor(document.querySelector('.js-diff-files-changed'), stickyTop); stickyMonitor(document.querySelector('.js-diff-files-changed'), stickyTop);
......
import $ from 'jquery'; import $ from 'jquery';
import _ from 'underscore'; import _ from 'underscore';
import StickyFill from 'stickyfilljs'; import { polyfillSticky } from './lib/utils/sticky';
import axios from './lib/utils/axios_utils'; import axios from './lib/utils/axios_utils';
import { visitUrl } from './lib/utils/url_utility'; import { visitUrl } from './lib/utils/url_utility';
import bp from './breakpoints'; import bp from './breakpoints';
...@@ -70,14 +70,7 @@ export default class Job extends LogOutputBehaviours { ...@@ -70,14 +70,7 @@ export default class Job extends LogOutputBehaviours {
} }
initAffixTopArea() { initAffixTopArea() {
/** polyfillSticky(this.$topBar);
If the browser does not support position sticky, it returns the position as static.
If the browser does support sticky, then we allow the browser to handle it, if not
then we use a polyfill
*/
if (this.$topBar.css('position') !== 'static') return;
StickyFill.add(this.$topBar);
} }
scrollToBottom() { scrollToBottom() {
......
import StickyFill from 'stickyfilljs';
export const createPlaceholder = () => { export const createPlaceholder = () => {
const placeholder = document.createElement('div'); const placeholder = document.createElement('div');
placeholder.classList.add('sticky-placeholder'); placeholder.classList.add('sticky-placeholder');
...@@ -28,7 +30,16 @@ export const isSticky = (el, scrollY, stickyTop, insertPlaceholder) => { ...@@ -28,7 +30,16 @@ export const isSticky = (el, scrollY, stickyTop, insertPlaceholder) => {
} }
}; };
export default (el, stickyTop, insertPlaceholder = true) => { /**
* Create a listener that will toggle a 'is-stuck' class, based on the current scroll position.
*
* - If the current environment does not support `position: sticky`, do nothing.
*
* @param {HTMLElement} el The `position: sticky` element.
* @param {Number} stickyTop Used to determine when an element is stuck.
* @param {Boolean} insertPlaceholder Should a placeholder element be created when element is stuck?
*/
export const stickyMonitor = (el, stickyTop, insertPlaceholder = true) => {
if (!el) return; if (!el) return;
if (typeof CSS === 'undefined' || !(CSS.supports('(position: -webkit-sticky) or (position: sticky)'))) return; if (typeof CSS === 'undefined' || !(CSS.supports('(position: -webkit-sticky) or (position: sticky)'))) return;
...@@ -37,3 +48,13 @@ export default (el, stickyTop, insertPlaceholder = true) => { ...@@ -37,3 +48,13 @@ export default (el, stickyTop, insertPlaceholder = true) => {
passive: true, passive: true,
}); });
}; };
/**
* Polyfill the `position: sticky` behavior.
*
* - If the current environment supports `position: sticky`, do nothing.
* - Can receive an iterable element list (NodeList, jQuery collection, etc.) or single HTMLElement.
*/
export const polyfillSticky = (el) => {
StickyFill.add(el);
};
...@@ -89,14 +89,13 @@ export default { ...@@ -89,14 +89,13 @@ export default {
<div> <div>
<div <div
class="js-gcp-machine-type-dropdown dropdown" class="js-gcp-machine-type-dropdown dropdown"
:class="{ 'gl-show-field-errors': hasErrors }"
> >
<dropdown-hidden-input <dropdown-hidden-input
:name="fieldName" :name="fieldName"
:value="selectedMachineType" :value="selectedMachineType"
/> />
<dropdown-button <dropdown-button
:class="{ 'gl-field-error-outline': hasErrors }" :class="{ 'border-danger': hasErrors }"
:is-disabled="isDisabled" :is-disabled="isDisabled"
:is-loading="isLoading" :is-loading="isLoading"
:toggle-text="toggleText" :toggle-text="toggleText"
...@@ -132,8 +131,11 @@ export default { ...@@ -132,8 +131,11 @@ export default {
</div> </div>
</div> </div>
<span <span
class="form-text text-muted" class="form-text"
:class="{ 'gl-field-error': hasErrors }" :class="{
'text-danger': hasErrors,
'text-muted': !hasErrors
}"
v-if="hasErrors" v-if="hasErrors"
> >
{{ errorMessage }} {{ errorMessage }}
......
...@@ -147,7 +147,6 @@ export default { ...@@ -147,7 +147,6 @@ export default {
<div> <div>
<div <div
class="js-gcp-project-id-dropdown dropdown" class="js-gcp-project-id-dropdown dropdown"
:class="{ 'gl-show-field-errors': hasErrors }"
> >
<dropdown-hidden-input <dropdown-hidden-input
:name="fieldName" :name="fieldName"
...@@ -155,7 +154,7 @@ export default { ...@@ -155,7 +154,7 @@ export default {
/> />
<dropdown-button <dropdown-button
:class="{ :class="{
'gl-field-error-outline': hasErrors, 'border-danger': hasErrors,
'read-only': hasOneProject 'read-only': hasOneProject
}" }"
:is-disabled="isDisabled" :is-disabled="isDisabled"
...@@ -193,8 +192,11 @@ export default { ...@@ -193,8 +192,11 @@ export default {
</div> </div>
</div> </div>
<span <span
class="form-text text-muted" class="form-text"
:class="{ 'gl-field-error': hasErrors }" :class="{
'text-danger': hasErrors,
'text-muted': !hasErrors
}"
v-html="helpText" v-html="helpText"
></span> ></span>
</div> </div>
......
...@@ -63,14 +63,13 @@ export default { ...@@ -63,14 +63,13 @@ export default {
<div> <div>
<div <div
class="js-gcp-zone-dropdown dropdown" class="js-gcp-zone-dropdown dropdown"
:class="{ 'gl-show-field-errors': hasErrors }"
> >
<dropdown-hidden-input <dropdown-hidden-input
:name="fieldName" :name="fieldName"
:value="selectedZone" :value="selectedZone"
/> />
<dropdown-button <dropdown-button
:class="{ 'gl-field-error-outline': hasErrors }" :class="{ 'border-danger': hasErrors }"
:is-disabled="isDisabled" :is-disabled="isDisabled"
:is-loading="isLoading" :is-loading="isLoading"
:toggle-text="toggleText" :toggle-text="toggleText"
...@@ -106,8 +105,11 @@ export default { ...@@ -106,8 +105,11 @@ export default {
</div> </div>
</div> </div>
<span <span
class="form-text text-muted" class="form-text"
:class="{ 'gl-field-error': hasErrors }" :class="{
'text-danger': hasErrors,
'text-muted': !hasErrors
}"
v-if="hasErrors" v-if="hasErrors"
> >
{{ errorMessage }} {{ errorMessage }}
......
...@@ -26,6 +26,11 @@ export default { ...@@ -26,6 +26,11 @@ export default {
created() { created() {
this.isTab = true; this.isTab = true;
}, },
updated() {
if (this.$parent) {
this.$parent.$forceUpdate();
}
},
}; };
</script> </script>
......
export default { export default {
props: {
stopPropagation: {
type: Boolean,
required: false,
default: false,
},
},
data() { data() {
return { return {
currentIndex: 0, currentIndex: 0,
...@@ -13,7 +20,12 @@ export default { ...@@ -13,7 +20,12 @@ export default {
this.tabs = this.$children.filter(child => child.isTab); this.tabs = this.$children.filter(child => child.isTab);
this.currentIndex = this.tabs.findIndex(tab => tab.localActive); this.currentIndex = this.tabs.findIndex(tab => tab.localActive);
}, },
setTab(index) { setTab(e, index) {
if (this.stopPropagation) {
e.stopPropagation();
e.preventDefault();
}
this.tabs[this.currentIndex].localActive = false; this.tabs[this.currentIndex].localActive = false;
this.tabs[index].localActive = true; this.tabs[index].localActive = true;
...@@ -36,7 +48,7 @@ export default { ...@@ -36,7 +48,7 @@ export default {
href: '#', href: '#',
}, },
on: { on: {
click: () => this.setTab(i), click: e => this.setTab(e, i),
}, },
}, },
tab.$slots.title || tab.title, tab.$slots.title || tab.title,
......
...@@ -251,3 +251,13 @@ table { ...@@ -251,3 +251,13 @@ table {
pre code { pre code {
white-space: pre-wrap; white-space: pre-wrap;
} }
.alert-danger {
background-color: $red-500;
border-color: $red-500;
color: $white-light;
h4 {
color: $white-light;
}
}
...@@ -26,19 +26,25 @@ ...@@ -26,19 +26,25 @@
margin-right: 2px; margin-right: 2px;
width: $contextual-sidebar-width; width: $contextual-sidebar-width;
a { > a,
> button {
transition: padding $sidebar-transition-duration; transition: padding $sidebar-transition-duration;
font-weight: $gl-font-weight-bold; font-weight: $gl-font-weight-bold;
display: flex; display: flex;
width: 100%;
align-items: center; align-items: center;
padding: 10px 16px 10px 10px; padding: 10px 16px 10px 10px;
color: $gl-text-color; color: $gl-text-color;
} background-color: transparent;
border: 0;
text-align: left;
&:hover, &:hover,
a:hover { &:focus {
background-color: $link-hover-background; background-color: $link-hover-background;
color: $gl-text-color; color: $gl-text-color;
outline: 0;
}
} }
.avatar-container { .avatar-container {
......
...@@ -299,6 +299,7 @@ ...@@ -299,6 +299,7 @@
height: 14px; height: 14px;
width: 14px; width: 14px;
vertical-align: middle; vertical-align: middle;
margin-bottom: 4px;
} }
.dropdown-toggle-text { .dropdown-toggle-text {
......
.table-holder { .table-holder {
margin: 0; margin: 0;
overflow: auto;
} }
table { table {
......
...@@ -42,6 +42,10 @@ ...@@ -42,6 +42,10 @@
background: none; background: none;
} }
&:focus {
outline: none;
}
.toggle-icon { .toggle-icon {
position: relative; position: relative;
display: block; display: block;
......
...@@ -327,9 +327,6 @@ ...@@ -327,9 +327,6 @@
box-shadow: 0 1px 2px $issue-boards-card-shadow; box-shadow: 0 1px 2px $issue-boards-card-shadow;
list-style: none; list-style: none;
// as a fallback, hide overflow content so that dragging and dropping still works
overflow: hidden;
&:not(:last-child) { &:not(:last-child) {
margin-bottom: 5px; margin-bottom: 5px;
} }
......
...@@ -196,10 +196,6 @@ ...@@ -196,10 +196,6 @@
.prioritized-labels { .prioritized-labels {
margin-bottom: 30px; margin-bottom: 30px;
h5 {
font-size: $gl-font-size;
}
.add-priority { .add-priority {
display: none; display: none;
color: $gray-light; color: $gray-light;
...@@ -214,10 +210,6 @@ ...@@ -214,10 +210,6 @@
} }
.other-labels { .other-labels {
h5 {
font-size: $gl-font-size;
}
.remove-priority { .remove-priority {
display: none; display: none;
} }
......
...@@ -458,14 +458,10 @@ ...@@ -458,14 +458,10 @@
width: auto; width: auto;
margin-right: 0; margin-right: 0;
a { > a,
> button {
height: 60px; height: 60px;
} }
a:hover,
a:focus {
text-decoration: none;
}
} }
.projects-sidebar { .projects-sidebar {
...@@ -1135,6 +1131,11 @@ ...@@ -1135,6 +1131,11 @@
.avatar { .avatar {
flex: 0 0 40px; flex: 0 0 40px;
} }
.ide-merge-requests-dropdown.dropdown-menu {
width: 385px;
max-height: initial;
}
} }
.ide-sidebar-project-title { .ide-sidebar-project-title {
...@@ -1143,6 +1144,10 @@ ...@@ -1143,6 +1144,10 @@
.sidebar-context-title { .sidebar-context-title {
white-space: nowrap; white-space: nowrap;
} }
.ide-sidebar-branch-title {
min-width: 50px;
}
} }
.ide-external-link { .ide-external-link {
...@@ -1274,3 +1279,52 @@ ...@@ -1274,3 +1279,52 @@
.ide-job-header { .ide-job-header {
min-height: 60px; min-height: 60px;
} }
.ide-merge-requests-dropdown {
.nav-links li {
width: 50%;
padding-left: 0;
padding-right: 0;
a {
text-align: center;
&:not(.active) {
background-color: $gray-light;
}
}
}
.dropdown-input {
padding-left: $gl-padding;
padding-right: $gl-padding;
.fa {
right: 26px;
}
}
.btn-link {
padding-top: $gl-padding;
padding-bottom: $gl-padding;
}
}
.ide-merge-request-current-icon {
min-width: 18px;
}
.ide-merge-requests-empty {
height: 230px;
}
.ide-merge-requests-dropdown-content {
min-height: 230px;
max-height: 470px;
}
.ide-merge-request-project-path {
font-size: 12px;
line-height: 16px;
color: $gl-text-color-secondary;
}
...@@ -91,6 +91,10 @@ class ApplicationController < ActionController::Base ...@@ -91,6 +91,10 @@ class ApplicationController < ActionController::Base
payload[:user_id] = logged_user.try(:id) payload[:user_id] = logged_user.try(:id)
payload[:username] = logged_user.try(:username) payload[:username] = logged_user.try(:username)
end end
if response.status == 422 && response.body.present? && response.content_type == 'application/json'.freeze
payload[:response] = response.body
end
end end
# Controllers such as GitHttpController may use alternative methods # Controllers such as GitHttpController may use alternative methods
......
...@@ -692,6 +692,12 @@ class Project < ActiveRecord::Base ...@@ -692,6 +692,12 @@ class Project < ActiveRecord::Base
end end
end end
def human_import_status_name
ensure_import_state
import_state.human_status_name
end
def import_schedule def import_schedule
ensure_import_state(force: true) ensure_import_state(force: true)
......
...@@ -1005,6 +1005,10 @@ class Repository ...@@ -1005,6 +1005,10 @@ class Repository
blob_data_at(sha, path) blob_data_at(sha, path)
end end
def lfsconfig_for(sha)
blob_data_at(sha, '.lfsconfig')
end
def fetch_ref(source_repository, source_ref:, target_ref:) def fetch_ref(source_repository, source_ref:, target_ref:)
raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref) raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref)
end end
......
...@@ -3,7 +3,7 @@ class BaseService ...@@ -3,7 +3,7 @@ class BaseService
attr_accessor :project, :current_user, :params attr_accessor :project, :current_user, :params
def initialize(project, user, params = {}) def initialize(project, user = nil, params = {})
@project, @current_user, @params = project, user, params.dup @project, @current_user, @params = project, user, params.dup
end end
......
...@@ -17,6 +17,8 @@ module Projects ...@@ -17,6 +17,8 @@ module Projects
def execute def execute
add_repository_to_project add_repository_to_project
download_lfs_objects
import_data import_data
success success
...@@ -37,7 +39,7 @@ module Projects ...@@ -37,7 +39,7 @@ module Projects
# We should skip the repository for a GitHub import or GitLab project import, # We should skip the repository for a GitHub import or GitLab project import,
# because these importers fetch the project repositories for us. # because these importers fetch the project repositories for us.
return if has_importer? && importer_class.try(:imports_repository?) return if importer_imports_repository?
if unknown_url? if unknown_url?
# In this case, we only want to import issues, not a repository. # In this case, we only want to import issues, not a repository.
...@@ -73,6 +75,27 @@ module Projects ...@@ -73,6 +75,27 @@ module Projects
end end
end end
def download_lfs_objects
# In this case, we only want to import issues
return if unknown_url?
# If it has its own repository importer, it has to implements its own lfs import download
return if importer_imports_repository?
return unless project.lfs_enabled?
oids_to_download = Projects::LfsPointers::LfsImportService.new(project).execute
download_service = Projects::LfsPointers::LfsDownloadService.new(project)
oids_to_download.each do |oid, link|
download_service.execute(oid, link)
end
rescue => e
# Right now, to avoid aborting the importing process, we silently fail
# if any exception raises.
Rails.logger.error("The Lfs import process failed. #{e.message}")
end
def import_data def import_data
return unless has_importer? return unless has_importer?
...@@ -98,5 +121,9 @@ module Projects ...@@ -98,5 +121,9 @@ module Projects
def unknown_url? def unknown_url?
project.import_url == Project::UNKNOWN_IMPORT_URL project.import_url == Project::UNKNOWN_IMPORT_URL
end end
def importer_imports_repository?
has_importer? && importer_class.try(:imports_repository?)
end
end end
end end
# This service lists the download link from a remote source based on the
# oids provided
module Projects
  module LfsPointers
    class LfsDownloadLinkListService < BaseService
      # Action name used in the LFS batch API request and response.
      DOWNLOAD_ACTION = 'download'.freeze

      # Raised when the batch endpoint responds with a non-success status.
      DownloadLinksError = Class.new(StandardError)
      # Raised (and handled internally) when an object entry has no href.
      DownloadLinkNotFound = Class.new(StandardError)

      attr_reader :remote_uri

      # project    - the importing Project.
      # remote_uri - URI of the remote LFS batch endpoint; may carry
      #              user/password credentials.
      def initialize(project, remote_uri: nil)
        super(project)

        @remote_uri = remote_uri
      end

      # oids - hash of oids to query. The structure is { lfs_file_oid => lfs_file_size }
      #
      # Returns a hash with the structure { lfs_file_oid => download_link }.
      # Returns {} when LFS is disabled, no remote_uri is set or oids is blank.
      def execute(oids)
        return {} unless project&.lfs_enabled? && remote_uri && oids.present?

        get_download_links(oids)
      end

      private

      # POSTs the batch request and maps the response objects to links.
      # Raises DownloadLinksError when the endpoint does not answer success.
      def get_download_links(oids)
        response = Gitlab::HTTP.post(remote_uri,
                                     body: request_body(oids),
                                     headers: headers)

        raise DownloadLinksError, response.message unless response.success?

        parse_response_links(response['objects'])
      end

      # Entries without a valid download href are logged and skipped so one
      # bad object does not abort the whole list.
      def parse_response_links(objects_response)
        objects_response.each_with_object({}) do |entry, link_list|
          begin
            oid = entry['oid']
            link = entry.dig('actions', DOWNLOAD_ACTION, 'href')

            raise DownloadLinkNotFound unless link

            link_list[oid] = add_credentials(link)
          rescue DownloadLinkNotFound, URI::InvalidURIError
            Rails.logger.error("Link for Lfs Object with oid #{oid} not found or invalid.")
          end
        end
      end

      # JSON body for the LFS batch API 'download' operation.
      def request_body(oids)
        {
          operation: DOWNLOAD_ACTION,
          objects: oids.map { |oid, size| { oid: oid, size: size } }
        }.to_json
      end

      def headers
        {
          'Accept' => LfsRequest::CONTENT_TYPE,
          'Content-Type' => LfsRequest::CONTENT_TYPE
        }.freeze
      end

      # Copies the remote_uri credentials onto the download link when the
      # hosts match. May raise URI::InvalidURIError (handled by caller).
      def add_credentials(link)
        uri = URI.parse(link)

        if should_add_credentials?(uri)
          uri.user = remote_uri.user
          uri.password = remote_uri.password
        end

        uri.to_s
      end

      # The download link can be a local url or an object storage url
      # If the download link has the same host as the import url then
      # we add the same credentials because we may need them
      def should_add_credentials?(link_uri)
        url_credentials? && link_uri.host == remote_uri.host
      end

      def url_credentials?
        remote_uri.user.present? || remote_uri.password.present?
      end
    end
  end
end
# This service downloads and links lfs objects from a remote URL
module Projects
  module LfsPointers
    class LfsDownloadService < BaseService
      # Downloads a single LFS object identified by +oid+ from +url+ and
      # links it to the project.
      #
      # oid - the LFS object id (content hash).
      # url - download link, usually produced by LfsDownloadLinkListService.
      #
      # Returns nothing. Any error is logged and swallowed so a single
      # failed download does not abort the whole import.
      def execute(oid, url)
        return unless project&.lfs_enabled? && oid.present? && url.present?
        return if LfsObject.exists?(oid: oid)

        sanitized_uri = Gitlab::UrlSanitizer.new(url)

        with_tmp_file(oid) do |file|
          size = download_and_save_file(file, sanitized_uri)
          lfs_object = LfsObject.new(oid: oid, size: size, file: file)

          project.all_lfs_objects << lfs_object
        end
      rescue StandardError => e
        # Safe navigation: if Gitlab::UrlSanitizer.new itself raised,
        # sanitized_uri is nil and the rescue must not crash on it.
        Rails.logger.error("LFS file with oid #{oid} couldn't be downloaded from #{sanitized_uri&.sanitized_url}: #{e.message}")
      end

      private

      def download_and_save_file(file, sanitized_uri)
        # SECURITY NOTE(review): Kernel#open also understands pipes ("|cmd")
        # and local paths, and the URL originates from a remote LFS batch
        # response. Consider URI.parse(...).open or Gitlab::HTTP instead.
        IO.copy_stream(open(sanitized_uri.sanitized_url, headers(sanitized_uri)), file)
      end

      # Builds open-uri options; adds HTTP basic auth when the sanitized
      # URI carried credentials.
      def headers(sanitized_uri)
        {}.tap do |headers|
          credentials = sanitized_uri.credentials

          if credentials[:user].present? || credentials[:password].present?
            # Using authentication headers in the request
            headers[:http_basic_authentication] = [credentials[:user], credentials[:password]]
          end
        end
      end

      # Yields a file handle inside the tmp download dir, named after the oid.
      def with_tmp_file(oid)
        create_tmp_storage_dir

        # 'wb': LFS objects are binary; text mode could corrupt the data or
        # raise encoding errors on non-UTF-8 content.
        File.open(File.join(tmp_storage_dir, oid), 'wb') { |file| yield file }
      end

      def create_tmp_storage_dir
        FileUtils.makedirs(tmp_storage_dir) unless Dir.exist?(tmp_storage_dir)
      end

      def tmp_storage_dir
        @tmp_storage_dir ||= File.join(storage_dir, 'tmp', 'download')
      end

      def storage_dir
        @storage_dir ||= Gitlab.config.lfs.storage_path
      end
    end
  end
end
# This service manages the whole workflow of discovering the LFS files in a
# repository, linking them to the project and downloading (and linking) the non
# existent ones.
module Projects
  module LfsPointers
    class LfsImportService < BaseService
      include Gitlab::Utils::StrongMemoize

      # Revision used when scanning the repository for LFS pointers/config.
      HEAD_REV = 'HEAD'.freeze
      # Matches the "url = ..." line of a .lfsconfig file.
      LFS_ENDPOINT_PATTERN = /^\t?url\s*=\s*(.+)$/.freeze
      # Path suffix of the LFS batch API relative to the repository URL.
      LFS_BATCH_API_ENDPOINT = '/info/lfs/objects/batch'.freeze

      LfsImportError = Class.new(StandardError)

      # Returns a hash { lfs_file_oid => download_link } of objects that
      # still need to be downloaded. Returns {} when LFS is disabled or the
      # repository points to an external (third party) LFS endpoint.
      #
      # Raises LfsImportError when the download list cannot be retrieved.
      def execute
        return {} unless project&.lfs_enabled?

        if external_lfs_endpoint?
          # If the endpoint host is different from the import_url it means
          # that the repo is using a third party service for storing the LFS files.
          # In this case, we have to disable lfs in the project
          disable_lfs!

          return {}
        end

        get_download_links
      rescue LfsDownloadLinkListService::DownloadLinksError => e
        raise LfsImportError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
      end

      private

      def external_lfs_endpoint?
        lfsconfig_endpoint_uri && lfsconfig_endpoint_uri.host != import_uri.host
      end

      def disable_lfs!
        project.update(lfs_enabled: false)
      end

      # Lists all LFS pointers, links the already-known objects, and asks the
      # remote endpoint for download links of the remaining ones.
      def get_download_links
        existent_lfs = LfsListService.new(project).execute
        linked_oids = LfsLinkService.new(project).execute(existent_lfs.keys)

        # Retrieving those oids not linked and which we need to download
        not_linked_lfs = existent_lfs.except(*linked_oids)

        LfsDownloadLinkListService.new(project, remote_uri: current_endpoint_uri).execute(not_linked_lfs)
      end

      # Endpoint URI parsed from the repo's .lfsconfig (if present), with
      # missing credentials inherited from the import URL. Memoized.
      def lfsconfig_endpoint_uri
        strong_memoize(:lfsconfig_endpoint_uri) do
          # Retrieving the blob data from the .lfsconfig file
          data = project.repository.lfsconfig_for(HEAD_REV)
          # Parsing the data to retrieve the url
          parsed_data = data&.match(LFS_ENDPOINT_PATTERN)

          if parsed_data
            URI.parse(parsed_data[1]).tap do |endpoint|
              endpoint.user ||= import_uri.user
              endpoint.password ||= import_uri.password
            end
          end
        end
      rescue URI::InvalidURIError
        raise LfsImportError, 'Invalid URL in .lfsconfig file'
      end

      def import_uri
        @import_uri ||= URI.parse(project.import_url)
      rescue URI::InvalidURIError
        raise LfsImportError, 'Invalid project import URL'
      end

      # Prefers the .lfsconfig endpoint; falls back to the default batch URL
      # derived from the import URL.
      def current_endpoint_uri
        (lfsconfig_endpoint_uri || default_endpoint_uri)
      end

      # The import url must end with '.git' here we ensure it is
      def default_endpoint_uri
        @default_endpoint_uri ||= begin
          import_uri.dup.tap do |uri|
            path = uri.path.gsub(%r(/$), '')
            path += '.git' unless path.ends_with?('.git')

            uri.path = path + LFS_BATCH_API_ENDPOINT
          end
        end
      end
    end
  end
end
# Given a list of oids, this service links the existent LFS objects to the project
module Projects
  module LfsPointers
    class LfsLinkService < BaseService
      # Accept an array of oids to link
      #
      # Returns an array with the oids of the LFS objects that already exist
      # on the instance (and are therefore linked to the project).
      def execute(oids)
        # Return an empty Array (not a Hash) so callers always receive the
        # same type whether or not LFS is enabled.
        return [] unless project&.lfs_enabled?

        # Search and link existing LFS Object
        link_existing_lfs_objects(oids)
      end

      private

      # Links any not-yet-linked LfsObject matching the given oids to the
      # project and returns the oids of all matching objects.
      def link_existing_lfs_objects(oids)
        existent_lfs_objects = LfsObject.where(oid: oids)

        return [] unless existent_lfs_objects.any?

        not_linked_lfs_objects = existent_lfs_objects.where.not(id: project.all_lfs_objects)

        project.all_lfs_objects << not_linked_lfs_objects

        existent_lfs_objects.pluck(:oid)
      end
    end
  end
end
# This service list all existent Lfs objects in a repository
module Projects
  module LfsPointers
    class LfsListService < BaseService
      # Revision whose LFS pointers are enumerated.
      REV = 'HEAD'.freeze

      # Retrieve all lfs blob pointers and returns a hash
      # with the structure { lfs_file_oid => lfs_file_size }
      def execute
        return {} unless project&.lfs_enabled?

        pointers = Gitlab::Git::LfsChanges.new(project.repository, REV).all_pointers

        pointers.each_with_object({}) do |blob, oid_sizes|
          oid_sizes[blob.lfs_oid] = blob.lfs_size
        end
      end
    end
  end
end
...@@ -44,7 +44,7 @@ ...@@ -44,7 +44,7 @@
%li.divider %li.divider
- if user.can_be_removed? - if user.can_be_removed?
%li %li
%button.delete-user-button.btn.btn-danger{ data: { toggle: 'modal', %button.delete-user-button.btn.text-danger{ data: { toggle: 'modal',
target: '#delete-user-modal', target: '#delete-user-modal',
delete_user_url: admin_user_path(user), delete_user_url: admin_user_path(user),
block_user_url: block_admin_user_path(user), block_user_url: block_admin_user_path(user),
...@@ -53,7 +53,7 @@ ...@@ -53,7 +53,7 @@
= s_('AdminUsers|Delete user') = s_('AdminUsers|Delete user')
%li %li
%button.delete-user-button.btn.btn-danger{ data: { toggle: 'modal', %button.delete-user-button.btn.text-danger{ data: { toggle: 'modal',
target: '#delete-user-modal', target: '#delete-user-modal',
delete_user_url: admin_user_path(user, hard_delete: true), delete_user_url: admin_user_path(user, hard_delete: true),
block_user_url: block_admin_user_path(user), block_user_url: block_admin_user_path(user),
......
...@@ -2,7 +2,10 @@ ...@@ -2,7 +2,10 @@
%html.devise-layout-html{ class: system_message_class } %html.devise-layout-html{ class: system_message_class }
= render "layouts/head" = render "layouts/head"
%body.ui-indigo.login-page.application.navless{ data: { page: body_data_page } } %body.ui-indigo.login-page.application.navless{ data: { page: body_data_page } }
<<<<<<< HEAD
= header_message = header_message
=======
>>>>>>> upstream/master
.page-wrap .page-wrap
= render "layouts/header/empty" = render "layouts/header/empty"
.login-page-broadcast .login-page-broadcast
......
...@@ -2,7 +2,10 @@ ...@@ -2,7 +2,10 @@
%html{ lang: "en", class: system_message_class } %html{ lang: "en", class: system_message_class }
= render "layouts/head" = render "layouts/head"
%body.ui-indigo.login-page.application.navless %body.ui-indigo.login-page.application.navless
<<<<<<< HEAD
= header_message = header_message
=======
>>>>>>> upstream/master
= render "layouts/header/empty" = render "layouts/header/empty"
= render "layouts/broadcast" = render "layouts/broadcast"
.container.navless-container .container.navless-container
......
...@@ -7,8 +7,8 @@ ...@@ -7,8 +7,8 @@
- link_gke = link_to(s_('ClusterIntegration|Google Kubernetes Engine'), @cluster.gke_cluster_url, target: '_blank', rel: 'noopener noreferrer') - link_gke = link_to(s_('ClusterIntegration|Google Kubernetes Engine'), @cluster.gke_cluster_url, target: '_blank', rel: 'noopener noreferrer')
= s_('ClusterIntegration|Manage your Kubernetes cluster by visiting %{link_gke}').html_safe % { link_gke: link_gke } = s_('ClusterIntegration|Manage your Kubernetes cluster by visiting %{link_gke}').html_safe % { link_gke: link_gke }
.card.form-group .sub-section.form-group
%label.text-danger %h4.text-danger
= s_('ClusterIntegration|Remove Kubernetes cluster integration') = s_('ClusterIntegration|Remove Kubernetes cluster integration')
%p %p
= s_("ClusterIntegration|Remove this Kubernetes cluster's configuration from this project. This will not delete your actual Kubernetes cluster.") = s_("ClusterIntegration|Remove this Kubernetes cluster's configuration from this project. This will not delete your actual Kubernetes cluster.")
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
-# Only show it in the first page -# Only show it in the first page
- hide = @available_labels.empty? || (params[:page].present? && params[:page] != '1') - hide = @available_labels.empty? || (params[:page].present? && params[:page] != '1')
.prioritized-labels{ class: ('hide' if hide) } .prioritized-labels{ class: ('hide' if hide) }
%h5.prepend-top-10 Prioritized Labels %h5 Prioritized Labels
%ul.content-list.manage-labels-list.js-prioritized-labels{ "data-url" => set_priorities_project_labels_path(@project) } %ul.content-list.manage-labels-list.js-prioritized-labels{ "data-url" => set_priorities_project_labels_path(@project) }
#js-priority-labels-empty-state{ class: "#{'hidden' unless @prioritized_labels.empty?}" } #js-priority-labels-empty-state{ class: "#{'hidden' unless @prioritized_labels.empty?}" }
= render 'shared/empty_states/priority_labels' = render 'shared/empty_states/priority_labels'
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
%i Owners %i Owners
.light .light
<<<<<<< HEAD
- if can_admin_project_members && project_can_be_shared? - if can_admin_project_members && project_can_be_shared?
- if !membership_locked? && @project.allowed_to_share_with_group? - if !membership_locked? && @project.allowed_to_share_with_group?
%ul.nav-links.nav.nav-tabs.gitlab-tabs{ role: 'tablist' } %ul.nav-links.nav.nav-tabs.gitlab-tabs{ role: 'tablist' }
...@@ -34,6 +35,15 @@ ...@@ -34,6 +35,15 @@
- elsif @project.allowed_to_share_with_group? - elsif @project.allowed_to_share_with_group?
.share-with-group= render 'projects/project_members/new_shared_group', tab_title: 'Share with group' .share-with-group= render 'projects/project_members/new_shared_group', tab_title: 'Share with group'
=======
- if can?(current_user, :admin_project_member, @project)
%ul.nav-links.nav.nav-tabs.gitlab-tabs{ role: 'tablist' }
%li.nav-tab{ role: 'presentation' }
%a.nav-link.active{ href: '#add-member-pane', id: 'add-member-tab', data: { toggle: 'tab' }, role: 'tab' } Add member
- if @project.allowed_to_share_with_group?
%li.nav-tab{ role: 'presentation' }
%a.nav-link{ href: '#share-with-group-pane', id: 'share-with-group-tab', data: { toggle: 'tab' }, role: 'tab' } Share with group
>>>>>>> upstream/master
= render 'shared/members/requests', membership_source: @project, requesters: @requesters = render 'shared/members/requests', membership_source: @project, requesters: @requesters
......
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
= render 'shared/ref_switcher', destination: 'tree', path: @path, show_create: true = render 'shared/ref_switcher', destination: 'tree', path: @path, show_create: true
- if on_top_of_branch? - if on_top_of_branch?
- addtotree_toggle_attributes = { href: '#', 'data-toggle': 'dropdown', 'data-target': '.add-to-tree-dropdown' } - addtotree_toggle_attributes = { href: '#', 'data-toggle': 'dropdown', 'data-target': '.add-to-tree-dropdown', 'data-boundary': 'window' }
- else - else
- addtotree_toggle_attributes = { title: _("You can only add files when you are on a branch"), data: { container: 'body' }, class: 'disabled has-tooltip' } - addtotree_toggle_attributes = { title: _("You can only add files when you are on a branch"), data: { container: 'body' }, class: 'disabled has-tooltip' }
......
...@@ -31,12 +31,14 @@ ...@@ -31,12 +31,14 @@
- github_importer:github_import_import_diff_note - github_importer:github_import_import_diff_note
- github_importer:github_import_import_issue - github_importer:github_import_import_issue
- github_importer:github_import_import_note - github_importer:github_import_import_note
- github_importer:github_import_import_lfs_object
- github_importer:github_import_import_pull_request - github_importer:github_import_import_pull_request
- github_importer:github_import_refresh_import_jid - github_importer:github_import_refresh_import_jid
- github_importer:github_import_stage_finish_import - github_importer:github_import_stage_finish_import
- github_importer:github_import_stage_import_base_data - github_importer:github_import_stage_import_base_data
- github_importer:github_import_stage_import_issues_and_diff_notes - github_importer:github_import_stage_import_issues_and_diff_notes
- github_importer:github_import_stage_import_notes - github_importer:github_import_stage_import_notes
- github_importer:github_import_stage_import_lfs_objects
- github_importer:github_import_stage_import_pull_requests - github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository - github_importer:github_import_stage_import_repository
......
...@@ -21,6 +21,7 @@ module Gitlab ...@@ -21,6 +21,7 @@ module Gitlab
STAGES = { STAGES = {
issues_and_diff_notes: Stage::ImportIssuesAndDiffNotesWorker, issues_and_diff_notes: Stage::ImportIssuesAndDiffNotesWorker,
notes: Stage::ImportNotesWorker, notes: Stage::ImportNotesWorker,
lfs_objects: Stage::ImportLfsObjectsWorker,
finish: Stage::FinishImportWorker finish: Stage::FinishImportWorker
}.freeze }.freeze
......
# frozen_string_literal: true

module Gitlab
  module GithubImport
    # Sidekiq worker that imports a single LFS object during a GitHub
    # project import. All behavior comes from the ObjectImporter concern;
    # this class only wires up the concrete classes and metric names.
    class ImportLfsObjectWorker
      include ObjectImporter

      # Representation wrapping the raw (oid, download_link) data.
      def representation_class
        Representation::LfsObject
      end

      # Importer that performs the actual download and linking.
      def importer_class
        Importer::LfsObjectImporter
      end

      # Metric counter identifier consumed by ObjectImporter.
      def counter_name
        :github_importer_imported_lfs_objects
      end

      def counter_description
        'The number of imported GitHub Lfs Objects'
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module GithubImport
    module Stage
      # Stage worker that schedules the parallel import of a project's LFS
      # objects and then advances the import pipeline to the :finish stage.
      class ImportLfsObjectsWorker
        include ApplicationWorker
        include GithubImport::Queue
        include StageMethods

        def perform(project_id)
          project = find_project(project_id)

          import(project) if project
        end

        # project - An instance of Project.
        def import(project)
          waiter = Importer::LfsObjectsImporter.new(project, nil).execute

          AdvanceStageWorker
            .perform_async(project.id, { waiter.key => waiter.jobs_remaining }, :finish)
        end
      end
    end
  end
end
...@@ -18,7 +18,7 @@ module Gitlab ...@@ -18,7 +18,7 @@ module Gitlab
AdvanceStageWorker.perform_async( AdvanceStageWorker.perform_async(
project.id, project.id,
{ waiter.key => waiter.jobs_remaining }, { waiter.key => waiter.jobs_remaining },
:finish :lfs_objects
) )
end end
end end
......
---
title: Showing project import_status in a humanized form no longer gives an error
merge_request: 19470
author:
type: fixed
---
title: Upgrade GitLab from Bootstrap 3 to 4
merge_request:
author:
type: other
---
title: Added support for LFS Download in the importing process
merge_request: 18871
author:
type: fixed
---
title: >-
  Log response body to production_json.log when a controller responds with a
  422 status
merge_request:
author:
type: other
---
title: Update screenshot in Gitlab.com integration documentation
merge_request: 19433
author: Tuğçe Nur Taş
type: other
...@@ -27,6 +27,7 @@ unless Sidekiq.server? ...@@ -27,6 +27,7 @@ unless Sidekiq.server?
gitaly_calls = Gitlab::GitalyClient.get_request_count gitaly_calls = Gitlab::GitalyClient.get_request_count
payload[:gitaly_calls] = gitaly_calls if gitaly_calls > 0 payload[:gitaly_calls] = gitaly_calls if gitaly_calls > 0
payload[:response] = event.payload[:response] if event.payload[:response]
payload payload
end end
......
...@@ -7,13 +7,11 @@ GitLab.com will generate an application ID and secret key for you to use. ...@@ -7,13 +7,11 @@ GitLab.com will generate an application ID and secret key for you to use.
1. Sign in to GitLab.com 1. Sign in to GitLab.com
1. Navigate to your profile settings. 1. On the upper right corner, click on your avatar and go to your **Settings**.
1. Select "Applications" in the left menu. 1. Select **Applications** in the left menu.
1. Select "New application". 1. Provide the required details for **Add new application**.
1. Provide the required details.
- Name: This can be anything. Consider something like `<Organization>'s GitLab` or `<Your Name>'s GitLab` or something else descriptive. - Name: This can be anything. Consider something like `<Organization>'s GitLab` or `<Your Name>'s GitLab` or something else descriptive.
- Redirect URI: - Redirect URI:
...@@ -24,9 +22,9 @@ GitLab.com will generate an application ID and secret key for you to use. ...@@ -24,9 +22,9 @@ GitLab.com will generate an application ID and secret key for you to use.
The first link is required for the importer and second for the authorization. The first link is required for the importer and second for the authorization.
1. Select "Submit". 1. Select **Save application**.
1. You should now see a Client ID and Client Secret near the top right of the page (see screenshot). 1. You should now see a **Application Id** and **Secret** near the top right of the page (see screenshot).
Keep this page open as you continue configuration. Keep this page open as you continue configuration.
![GitLab app](img/gitlab_app.png) ![GitLab app](img/gitlab_app.png)
......
doc/integration/img/gitlab_app.png

15 KB | W: | H:

doc/integration/img/gitlab_app.png

55.2 KB | W: | H:

doc/integration/img/gitlab_app.png
doc/integration/img/gitlab_app.png
doc/integration/img/gitlab_app.png
doc/integration/img/gitlab_app.png
  • 2-up
  • Swipe
  • Onion skin
...@@ -23,7 +23,7 @@ documentation. ...@@ -23,7 +23,7 @@ documentation.
GitLab is a fully integrated software development platform that enables you GitLab is a fully integrated software development platform that enables you
and your team to work cohesively, faster, transparently, and effectively, and your team to work cohesively, faster, transparently, and effectively,
since the discussion of a new idea until taking that idea to production all since the discussion of a new idea until taking that idea to production all
all the way through, from within the same platform. the way through, from within the same platform.
Please check this page for an overview on [GitLab's features](https://about.gitlab.com/features/). Please check this page for an overview on [GitLab's features](https://about.gitlab.com/features/).
......
...@@ -228,7 +228,7 @@ backoff period. ...@@ -228,7 +228,7 @@ backoff period.
If the mirror fails (eg: branch diverged from upstream), the project's backoff If the mirror fails (eg: branch diverged from upstream), the project's backoff
period will be penalized each time it fails up to a maximum amount of time. period will be penalized each time it fails up to a maximum amount of time.
## Pushing to a remote repository **[STARTER]** ## Pushing to a remote repository
>[Introduced](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/249) in >[Introduced](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/249) in
GitLab Enterprise Edition 8.7. [Moved to GitLab Community Edition][ce-18715] in 10.8. GitLab Enterprise Edition 8.7. [Moved to GitLab Community Edition][ce-18715] in 10.8.
......
# frozen_string_literal: true

module Gitlab
  module GithubImport
    module Importer
      # Imports a single LFS object by delegating the download and project
      # linking to Projects::LfsPointers::LfsDownloadService.
      class LfsObjectImporter
        attr_reader :lfs_object, :project

        # lfs_object - An instance of `Gitlab::GithubImport::Representation::LfsObject`.
        # project - An instance of `Project`.
        # _ - unused third argument, kept so the signature matches the
        #     importer interface (presumably a client — confirm against
        #     ObjectImporter before removing).
        def initialize(lfs_object, project, _)
          @lfs_object = lfs_object
          @project = project
        end

        def execute
          Projects::LfsPointers::LfsDownloadService
            .new(project)
            .execute(lfs_object.oid, lfs_object.download_link)
        end
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module GithubImport
    module Importer
      # Schedules the import of all of a project's LFS objects via the
      # generic ParallelScheduling machinery.
      class LfsObjectsImporter
        include ParallelScheduling

        def importer_class
          LfsObjectImporter
        end

        def representation_class
          Representation::LfsObject
        end

        def sidekiq_worker_class
          ImportLfsObjectWorker
        end

        def collection_method
          :lfs_objects
        end

        # Yields each [oid, download_link] pair produced by LfsImportService.
        # Errors are logged and swallowed deliberately so a failing LFS
        # discovery does not abort the whole project import.
        def each_object_to_import
          lfs_objects = Projects::LfsPointers::LfsImportService.new(project).execute

          lfs_objects.each do |object|
            yield object
          end
        rescue StandardError => e
          Rails.logger.error("The Lfs import process failed. #{e.message}")
        end
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module GithubImport
    module Representation
      # Value object exposing the oid and download link of an LFS object
      # discovered during a GitHub project import.
      class LfsObject
        include ToHash
        include ExposeAttribute

        attr_reader :attributes

        expose_attribute :oid, :download_link

        # Builds a lfs_object from an [oid, download_link] pair.
        def self.from_api_response(lfs_object)
          oid, download_link = lfs_object

          new(oid: oid, download_link: download_link)
        end

        # Builds a new lfs_object using a Hash that was built from a JSON payload.
        def self.from_json_hash(raw_hash)
          hash = Representation.symbolize_hash(raw_hash)

          new(hash)
        end

        # attributes - A Hash containing the raw lfs_object details. The keys of this
        #              Hash must be Symbols.
        def initialize(attributes)
          @attributes = attributes
        end
      end
    end
  end
end
...@@ -19,7 +19,8 @@ module Gitlab ...@@ -19,7 +19,8 @@ module Gitlab
Importer::PullRequestsImporter, Importer::PullRequestsImporter,
Importer::IssuesImporter, Importer::IssuesImporter,
Importer::DiffNotesImporter, Importer::DiffNotesImporter,
Importer::NotesImporter Importer::NotesImporter,
Importer::LfsObjectsImporter
].freeze ].freeze
# project - The project to import the data into. # project - The project to import the data into.
......
...@@ -24,10 +24,10 @@ module QA::Page ...@@ -24,10 +24,10 @@ module QA::Page
end end
end end
def has_build?(name, status: :success, wait:) def has_build?(name, status: :success, wait: nil)
within('.pipeline-graph') do within('.pipeline-graph') do
within('.ci-job-component', text: name) do within('.ci-job-component', text: name) do
has_selector?(".ci-status-icon-#{status}", wait: wait) has_selector?(".ci-status-icon-#{status}", { wait: wait }.compact)
end end
end end
end end
......
# coding: utf-8
require 'spec_helper' require 'spec_helper'
describe ApplicationController do describe ApplicationController do
...@@ -478,6 +479,63 @@ describe ApplicationController do ...@@ -478,6 +479,63 @@ describe ApplicationController do
end end
end end
describe '#append_info_to_payload' do
controller(described_class) do
attr_reader :last_payload
def index
render text: 'authenticated'
end
def append_info_to_payload(payload)
super
@last_payload = payload
end
end
it 'does not log errors with a 200 response' do
get :index
expect(controller.last_payload.has_key?(:response)).to be_falsey
end
context '422 errors' do
it 'logs a response with a string' do
response = spy(ActionDispatch::Response, status: 422, body: 'Hello world', content_type: 'application/json')
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload[:response]).to eq('Hello world')
end
it 'logs a response with an array' do
body = ['I want', 'my hat back']
response = spy(ActionDispatch::Response, status: 422, body: body, content_type: 'application/json')
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload[:response]).to eq(body)
end
it 'does not log a string with an empty body' do
response = spy(ActionDispatch::Response, status: 422, body: nil, content_type: 'application/json')
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload.has_key?(:response)).to be_falsey
end
it 'does not log an HTML body' do
response = spy(ActionDispatch::Response, status: 422, body: 'This is a test', content_type: 'application/html')
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload.has_key?(:response)).to be_falsey
end
end
end
describe '#access_denied' do describe '#access_denied' do
controller(described_class) do controller(described_class) do
def index def index
......
import Vue from 'vue';
import { createStore } from '~/ide/stores';
import Dropdown from '~/ide/components/merge_requests/dropdown.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { mergeRequests } from '../../mock_data';

// Specs for the IDE merge requests dropdown: tab rendering is gated by the
// `show` prop, and per-tab counts are read from the Vuex store state.
describe('IDE merge requests dropdown', () => {
  const Component = Vue.extend(Dropdown);
  let vm;

  beforeEach(() => {
    // Fresh store per spec so state mutations do not leak between tests.
    const store = createStore();

    vm = createComponentWithStore(Component, store, { show: false }).$mount();
  });

  afterEach(() => {
    vm.$destroy();
  });

  it('does not render tabs when show is false', () => {
    expect(vm.$el.querySelector('.nav-links')).toBe(null);
  });

  describe('when show is true', () => {
    beforeEach(done => {
      vm.show = true;
      // One assigned MR so the "Assigned to me" badge reads 1.
      vm.$store.state.mergeRequests.assigned.mergeRequests.push(mergeRequests[0]);

      // Wait for the DOM to re-render before asserting.
      vm.$nextTick(done);
    });

    it('renders tabs', () => {
      expect(vm.$el.querySelector('.nav-links')).not.toBe(null);
    });

    it('renders count for assigned & created data', () => {
      expect(vm.$el.querySelector('.nav-links a').textContent).toContain('Created by me');
      expect(vm.$el.querySelector('.nav-links a .badge').textContent).toContain('0');
      expect(vm.$el.querySelectorAll('.nav-links a')[1].textContent).toContain('Assigned to me');
      expect(
        vm.$el.querySelectorAll('.nav-links a')[1].querySelector('.badge').textContent,
      ).toContain('1');
    });
  });
});
import Vue from 'vue';
import Item from '~/ide/components/merge_requests/item.vue';
import mountComponent from '../../../helpers/vue_mount_component_helper';

describe('IDE merge request item', () => {
  let vm;

  // Icon shown when this item is the currently-open merge request.
  const currentIcon = () => vm.$el.querySelector('.ic-mobile-issue-close');

  beforeEach(() => {
    vm = mountComponent(Vue.extend(Item), {
      item: {
        iid: 1,
        projectPathWithNamespace: 'gitlab-org/gitlab-ce',
        title: 'Merge request title',
      },
      currentId: '1',
      currentProjectId: 'gitlab-org/gitlab-ce',
    });
  });

  afterEach(() => {
    vm.$destroy();
  });

  it('renders merge requests data', () => {
    expect(vm.$el.textContent).toContain('Merge request title');
    expect(vm.$el.textContent).toContain('gitlab-org/gitlab-ce!1');
  });

  it('renders icon if ID matches currentId', () => {
    expect(currentIcon()).not.toBe(null);
  });

  it('does not render icon if ID does not match currentId', done => {
    vm.currentId = '2';

    vm.$nextTick(() => {
      expect(currentIcon()).toBe(null);

      done();
    });
  });

  it('does not render icon if project ID does not match', done => {
    vm.currentProjectId = 'test/test';

    vm.$nextTick(() => {
      expect(currentIcon()).toBe(null);

      done();
    });
  });

  it('emits click event on click', () => {
    spyOn(vm, '$emit');

    vm.$el.click();

    expect(vm.$emit).toHaveBeenCalledWith('click', vm.item);
  });
});
import Vue from 'vue';
import store from '~/ide/stores';
import List from '~/ide/components/merge_requests/list.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { mergeRequests } from '../../mock_data';
import { resetStore } from '../../helpers';

describe('IDE merge requests list', () => {
  let vm;

  beforeEach(() => {
    const ListComponent = Vue.extend(List);

    vm = createComponentWithStore(ListComponent, store, {
      type: 'created',
      emptyText: 'empty text',
    });

    // Stub the fetch action before mounting so mounted() does not hit the API.
    spyOn(vm, 'fetchMergeRequests');

    vm.$mount();
  });

  afterEach(() => {
    vm.$destroy();

    resetStore(vm.$store);
  });

  it('calls fetch on mounted', () => {
    expect(vm.fetchMergeRequests).toHaveBeenCalledWith({
      type: 'created',
      search: '',
    });
  });

  it('renders loading icon', done => {
    vm.$store.state.mergeRequests.created.isLoading = true;

    vm.$nextTick(() => {
      expect(vm.$el.querySelector('.loading-container')).not.toBe(null);

      done();
    });
  });

  it('renders empty text when no merge requests exist', () => {
    expect(vm.$el.textContent).toContain('empty text');
  });

  it('renders no search results text when search is not empty', done => {
    vm.search = 'testing';

    vm.$nextTick(() => {
      expect(vm.$el.textContent).toContain('No merge requests found');

      done();
    });
  });

  describe('with merge requests', () => {
    beforeEach(done => {
      vm.$store.state.mergeRequests.created.mergeRequests.push({
        ...mergeRequests[0],
        projectPathWithNamespace: 'gitlab-org/gitlab-ce',
      });

      vm.$nextTick(done);
    });

    it('renders list', () => {
      const listItems = vm.$el.querySelectorAll('li');

      expect(listItems.length).toBe(1);
      expect(listItems[0].textContent).toContain(mergeRequests[0].title);
    });

    it('calls openMergeRequest when clicking merge request', done => {
      spyOn(vm, 'openMergeRequest');

      vm.$el.querySelector('li button').click();

      vm.$nextTick(() => {
        expect(vm.openMergeRequest).toHaveBeenCalledWith({
          projectPath: 'gitlab-org/gitlab-ce',
          id: 1,
        });

        done();
      });
    });
  });

  describe('focusSearch', () => {
    it('focuses search input when loading is false', done => {
      spyOn(vm.$refs.searchInput, 'focus');

      vm.$store.state.mergeRequests.created.isLoading = false;
      vm.focusSearch();

      vm.$nextTick(() => {
        expect(vm.$refs.searchInput.focus).toHaveBeenCalled();

        done();
      });
    });
  });

  describe('searchMergeRequests', () => {
    beforeEach(() => {
      spyOn(vm, 'loadMergeRequests');

      jasmine.clock().install();
    });

    afterEach(() => {
      jasmine.clock().uninstall();
    });

    it('calls loadMergeRequests on input in search field', () => {
      vm.$el.querySelector('input').dispatchEvent(new Event('input'));

      // Search input is debounced; advance past the debounce window.
      jasmine.clock().tick(300);

      expect(vm.loadMergeRequests).toHaveBeenCalled();
    });
  });
});
...@@ -8,7 +8,9 @@ import actions, { ...@@ -8,7 +8,9 @@ import actions, {
receiveMergeRequestsSuccess, receiveMergeRequestsSuccess,
fetchMergeRequests, fetchMergeRequests,
resetMergeRequests, resetMergeRequests,
openMergeRequest,
} from '~/ide/stores/modules/merge_requests/actions'; } from '~/ide/stores/modules/merge_requests/actions';
import router from '~/ide/ide_router';
import { mergeRequests } from '../../../mock_data'; import { mergeRequests } from '../../../mock_data';
import testAction from '../../../../helpers/vuex_action_helper'; import testAction from '../../../../helpers/vuex_action_helper';
...@@ -29,9 +31,9 @@ describe('IDE merge requests actions', () => { ...@@ -29,9 +31,9 @@ describe('IDE merge requests actions', () => {
it('should should commit request', done => { it('should should commit request', done => {
testAction( testAction(
requestMergeRequests, requestMergeRequests,
null, 'created',
mockedState, mockedState,
[{ type: types.REQUEST_MERGE_REQUESTS }], [{ type: types.REQUEST_MERGE_REQUESTS, payload: 'created' }],
[], [],
done, done,
); );
...@@ -48,16 +50,16 @@ describe('IDE merge requests actions', () => { ...@@ -48,16 +50,16 @@ describe('IDE merge requests actions', () => {
it('should should commit error', done => { it('should should commit error', done => {
testAction( testAction(
receiveMergeRequestsError, receiveMergeRequestsError,
null, 'created',
mockedState, mockedState,
[{ type: types.RECEIVE_MERGE_REQUESTS_ERROR }], [{ type: types.RECEIVE_MERGE_REQUESTS_ERROR, payload: 'created' }],
[], [],
done, done,
); );
}); });
it('creates flash message', () => { it('creates flash message', () => {
receiveMergeRequestsError({ commit() {} }); receiveMergeRequestsError({ commit() {} }, 'created');
expect(flashSpy).toHaveBeenCalled(); expect(flashSpy).toHaveBeenCalled();
}); });
...@@ -67,9 +69,14 @@ describe('IDE merge requests actions', () => { ...@@ -67,9 +69,14 @@ describe('IDE merge requests actions', () => {
it('should commit received data', done => { it('should commit received data', done => {
testAction( testAction(
receiveMergeRequestsSuccess, receiveMergeRequestsSuccess,
'data', { type: 'created', data: 'data' },
mockedState, mockedState,
[{ type: types.RECEIVE_MERGE_REQUESTS_SUCCESS, payload: 'data' }], [
{
type: types.RECEIVE_MERGE_REQUESTS_SUCCESS,
payload: { type: 'created', data: 'data' },
},
],
[], [],
done, done,
); );
...@@ -86,14 +93,14 @@ describe('IDE merge requests actions', () => { ...@@ -86,14 +93,14 @@ describe('IDE merge requests actions', () => {
mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(200, mergeRequests); mock.onGet(/\/api\/v4\/merge_requests(.*)$/).replyOnce(200, mergeRequests);
}); });
it('calls API with params from state', () => { it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough(); const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchMergeRequests({ dispatch() {}, state: mockedState }); fetchMergeRequests({ dispatch() {}, state: mockedState }, { type: 'created' });
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), { expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: { params: {
scope: 'assigned-to-me', scope: 'created-by-me',
state: 'opened', state: 'opened',
search: '', search: '',
}, },
...@@ -103,11 +110,14 @@ describe('IDE merge requests actions', () => { ...@@ -103,11 +110,14 @@ describe('IDE merge requests actions', () => {
it('calls API with search', () => { it('calls API with search', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough(); const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchMergeRequests({ dispatch() {}, state: mockedState }, 'testing search'); fetchMergeRequests(
{ dispatch() {}, state: mockedState },
{ type: 'created', search: 'testing search' },
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), { expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: { params: {
scope: 'assigned-to-me', scope: 'created-by-me',
state: 'opened', state: 'opened',
search: 'testing search', search: 'testing search',
}, },
...@@ -117,7 +127,7 @@ describe('IDE merge requests actions', () => { ...@@ -117,7 +127,7 @@ describe('IDE merge requests actions', () => {
it('dispatches request', done => { it('dispatches request', done => {
testAction( testAction(
fetchMergeRequests, fetchMergeRequests,
null, { type: 'created' },
mockedState, mockedState,
[], [],
[ [
...@@ -132,13 +142,16 @@ describe('IDE merge requests actions', () => { ...@@ -132,13 +142,16 @@ describe('IDE merge requests actions', () => {
it('dispatches success with received data', done => { it('dispatches success with received data', done => {
testAction( testAction(
fetchMergeRequests, fetchMergeRequests,
null, { type: 'created' },
mockedState, mockedState,
[], [],
[ [
{ type: 'requestMergeRequests' }, { type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' }, { type: 'resetMergeRequests' },
{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests }, {
type: 'receiveMergeRequestsSuccess',
payload: { type: 'created', data: mergeRequests },
},
], ],
done, done,
); );
...@@ -153,7 +166,7 @@ describe('IDE merge requests actions', () => { ...@@ -153,7 +166,7 @@ describe('IDE merge requests actions', () => {
it('dispatches error', done => { it('dispatches error', done => {
testAction( testAction(
fetchMergeRequests, fetchMergeRequests,
null, { type: 'created' },
mockedState, mockedState,
[], [],
[ [
...@@ -171,12 +184,47 @@ describe('IDE merge requests actions', () => { ...@@ -171,12 +184,47 @@ describe('IDE merge requests actions', () => {
it('commits reset', done => { it('commits reset', done => {
testAction( testAction(
resetMergeRequests, resetMergeRequests,
null, 'created',
mockedState, mockedState,
[{ type: types.RESET_MERGE_REQUESTS }], [{ type: types.RESET_MERGE_REQUESTS, payload: 'created' }],
[], [],
done, done,
); );
}); });
}); });
describe('openMergeRequest', () => {
beforeEach(() => {
spyOn(router, 'push');
});
it('commits reset mutations and actions', done => {
testAction(
openMergeRequest,
{ projectPath: 'gitlab-org/gitlab-ce', id: '1' },
mockedState,
[
{ type: 'CLEAR_PROJECTS' },
{ type: 'SET_CURRENT_MERGE_REQUEST', payload: '1' },
{ type: 'RESET_OPEN_FILES' },
],
[
{ type: 'pipelines/stopPipelinePolling' },
{ type: 'pipelines/clearEtagPoll' },
{ type: 'pipelines/resetLatestPipeline' },
{ type: 'setCurrentBranchId', payload: '' },
],
done,
);
});
it('pushes new route', () => {
openMergeRequest(
{ commit() {}, dispatch() {} },
{ projectPath: 'gitlab-org/gitlab-ce', id: '1' },
);
expect(router.push).toHaveBeenCalledWith('/project/gitlab-org/gitlab-ce/merge_requests/1');
});
});
}); });
...@@ -12,26 +12,29 @@ describe('IDE merge requests mutations', () => { ...@@ -12,26 +12,29 @@ describe('IDE merge requests mutations', () => {
describe(types.REQUEST_MERGE_REQUESTS, () => { describe(types.REQUEST_MERGE_REQUESTS, () => {
it('sets loading to true', () => { it('sets loading to true', () => {
mutations[types.REQUEST_MERGE_REQUESTS](mockedState); mutations[types.REQUEST_MERGE_REQUESTS](mockedState, 'created');
expect(mockedState.isLoading).toBe(true); expect(mockedState.created.isLoading).toBe(true);
}); });
}); });
describe(types.RECEIVE_MERGE_REQUESTS_ERROR, () => { describe(types.RECEIVE_MERGE_REQUESTS_ERROR, () => {
it('sets loading to false', () => { it('sets loading to false', () => {
mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState); mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState, 'created');
expect(mockedState.isLoading).toBe(false); expect(mockedState.created.isLoading).toBe(false);
}); });
}); });
describe(types.RECEIVE_MERGE_REQUESTS_SUCCESS, () => { describe(types.RECEIVE_MERGE_REQUESTS_SUCCESS, () => {
it('sets merge requests', () => { it('sets merge requests', () => {
gon.gitlab_url = gl.TEST_HOST; gon.gitlab_url = gl.TEST_HOST;
mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, mergeRequests); mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, {
type: 'created',
data: mergeRequests,
});
expect(mockedState.mergeRequests).toEqual([ expect(mockedState.created.mergeRequests).toEqual([
{ {
id: 1, id: 1,
iid: 1, iid: 1,
...@@ -47,9 +50,9 @@ describe('IDE merge requests mutations', () => { ...@@ -47,9 +50,9 @@ describe('IDE merge requests mutations', () => {
it('clears merge request array', () => { it('clears merge request array', () => {
mockedState.mergeRequests = ['test']; mockedState.mergeRequests = ['test'];
mutations[types.RESET_MERGE_REQUESTS](mockedState); mutations[types.RESET_MERGE_REQUESTS](mockedState, 'created');
expect(mockedState.mergeRequests).toEqual([]); expect(mockedState.created.mergeRequests).toEqual([]);
}); });
}); });
}); });
require 'spec_helper'

describe Gitlab::GithubImport::Importer::LfsObjectImporter do
  let(:project) { create(:project) }
  let(:download_link) { "http://www.gitlab.com/lfs_objects/oid" }

  # Representation object the importer receives from the scheduler.
  let(:github_lfs_object) do
    Gitlab::GithubImport::Representation::LfsObject.new(
      oid: 'oid', download_link: download_link
    )
  end

  describe '#execute' do
    it 'calls the LfsDownloadService with the lfs object attributes' do
      importer = described_class.new(github_lfs_object, project, nil)

      expect_any_instance_of(Projects::LfsPointers::LfsDownloadService)
        .to receive(:execute).with('oid', download_link)

      importer.execute
    end
  end
end
require 'spec_helper'

describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
  let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
  let(:client) { double(:client) }
  let(:download_link) { "http://www.gitlab.com/lfs_objects/oid" }

  let(:github_lfs_object) { ['oid', download_link] }

  # Importer built with the default (parallel) settings.
  let(:parallel_importer) { described_class.new(project, client) }
  # Importer explicitly forced into sequential mode.
  let(:sequential_importer) { described_class.new(project, client, parallel: false) }

  describe '#parallel?' do
    it 'returns true when running in parallel mode' do
      expect(parallel_importer).to be_parallel
    end

    it 'returns false when running in sequential mode' do
      expect(sequential_importer).not_to be_parallel
    end
  end

  describe '#execute' do
    context 'when running in parallel mode' do
      it 'imports lfs objects in parallel' do
        expect(parallel_importer).to receive(:parallel_import)

        parallel_importer.execute
      end
    end

    context 'when running in sequential mode' do
      it 'imports lfs objects in sequence' do
        expect(sequential_importer).to receive(:sequential_import)

        sequential_importer.execute
      end
    end
  end

  describe '#sequential_import' do
    it 'imports each lfs object in sequence' do
      object_importer = double(:lfs_object_importer)

      allow(sequential_importer)
        .to receive(:each_object_to_import)
        .and_yield(['oid', download_link])

      expect(Gitlab::GithubImport::Importer::LfsObjectImporter)
        .to receive(:new)
        .with(
          an_instance_of(Gitlab::GithubImport::Representation::LfsObject),
          project,
          client
        )
        .and_return(object_importer)

      expect(object_importer).to receive(:execute)

      sequential_importer.sequential_import
    end
  end

  describe '#parallel_import' do
    it 'imports each lfs object in parallel' do
      allow(parallel_importer)
        .to receive(:each_object_to_import)
        .and_yield(github_lfs_object)

      expect(Gitlab::GithubImport::ImportLfsObjectWorker)
        .to receive(:perform_async)
        .with(project.id, an_instance_of(Hash), an_instance_of(String))

      waiter = parallel_importer.parallel_import

      expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
      expect(waiter.jobs_remaining).to eq(1)
    end
  end

  describe '#collection_options' do
    it 'returns an empty Hash' do
      expect(parallel_importer.collection_options).to eq({})
    end
  end
end
...@@ -14,7 +14,8 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do ...@@ -14,7 +14,8 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
disk_path: 'foo', disk_path: 'foo',
repository: repository, repository: repository,
create_wiki: true, create_wiki: true,
import_state: import_state import_state: import_state,
lfs_enabled?: true
) )
end end
......
...@@ -91,4 +91,23 @@ describe Gitlab::ImportSources do ...@@ -91,4 +91,23 @@ describe Gitlab::ImportSources do
end end
end end
end end
describe 'imports_repository? checker' do
let(:allowed_importers) { %w[github gitlab_project] }
it 'fails if any importer other than the allowed ones implements this method' do
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
not_allowed_importers = current_importers - allowed_importers
expect(not_allowed_importers).to be_empty, failure_message(not_allowed_importers)
end
def failure_message(importers_class_names)
<<-MSG
It looks like the #{importers_class_names.join(', ')} importers implements its own way to import the repository.
That means that the lfs object download must be handled for each of them. You can use 'LfsImportService' and
'LfsDownloadService' to implement it. After that, add the importer name to the list of allowed importers in this spec.
MSG
end
end
end end
...@@ -1844,6 +1844,31 @@ describe Project do ...@@ -1844,6 +1844,31 @@ describe Project do
end end
end end
describe '#human_import_status_name' do
context 'when import_state exists' do
it 'returns the humanized status name' do
project = create(:project)
create(:import_state, :started, project: project)
expect(project.human_import_status_name).to eq("started")
end
end
context 'when import_state was not created yet' do
let(:project) { create(:project, :import_started) }
it 'ensures import_state is created and returns humanized status name' do
expect do
project.human_import_status_name
end.to change { ProjectImportState.count }.from(0).to(1)
end
it 'returns humanized status name' do
expect(project.human_import_status_name).to eq("started")
end
end
end
describe 'Project import job' do describe 'Project import job' do
let(:project) { create(:project, import_url: generate(:url)) } let(:project) { create(:project, import_url: generate(:url)) }
......
...@@ -279,6 +279,7 @@ describe API::Commits do ...@@ -279,6 +279,7 @@ describe API::Commits do
expect(json_response['committer_email']).to eq(user.email) expect(json_response['committer_email']).to eq(user.email)
end end
<<<<<<< HEAD
it 'does not call the metrics using access token authentication' do it 'does not call the metrics using access token authentication' do
stub_licensed_features(ide: true) stub_licensed_features(ide: true)
...@@ -287,6 +288,8 @@ describe API::Commits do ...@@ -287,6 +288,8 @@ describe API::Commits do
expect_any_instance_of(::Gitlab::Metrics::MultiFileEditor).not_to receive(:record) expect_any_instance_of(::Gitlab::Metrics::MultiFileEditor).not_to receive(:record)
end end
=======
>>>>>>> upstream/master
it 'returns a 400 bad request if file exists' do it 'returns a 400 bad request if file exists' do
post api(url, user), invalid_c_params post api(url, user), invalid_c_params
......
...@@ -3,9 +3,17 @@ require 'spec_helper' ...@@ -3,9 +3,17 @@ require 'spec_helper'
describe Projects::ImportService do describe Projects::ImportService do
let!(:project) { create(:project) } let!(:project) { create(:project) }
let(:user) { project.creator } let(:user) { project.creator }
let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
let(:oid_download_links) { { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1", 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" } }
subject { described_class.new(project, user) } subject { described_class.new(project, user) }
before do
allow(project).to receive(:lfs_enabled?).and_return(true)
allow_any_instance_of(Projects::LfsPointers::LfsDownloadService).to receive(:execute)
allow_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(oid_download_links)
end
describe '#async?' do describe '#async?' do
it 'returns true for an asynchronous importer' do it 'returns true for an asynchronous importer' do
importer_class = double(:importer, async?: true) importer_class = double(:importer, async?: true)
...@@ -63,6 +71,15 @@ describe Projects::ImportService do ...@@ -63,6 +71,15 @@ describe Projects::ImportService do
expect(result[:status]).to eq :error expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - The repository could not be created." expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - The repository could not be created."
end end
context 'when repository creation succeeds' do
it 'does not download lfs files' do
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
expect_any_instance_of(Projects::LfsPointers::LfsDownloadService).not_to receive(:execute)
subject.execute
end
end
end end
context 'with known url' do context 'with known url' do
...@@ -91,6 +108,15 @@ describe Projects::ImportService do ...@@ -91,6 +108,15 @@ describe Projects::ImportService do
expect(result[:status]).to eq :error expect(result[:status]).to eq :error
end end
context 'when repository import scheduled' do
it 'does not download lfs objects' do
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
expect_any_instance_of(Projects::LfsPointers::LfsDownloadService).not_to receive(:execute)
subject.execute
end
end
end end
context 'with a non Github repository' do context 'with a non Github repository' do
...@@ -99,9 +125,10 @@ describe Projects::ImportService do ...@@ -99,9 +125,10 @@ describe Projects::ImportService do
project.import_type = 'bitbucket' project.import_type = 'bitbucket'
end end
it 'succeeds if repository import is successfully' do it 'succeeds if repository import is successfull' do
expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true) expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true)
expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true) expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return({})
result = subject.execute result = subject.execute
...@@ -116,6 +143,29 @@ describe Projects::ImportService do ...@@ -116,6 +143,29 @@ describe Projects::ImportService do
expect(result[:status]).to eq :error expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - Failed to import the repository" expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - Failed to import the repository"
end end
context 'when repository import scheduled' do
before do
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true)
allow(subject).to receive(:import_data)
end
it 'downloads lfs objects if lfs_enabled is enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(oid_download_links)
expect_any_instance_of(Projects::LfsPointers::LfsDownloadService).to receive(:execute).twice
subject.execute
end
it 'does not download lfs objects if lfs_enabled is not enabled for project' do
allow(project).to receive(:lfs_enabled?).and_return(false)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
expect_any_instance_of(Projects::LfsPointers::LfsDownloadService).not_to receive(:execute)
subject.execute
end
end
end end
end end
...@@ -147,6 +197,26 @@ describe Projects::ImportService do ...@@ -147,6 +197,26 @@ describe Projects::ImportService do
expect(result[:status]).to eq :error expect(result[:status]).to eq :error
end end
context 'when importer' do
it 'has a custom repository importer it does not download lfs objects' do
allow(Gitlab::GithubImport::ParallelImporter).to receive(:imports_repository?).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).not_to receive(:execute)
expect_any_instance_of(Projects::LfsPointers::LfsDownloadService).not_to receive(:execute)
subject.execute
end
it 'does not have a custom repository importer downloads lfs objects' do
allow(Gitlab::GithubImport::ParallelImporter).to receive(:imports_repository?).and_return(false)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(oid_download_links)
expect_any_instance_of(Projects::LfsPointers::LfsDownloadService).to receive(:execute)
subject.execute
end
end
end end
context 'with blocked import_URL' do context 'with blocked import_URL' do
......
require 'spec_helper'

describe Projects::LfsPointers::LfsDownloadLinkListService do
  let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
  let(:lfs_endpoint) { "#{import_url}/info/lfs/objects/batch" }
  let!(:project) { create(:project, import_url: import_url) }
  let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
  let(:remote_uri) { URI.parse(lfs_endpoint) }

  # Fake LFS batch API response: one download action per requested oid.
  let(:objects_response) do
    body = new_oids.map do |oid, size|
      {
        'oid' => oid,
        'size' => size,
        'actions' => {
          'download' => { 'href' => "#{import_url}/gitlab-lfs/objects/#{oid}" }
        }
      }
    end

    Struct.new(:success?, :objects).new(true, body)
  end

  # Object entry with no download 'href'; the service should skip it.
  let(:invalid_object_response) do
    [
      'oid' => 'whatever',
      'size' => 123
    ]
  end

  subject { described_class.new(project, remote_uri: remote_uri) }

  before do
    allow(project).to receive(:lfs_enabled?).and_return(true)
    allow(Gitlab::HTTP).to receive(:post).and_return(objects_response)
  end

  describe '#execute' do
    it 'retrieves each download link of every non existent lfs object' do
      subject.execute(new_oids).each do |oid, link|
        expect(link).to eq "#{import_url}/gitlab-lfs/objects/#{oid}"
      end
    end

    context 'credentials' do
      context 'when the download link and the lfs_endpoint have the same host' do
        context 'when lfs_endpoint has credentials' do
          let(:import_url) { 'http://user:password@www.gitlab.com/demo/repo.git' }

          it 'adds credentials to the download_link' do
            result = subject.execute(new_oids)

            result.each do |oid, link|
              expect(link.starts_with?('http://user:password@')).to be_truthy
            end
          end
        end

        context 'when lfs_endpoint does not have any credentials' do
          it 'does not add any credentials' do
            result = subject.execute(new_oids)

            result.each do |oid, link|
              expect(link.starts_with?('http://user:password@')).to be_falsey
            end
          end
        end
      end

      context 'when the download link and the lfs_endpoint have different hosts' do
        let(:import_url_with_credentials) { 'http://user:password@www.otherdomain.com/demo/repo.git' }
        let(:lfs_endpoint) { "#{import_url_with_credentials}/info/lfs/objects/batch" }

        it 'downloads without any credentials' do
          result = subject.execute(new_oids)

          result.each do |oid, link|
            expect(link.starts_with?('http://user:password@')).to be_falsey
          end
        end
      end
    end
  end

  describe '#get_download_links' do
    # Was: 'raise errorif request fails' — typo in the example description.
    it 'raises an error if the request fails' do
      allow(Gitlab::HTTP).to receive(:post).and_return(Struct.new(:success?, :message).new(false, 'Failed request'))

      expect { subject.send(:get_download_links, new_oids) }.to raise_error(described_class::DownloadLinksError)
    end
  end

  describe '#parse_response_links' do
    it 'does not add oid entry if href not found' do
      expect(Rails.logger).to receive(:error).with("Link for Lfs Object with oid whatever not found or invalid.")

      result = subject.send(:parse_response_links, invalid_object_response)

      expect(result).to be_empty
    end
  end
end
require 'spec_helper'

describe Projects::LfsPointers::LfsDownloadService do
  let(:project) { create(:project) }
  let(:oid) { '9e548e25631dd9ce6b43afd6359ab76da2819d6a5b474e66118c7819e1d8b3e8' }
  let(:download_link) { "http://gitlab.com/#{oid}" }

  let(:lfs_content) do
    <<~HEREDOC
      whatever
    HEREDOC
  end

  subject { described_class.new(project) }

  before do
    allow(project).to receive(:lfs_enabled?).and_return(true)
    WebMock.stub_request(:get, download_link).to_return(body: lfs_content)
  end

  describe '#execute' do
    context 'when file download succeeds' do
      it 'a new lfs object is created' do
        expect { subject.execute(oid, download_link) }.to change { LfsObject.count }.from(0).to(1)
      end

      it 'has the same oid' do
        subject.execute(oid, download_link)

        expect(LfsObject.first.oid).to eq oid
      end

      it 'stores the content' do
        subject.execute(oid, download_link)

        expect(File.read(LfsObject.first.file.file.file)).to eq lfs_content
      end
    end

    context 'when file download fails' do
      before do
        # Force the request to fail so this context actually exercises the
        # failure path; the outer `before` stubbed a successful download.
        # NOTE(review): the original example asserted `to change` (i.e. an
        # object IS created), contradicting its own name — fixed to
        # `not_to change`. Assumes the service rescues download errors
        # rather than raising; confirm against the service implementation.
        WebMock.stub_request(:get, download_link).to_raise(StandardError)
      end

      it 'no lfs object is created' do
        expect { subject.execute(oid, download_link) }.not_to change { LfsObject.count }
      end
    end

    context 'when credentials present' do
      let(:download_link_with_credentials) { "http://user:password@gitlab.com/#{oid}" }

      before do
        WebMock.stub_request(:get, download_link).with(headers: { 'Authorization' => 'Basic dXNlcjpwYXNzd29yZA==' }).to_return(body: lfs_content)
      end

      it 'the request adds authorization headers' do
        subject.execute(oid, download_link_with_credentials)
      end
    end

    context 'when an lfs object with the same oid already exists' do
      before do
        create(:lfs_object, oid: 'oid')
      end

      it 'does not download the file' do
        expect(subject).not_to receive(:download_and_save_file)

        subject.execute('oid', download_link)
      end
    end
  end
end
require 'spec_helper'

# Specs for Projects::LfsPointers::LfsImportService.
#
# The service orchestrates the LFS import flow for a project:
#   1. list all LFS pointers in the repository (LfsListService),
#   2. link objects already known to the instance (LfsLinkService),
#   3. resolve download links for the remaining objects
#      (LfsDownloadLinkListService), honouring any `.lfsconfig` override.
describe Projects::LfsPointers::LfsImportService do
  let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
  # Default LFS batch API endpoint derived from the import URL.
  let(:default_endpoint) { "#{import_url}/info/lfs/objects/batch" }
  let(:group) { create(:group, lfs_enabled: true) }
  let!(:project) { create(:project, namespace: group, import_url: import_url, lfs_enabled: true) }
  let!(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
  # OID => size pairs already present in the database before the import runs.
  let!(:existing_lfs_objects) { LfsObject.pluck(:oid, :size).to_h }
  # OIDs that only exist on the remote side.
  let(:oids) { { 'oid1' => 123, 'oid2' => 125 } }
  let(:oid_download_links) { { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1", 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" } }
  let(:all_oids) { existing_lfs_objects.merge(oids) }
  # NOTE: `lfs_endpoint` is only defined in the contexts that use `remote_uri`.
  let(:remote_uri) { URI.parse(lfs_endpoint) }

  subject { described_class.new(project) }

  before do
    # No `.lfsconfig` file by default; specific contexts override this stub.
    allow(project.repository).to receive(:lfsconfig_for).and_return(nil)
    allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
    allow_any_instance_of(Projects::LfsPointers::LfsListService).to receive(:execute).and_return(all_oids)
  end

  describe '#execute' do
    context 'when no lfs pointer is linked' do
      before do
        allow_any_instance_of(Projects::LfsPointers::LfsLinkService).to receive(:execute).and_return([])
        allow_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).and_return(oid_download_links)
        # With no .lfsconfig, the service must target the default batch endpoint.
        expect(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:new).with(project, remote_uri: URI.parse(default_endpoint)).and_call_original
      end

      it 'retrieves all lfs pointers in the project repository' do
        expect_any_instance_of(Projects::LfsPointers::LfsListService).to receive(:execute)

        subject.execute
      end

      it 'links existent lfs objects to the project' do
        expect_any_instance_of(Projects::LfsPointers::LfsLinkService).to receive(:execute)

        subject.execute
      end

      it 'retrieves the download links of non existent objects' do
        # Nothing was linked, so every OID still needs a download link.
        expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(all_oids)

        subject.execute
      end
    end

    context 'when some lfs objects are linked' do
      before do
        allow_any_instance_of(Projects::LfsPointers::LfsLinkService).to receive(:execute).and_return(existing_lfs_objects.keys)
        allow_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).and_return(oid_download_links)
      end

      it 'retrieves the download links of non existent objects' do
        # Only the OIDs that were not linked should be requested.
        expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(oids)

        subject.execute
      end
    end

    context 'when all lfs objects are linked' do
      before do
        allow_any_instance_of(Projects::LfsPointers::LfsLinkService).to receive(:execute).and_return(all_oids.keys)
        allow_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute)
      end

      it 'retrieves no download links' do
        expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with({}).and_call_original

        expect(subject.execute).to be_empty
      end
    end

    context 'when lfsconfig file exists' do
      before do
        allow(project.repository).to receive(:lfsconfig_for).and_return("[lfs]\n\turl = #{lfs_endpoint}\n")
      end

      context 'when url points to the same import url host' do
        let(:lfs_endpoint) { "#{import_url}/different_endpoint" }
        let(:service) { double }

        before do
          allow(service).to receive(:execute)
        end

        it 'downloads lfs object using the new endpoint' do
          expect(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:new).with(project, remote_uri: remote_uri).and_return(service)

          subject.execute
        end

        context 'when import url has credentials' do
          let(:import_url) { 'http://user:password@www.gitlab.com/demo/repo.git' }

          it 'adds the credentials to the new endpoint' do
            expect(Projects::LfsPointers::LfsDownloadLinkListService)
              .to receive(:new).with(project, remote_uri: URI.parse("http://user:password@www.gitlab.com/demo/repo.git/different_endpoint"))
              .and_return(service)

            subject.execute
          end

          context 'when url has its own credentials' do
            let(:lfs_endpoint) { "http://user1:password1@www.gitlab.com/demo/repo.git/different_endpoint" }

            it 'does not add the import url credentials' do
              expect(Projects::LfsPointers::LfsDownloadLinkListService)
                .to receive(:new).with(project, remote_uri: remote_uri)
                .and_return(service)

              subject.execute
            end
          end
        end
      end

      context 'when url points to a third party service' do
        let(:lfs_endpoint) { 'http://third_party_service.com/info/lfs/objects/' }

        it 'disables lfs from the project' do
          expect(project.lfs_enabled?).to be_truthy

          subject.execute

          expect(project.lfs_enabled?).to be_falsey
        end

        it 'does not download anything' do
          expect_any_instance_of(Projects::LfsPointers::LfsListService).not_to receive(:execute)

          subject.execute
        end
      end
    end
  end

  describe '#default_endpoint_uri' do
    let(:import_url) { 'http://www.gitlab.com/demo/repo' }

    it 'adds suffix .git if the url does not have it' do
      # Escape the dot so the regex matches a literal ".git" instead of
      # any character followed by "git" (e.g. it must not match "repoXgit").
      expect(subject.send(:default_endpoint_uri).path).to match(/repo\.git/)
    end
  end
end
require 'spec_helper'

# Specs for Projects::LfsPointers::LfsLinkService, which links LFS objects
# that already exist in the database to the given project and returns the
# OIDs it linked.
describe Projects::LfsPointers::LfsLinkService do
  let!(:project) { create(:project, lfs_enabled: true) }
  # Two LFS objects already linked to the project via the factory.
  let!(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
  # OIDs with no corresponding LfsObject row — these cannot be linked.
  let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
  let(:all_oids) { LfsObject.pluck(:oid, :size).to_h.merge(new_oids) }
  # A stored LFS object that is not yet linked to the project.
  let(:new_lfs_object) { create(:lfs_object) }
  let(:new_oid_list) { all_oids.merge(new_lfs_object.oid => new_lfs_object.size) }

  subject { described_class.new(project) }

  before do
    allow(project).to receive(:lfs_enabled?).and_return(true)
  end

  describe '#execute' do
    it 'links existing lfs objects to the project' do
      expect(project.all_lfs_objects.count).to eq 2

      linked = subject.execute(new_oid_list.keys)

      # Only the stored-but-unlinked object gets linked (2 -> 3); the OIDs
      # with no LfsObject row ('oid1', 'oid2') are ignored.
      expect(project.all_lfs_objects.count).to eq 3
      expect(linked.size).to eq 3
    end

    it 'returns linked oids' do
      # Expected result: the two already-linked OIDs plus the newly linked one.
      linked = lfs_objects_project.map(&:lfs_object).map(&:oid) << new_lfs_object.oid

      expect(subject.execute(new_oid_list.keys)).to eq linked
    end
  end
end
require 'spec_helper'

# Specs for Gitlab::GithubImport::Stage::ImportLfsObjectsWorker, the GitHub
# import stage that runs the LFS objects importer and then advances the
# import to the :finish stage.
describe Gitlab::GithubImport::Stage::ImportLfsObjectsWorker do
  let(:project) { create(:project) }
  let(:worker) { described_class.new }

  describe '#import' do
    it 'imports all the lfs objects' do
      importer = double(:importer)
      # A JobWaiter tracking 2 pending jobs under key '123'.
      waiter = Gitlab::JobWaiter.new(2, '123')

      expect(Gitlab::GithubImport::Importer::LfsObjectsImporter)
        .to receive(:new)
        .with(project, nil)
        .and_return(importer)

      expect(importer)
        .to receive(:execute)
        .and_return(waiter)

      # The stage advances to :finish once the waiter's jobs complete; the
      # hash maps the waiter key to its remaining job count.
      expect(Gitlab::GithubImport::AdvanceStageWorker)
        .to receive(:perform_async)
        .with(project.id, { '123' => 2 }, :finish)

      worker.import(project)
    end
  end
end
...@@ -21,7 +21,7 @@ describe Gitlab::GithubImport::Stage::ImportNotesWorker do ...@@ -21,7 +21,7 @@ describe Gitlab::GithubImport::Stage::ImportNotesWorker do
expect(Gitlab::GithubImport::AdvanceStageWorker) expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async) .to receive(:perform_async)
.with(project.id, { '123' => 2 }, :finish) .with(project.id, { '123' => 2 }, :lfs_objects)
worker.import(client, project) worker.import(client, project)
end end
......
...@@ -10,6 +10,7 @@ ...@@ -10,6 +10,7 @@
# Test jobs may be disabled by setting environment variables: # Test jobs may be disabled by setting environment variables:
# * test: TEST_DISABLED # * test: TEST_DISABLED
# * code_quality: CODE_QUALITY_DISABLED # * code_quality: CODE_QUALITY_DISABLED
# * license_management: LICENSE_MANAGEMENT_DISABLED
# * performance: PERFORMANCE_DISABLED # * performance: PERFORMANCE_DISABLED
# * sast: SAST_DISABLED # * sast: SAST_DISABLED
# * dependency_scanning: DEPENDENCY_SCANNING_DISABLED # * dependency_scanning: DEPENDENCY_SCANNING_DISABLED
...@@ -108,6 +109,22 @@ code_quality: ...@@ -108,6 +109,22 @@ code_quality:
variables: variables:
- $CODE_QUALITY_DISABLED - $CODE_QUALITY_DISABLED
# License Management job: runs the GitLab license-management scanner via
# Docker-in-Docker (the `license_management` shell function in this
# template) and publishes its JSON report as an artifact.
# Disable by setting the LICENSE_MANAGEMENT_DISABLED variable.
license_management:
  image: docker:stable
  variables:
    DOCKER_DRIVER: overlay2
  # Report-only job: a scanner failure must not block the pipeline.
  allow_failure: true
  services:
    - docker:stable-dind
  script:
    - setup_docker
    - license_management
  artifacts:
    paths: [gl-license-management-report.json]
  except:
    variables:
      - $LICENSE_MANAGEMENT_DISABLED
performance: performance:
stage: performance stage: performance
image: docker:stable image: docker:stable
...@@ -462,6 +479,18 @@ rollout 100%: ...@@ -462,6 +479,18 @@ rollout 100%:
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
} }
# Run the license-management Docker image against the project sources.
# Requires the `license_management` feature in $GITLAB_FEATURES; otherwise
# prints a notice and does nothing.
function license_management() {
  # Quote the variable and use `grep -q` instead of redirecting to /dev/null.
  if echo "$GITLAB_FEATURES" | grep -q license_management; then
    # Extract "MAJOR.MINOR" from CI_SERVER_VERSION and generate "MAJOR-MINOR-stable"
    LICENSE_MANAGEMENT_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')

    docker run --volume "$PWD:/code" \
      "registry.gitlab.com/gitlab-org/security-products/license-management:$LICENSE_MANAGEMENT_VERSION" analyze /code
  else
    echo "License management is not available in your subscription"
  fi
}
function sast() { function sast() {
case "$CI_SERVER_VERSION" in case "$CI_SERVER_VERSION" in
*-ee) *-ee)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment