Commit 4c0e8125 authored by Sean McGivern

Merge branch 'ce-to-ee-2018-03-21' into 'master'

CE upstream - 2018-03-21 21:28 UTC

Closes gitlab-runner#1621 and gitaly#1092

See merge request gitlab-org/gitlab-ee!5070
parents dbf82adb ad76a2b4
# --- Special code for migrating to Rails 5.0 ---
def rails5?
%w[1 true].include?(ENV["RAILS5"])
end
gem_versions = {}
gem_versions['activerecord_sane_schema_dumper'] = rails5? ? '1.0' : '0.2'
gem_versions['default_value_for'] = rails5? ? '~> 3.0.5' : '~> 3.0.0'
gem_versions['html-pipeline'] = rails5? ? '~> 2.6.0' : '~> 1.11.0'
gem_versions['rails'] = rails5? ? '5.0.6' : '4.2.10'
gem_versions['rails-i18n'] = rails5? ? '~> 5.1' : '~> 4.0.9'
# --- The end of special code for migrating to Rails 5.0 ---
source 'https://rubygems.org'
-gem 'rails', '4.2.10'
+gem 'rails', gem_versions['rails']
gem 'rails-deprecated_sanitizer', '~> 1.0.3'
# Responders respond_to and respond_with
@@ -9,7 +22,7 @@ gem 'responders', '~> 2.0'
gem 'sprockets', '~> 3.7.0'
# Default values for AR models
-gem 'default_value_for', '~> 3.0.0'
+gem 'default_value_for', gem_versions['default_value_for']
# Supported DBs
gem 'mysql2', '~> 0.4.10', group: :mysql
@@ -24,7 +37,7 @@ gem 'faraday', '~> 0.12'
gem 'devise', '~> 4.2'
gem 'doorkeeper', '~> 4.3'
gem 'doorkeeper-openid_connect', '~> 1.3'
-gem 'omniauth', '~> 1.4.2'
+gem 'omniauth', '~> 1.8'
gem 'omniauth-auth0', '~> 1.4.1'
gem 'omniauth-azure-oauth2', '~> 0.0.9'
gem 'omniauth-cas3', '~> 1.1.4'
@@ -132,7 +145,7 @@ gem 'aws-sdk'
gem 'faraday_middleware-aws-signers-v4'
# Markdown and HTML processing
-gem 'html-pipeline', '~> 1.11.0'
+gem 'html-pipeline', gem_versions['html-pipeline']
gem 'deckar01-task_list', '2.0.0'
gem 'gitlab-markup', '~> 1.6.2'
gem 'redcarpet', '~> 3.4'
@@ -231,7 +244,7 @@ gem 'babosa', '~> 1.0.2'
gem 'loofah', '~> 2.0.3'
# Working with license
-gem 'licensee', '~> 8.7.0'
+gem 'licensee', '~> 8.9'
# Protect against bruteforcing
gem 'rack-attack', '~> 4.4.1'
@@ -278,9 +291,9 @@ gem 'premailer-rails', '~> 1.9.7'
# I18n
gem 'ruby_parser', '~> 3.8', require: false
-gem 'rails-i18n', '~> 4.0.9'
+gem 'rails-i18n', gem_versions['rails-i18n']
gem 'gettext_i18n_rails', '~> 1.8.0'
-gem 'gettext_i18n_rails_js', '~> 1.2.0'
+gem 'gettext_i18n_rails_js', '~> 1.3'
gem 'gettext', '~> 3.2.2', require: false, group: :development
gem 'batch-loader', '~> 1.2.1'
@@ -369,7 +382,7 @@ group :development, :test do
gem 'license_finder', '~> 3.1', require: false
gem 'knapsack', '~> 1.16'
-gem 'activerecord_sane_schema_dumper', '0.2'
+gem 'activerecord_sane_schema_dumper', gem_versions['activerecord_sane_schema_dumper']
gem 'stackprof', '~> 0.2.10', require: false
......
@@ -235,7 +235,7 @@ GEM
      faraday_middleware
      multi_json
    fast_blank (1.0.0)
-   fast_gettext (1.4.0)
+   fast_gettext (1.6.0)
    ffaker (2.4.0)
    ffi (1.9.18)
    flay (2.10.0)
@@ -302,12 +302,12 @@ GEM
    gemojione (3.3.0)
      json
    get_process_mem (0.2.0)
-   gettext (3.2.2)
+   gettext (3.2.9)
      locale (>= 2.0.5)
      text (>= 1.3.0)
    gettext_i18n_rails (1.8.0)
      fast_gettext (>= 0.9.0)
-   gettext_i18n_rails_js (1.2.0)
+   gettext_i18n_rails_js (1.3.0)
      gettext (>= 3.0.2)
      gettext_i18n_rails (>= 0.7.1)
      po_to_json (>= 1.0.0)
@@ -503,7 +503,7 @@ GEM
      toml (= 0.1.2)
      with_env (> 1.0)
      xml-simple
-   licensee (8.7.0)
+   licensee (8.9.2)
      rugged (~> 0.24)
    little-plugger (1.1.4)
    locale (2.1.2)
@@ -527,7 +527,7 @@ GEM
      mime-types-data (~> 3.2015)
    mime-types-data (3.2016.0521)
    mimemagic (0.3.0)
-   mini_mime (0.1.4)
+   mini_mime (1.0.0)
    mini_portile2 (2.3.0)
    minitest (5.7.0)
    mousetrap-rails (1.4.6)
@@ -554,8 +554,8 @@ GEM
      rack (>= 1.2, < 3)
    octokit (4.8.0)
      sawyer (~> 0.8.0, >= 0.5.3)
-   omniauth (1.4.3)
-     hashie (>= 1.2, < 4)
+   omniauth (1.8.1)
+     hashie (>= 3.4.6, < 3.6.0)
      rack (>= 1.6.2, < 3)
    omniauth-auth0 (1.4.1)
      omniauth-oauth2 (~> 1.1)
@@ -1090,7 +1090,7 @@ DEPENDENCIES
  gemojione (~> 3.3)
  gettext (~> 3.2.2)
  gettext_i18n_rails (~> 1.8.0)
- gettext_i18n_rails_js (~> 1.2.0)
+ gettext_i18n_rails_js (~> 1.3)
  gitaly-proto (~> 0.88.0)
  github-linguist (~> 5.3.3)
  gitlab-flowdock-git-hook (~> 1.0.1)
@@ -1128,7 +1128,7 @@ DEPENDENCIES
  kubeclient (~> 3.0)
  letter_opener_web (~> 1.3.0)
  license_finder (~> 3.1)
- licensee (~> 8.7.0)
+ licensee (~> 8.9)
  lograge (~> 0.5)
  loofah (~> 2.0.3)
  mail_room (~> 0.9.1)
@@ -1142,7 +1142,7 @@ DEPENDENCIES
  nokogiri (~> 1.8.2)
  oauth2 (~> 1.4)
  octokit (~> 4.8)
- omniauth (~> 1.4.2)
+ omniauth (~> 1.8)
  omniauth-auth0 (~> 1.4.1)
  omniauth-authentiq (~> 0.3.1)
  omniauth-azure-oauth2 (~> 0.0.9)
......
# BUNDLE_GEMFILE=Gemfile.rails5 bundle install
ENV["RAILS5"] = "true"
gemfile = File.expand_path("../Gemfile", __FILE__)
eval(File.read(gemfile), nil, gemfile)
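Running `BUNDLE_GEMFILE=Gemfile.rails5 bundle install` therefore evaluates the main `Gemfile` with `RAILS5` forced on, which flips every entry in `gem_versions` to its Rails 5 value. The initializers later in this merge request guard vendored Rails 4 code with a `Gitlab.rails5?` predicate; as a hedged sketch (its defining file is not shown in this diff), it could simply mirror the `rails5?` helper from the Gemfile:

```ruby
# Hypothetical sketch only: mirrors the rails5? helper in the Gemfile.
# The actual module/file that defines Gitlab.rails5? is not part of this diff.
module Gitlab
  def self.rails5?
    %w[1 true].include?(ENV["RAILS5"])
  end
end
```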
@@ -4,4 +4,3 @@
#
web: RAILS_ENV=development bin/web start_foreground
worker: RAILS_ENV=development bin/background_jobs start_foreground
-# mail_room: bundle exec mail_room -q -c config/mail_room.yml
<script>
-import projectAvatarImage from '~/vue_shared/components/project_avatar/image.vue';
-import branchesTree from './ide_project_branches_tree.vue';
-import externalLinks from './ide_external_links.vue';
+import ProjectAvatarImage from '~/vue_shared/components/project_avatar/image.vue';
+import Identicon from '../../vue_shared/components/identicon.vue';
+import BranchesTree from './ide_project_branches_tree.vue';
+import ExternalLinks from './ide_external_links.vue';

export default {
  components: {
-    branchesTree,
-    externalLinks,
-    projectAvatarImage,
+    BranchesTree,
+    ExternalLinks,
+    ProjectAvatarImage,
+    Identicon,
  },
  props: {
    project: {
@@ -25,7 +27,10 @@ export default {
      :title="project.name"
      :href="project.web_url"
    >
-     <div class="avatar-container s40 project-avatar">
+     <div
+       v-if="project.avatar_url"
+       class="avatar-container s40 project-avatar"
+     >
        <project-avatar-image
          class="avatar-container project-avatar"
          :link-href="project.path"
@@ -34,6 +39,12 @@ export default {
          :img-size="40"
        />
      </div>
+     <identicon
+       v-else
+       size-class="s40"
+       :entity-id="project.id"
+       :entity-name="project.name"
+     />
      <div class="sidebar-context-title">
        {{ project.name }}
      </div>
......
@@ -43,6 +43,7 @@ export default {
        'file-open': this.isBlob && this.file.opened,
        'file-active': this.isBlob && this.file.active,
        folder: this.isTree,
+       'is-open': this.file.opened,
      };
    },
  },
......
...@@ -54,41 +54,61 @@ const router = new VueRouter({ ...@@ -54,41 +54,61 @@ const router = new VueRouter({
router.beforeEach((to, from, next) => { router.beforeEach((to, from, next) => {
if (to.params.namespace && to.params.project) { if (to.params.namespace && to.params.project) {
store.dispatch('getProjectData', { store
namespace: to.params.namespace, .dispatch('getProjectData', {
projectId: to.params.project, namespace: to.params.namespace,
}) projectId: to.params.project,
.then(() => { })
const fullProjectId = `${to.params.namespace}/${to.params.project}`; .then(() => {
const fullProjectId = `${to.params.namespace}/${to.params.project}`;
if (to.params.branch) { if (to.params.branch) {
store.dispatch('getBranchData', { store.dispatch('getBranchData', {
projectId: fullProjectId, projectId: fullProjectId,
branchId: to.params.branch, branchId: to.params.branch,
}); });
store.dispatch('getFiles', { store
projectId: fullProjectId, .dispatch('getFiles', {
branchId: to.params.branch, projectId: fullProjectId,
}) branchId: to.params.branch,
.then(() => { })
if (to.params[0]) { .then(() => {
const treeEntry = store.state.entries[to.params[0]]; if (to.params[0]) {
if (treeEntry) { const path =
store.dispatch('handleTreeEntryAction', treeEntry); to.params[0].slice(-1) === '/'
} ? to.params[0].slice(0, -1)
} : to.params[0];
}) const treeEntry = store.state.entries[path];
.catch((e) => { if (treeEntry) {
flash('Error while loading the branch files. Please try again.', 'alert', document, null, false, true); store.dispatch('handleTreeEntryAction', treeEntry);
throw e; }
}); }
} })
}) .catch(e => {
.catch((e) => { flash(
flash('Error while loading the project data. Please try again.', 'alert', document, null, false, true); 'Error while loading the branch files. Please try again.',
throw e; 'alert',
}); document,
null,
false,
true,
);
throw e;
});
}
})
.catch(e => {
flash(
'Error while loading the project data. Please try again.',
'alert',
document,
null,
false,
true,
);
throw e;
});
} }
next(); next();
......
import { import { decorateData, sortTree } from '../utils';
decorateData,
sortTree,
} from '../utils';
self.addEventListener('message', (e) => { self.addEventListener('message', e => {
const { data, projectId, branchId, tempFile = false, content = '', base64 = false } = e.data; const {
data,
projectId,
branchId,
tempFile = false,
content = '',
base64 = false,
} = e.data;
const treeList = []; const treeList = [];
let file; let file;
...@@ -15,7 +19,9 @@ self.addEventListener('message', (e) => { ...@@ -15,7 +19,9 @@ self.addEventListener('message', (e) => {
if (pathSplit.length > 0) { if (pathSplit.length > 0) {
pathSplit.reduce((pathAcc, folderName) => { pathSplit.reduce((pathAcc, folderName) => {
const parentFolder = acc[pathAcc[pathAcc.length - 1]]; const parentFolder = acc[pathAcc[pathAcc.length - 1]];
const folderPath = `${(parentFolder ? `${parentFolder.path}/` : '')}${folderName}`; const folderPath = `${
parentFolder ? `${parentFolder.path}/` : ''
}${folderName}`;
const foundEntry = acc[folderPath]; const foundEntry = acc[folderPath];
if (!foundEntry) { if (!foundEntry) {
...@@ -25,9 +31,11 @@ self.addEventListener('message', (e) => { ...@@ -25,9 +31,11 @@ self.addEventListener('message', (e) => {
id: folderPath, id: folderPath,
name: folderName, name: folderName,
path: folderPath, path: folderPath,
url: `/${projectId}/tree/${branchId}/${folderPath}`, url: `/${projectId}/tree/${branchId}/${folderPath}/`,
type: 'tree', type: 'tree',
parentTreeUrl: parentFolder ? parentFolder.url : `/${projectId}/tree/${branchId}/`, parentTreeUrl: parentFolder
? parentFolder.url
: `/${projectId}/tree/${branchId}/`,
tempFile, tempFile,
changed: tempFile, changed: tempFile,
opened: tempFile, opened: tempFile,
...@@ -62,7 +70,9 @@ self.addEventListener('message', (e) => { ...@@ -62,7 +70,9 @@ self.addEventListener('message', (e) => {
path, path,
url: `/${projectId}/blob/${branchId}/${path}`, url: `/${projectId}/blob/${branchId}/${path}`,
type: 'blob', type: 'blob',
parentTreeUrl: fileFolder ? fileFolder.url : `/${projectId}/blob/${branchId}`, parentTreeUrl: fileFolder
? fileFolder.url
: `/${projectId}/blob/${branchId}`,
tempFile, tempFile,
changed: tempFile, changed: tempFile,
content, content,
......
import stopwatchSvg from 'icons/_icon_stopwatch.svg';
import { abbreviateTime } from '../../../lib/utils/pretty_time';
export default {
name: 'time-tracking-collapsed-state',
props: {
showComparisonState: {
type: Boolean,
required: true,
},
showSpentOnlyState: {
type: Boolean,
required: true,
},
showEstimateOnlyState: {
type: Boolean,
required: true,
},
showNoTimeTrackingState: {
type: Boolean,
required: true,
},
timeSpentHumanReadable: {
type: String,
required: false,
default: '',
},
timeEstimateHumanReadable: {
type: String,
required: false,
default: '',
},
},
computed: {
timeSpent() {
return this.abbreviateTime(this.timeSpentHumanReadable);
},
timeEstimate() {
return this.abbreviateTime(this.timeEstimateHumanReadable);
},
divClass() {
if (this.showComparisonState) {
return 'compare';
} else if (this.showEstimateOnlyState) {
return 'estimate-only';
} else if (this.showSpentOnlyState) {
return 'spend-only';
} else if (this.showNoTimeTrackingState) {
return 'no-tracking';
}
return '';
},
spanClass() {
if (this.showComparisonState) {
return '';
} else if (this.showEstimateOnlyState || this.showSpentOnlyState) {
return 'bold';
} else if (this.showNoTimeTrackingState) {
return 'no-value';
}
return '';
},
text() {
if (this.showComparisonState) {
return `${this.timeSpent} / ${this.timeEstimate}`;
} else if (this.showEstimateOnlyState) {
return `-- / ${this.timeEstimate}`;
} else if (this.showSpentOnlyState) {
return `${this.timeSpent} / --`;
} else if (this.showNoTimeTrackingState) {
return 'None';
}
return '';
},
},
methods: {
abbreviateTime(timeStr) {
return abbreviateTime(timeStr);
},
},
template: `
<div class="sidebar-collapsed-icon">
${stopwatchSvg}
<div class="time-tracking-collapsed-summary">
<div :class="divClass">
<span :class="spanClass">
{{ text }}
</span>
</div>
</div>
</div>
`,
};
<script>
import icon from '../../../vue_shared/components/icon.vue';
import { abbreviateTime } from '../../../lib/utils/pretty_time';
export default {
name: 'TimeTrackingCollapsedState',
components: {
icon,
},
props: {
showComparisonState: {
type: Boolean,
required: true,
},
showSpentOnlyState: {
type: Boolean,
required: true,
},
showEstimateOnlyState: {
type: Boolean,
required: true,
},
showNoTimeTrackingState: {
type: Boolean,
required: true,
},
timeSpentHumanReadable: {
type: String,
required: false,
default: '',
},
timeEstimateHumanReadable: {
type: String,
required: false,
default: '',
},
},
computed: {
timeSpent() {
return this.abbreviateTime(this.timeSpentHumanReadable);
},
timeEstimate() {
return this.abbreviateTime(this.timeEstimateHumanReadable);
},
divClass() {
if (this.showComparisonState) {
return 'compare';
} else if (this.showEstimateOnlyState) {
return 'estimate-only';
} else if (this.showSpentOnlyState) {
return 'spend-only';
} else if (this.showNoTimeTrackingState) {
return 'no-tracking';
}
return '';
},
spanClass() {
if (this.showComparisonState) {
return '';
} else if (this.showEstimateOnlyState || this.showSpentOnlyState) {
return 'bold';
} else if (this.showNoTimeTrackingState) {
return 'no-value';
}
return '';
},
text() {
if (this.showComparisonState) {
return `${this.timeSpent} / ${this.timeEstimate}`;
} else if (this.showEstimateOnlyState) {
return `-- / ${this.timeEstimate}`;
} else if (this.showSpentOnlyState) {
return `${this.timeSpent} / --`;
} else if (this.showNoTimeTrackingState) {
return 'None';
}
return '';
},
},
methods: {
abbreviateTime(timeStr) {
return abbreviateTime(timeStr);
},
},
};
</script>
<template>
<div class="sidebar-collapsed-icon">
<icon name="timer" />
<div class="time-tracking-collapsed-summary">
<div :class="divClass">
<span :class="spanClass">
{{ text }}
</span>
</div>
</div>
</div>
</template>
<script>
import timeTrackingHelpState from './help_state';
-import timeTrackingCollapsedState from './collapsed_state';
+import TimeTrackingCollapsedState from './collapsed_state.vue';
import timeTrackingSpentOnlyPane from './spent_only_pane';
import timeTrackingNoTrackingPane from './no_tracking_pane';
import timeTrackingEstimateOnlyPane from './estimate_only_pane';
@@ -11,7 +11,7 @@ import eventHub from '../../event_hub';
export default {
  name: 'IssuableTimeTracker',
  components: {
-    'time-tracking-collapsed-state': timeTrackingCollapsedState,
+    TimeTrackingCollapsedState,
    'time-tracking-estimate-only-pane': timeTrackingEstimateOnlyPane,
    'time-tracking-spent-only-pane': timeTrackingSpentOnlyPane,
    'time-tracking-no-tracking-pane': timeTrackingNoTrackingPane,
......
@@ -501,10 +501,8 @@
    -moz-osx-font-smoothing: grayscale;
  }

-  &.dropdown-menu-user-link {
-    &::before {
-      top: 50%;
-    }
+  &.dropdown-menu-user-link::before {
+    top: 50%;
  }
}
......
@@ -39,12 +39,35 @@
svg {
  fill: currentColor;

-  &.s8 { @include svg-size(8px); }
-  &.s12 { @include svg-size(12px); }
-  &.s16 { @include svg-size(16px); }
-  &.s18 { @include svg-size(18px); }
-  &.s24 { @include svg-size(24px); }
-  &.s32 { @include svg-size(32px); }
-  &.s48 { @include svg-size(48px); }
-  &.s72 { @include svg-size(72px); }
+  &.s8 {
+    @include svg-size(8px);
+  }
+
+  &.s12 {
+    @include svg-size(12px);
+  }
+
+  &.s16 {
+    @include svg-size(16px);
+  }
+
+  &.s18 {
+    @include svg-size(18px);
+  }
+
+  &.s24 {
+    @include svg-size(24px);
+  }
+
+  &.s32 {
+    @include svg-size(32px);
+  }
+
+  &.s48 {
+    @include svg-size(48px);
+  }
+
+  &.s72 {
+    @include svg-size(72px);
+  }
}
@@ -382,4 +382,11 @@ module IssuablesHelper
  def parent
    @project || @group
  end

+  def issuable_milestone_tooltip_title(issuable)
+    if issuable.milestone
+      milestone_tooltip = milestone_tooltip_title(issuable.milestone)
+      _('Milestone') + (milestone_tooltip ? ': ' + milestone_tooltip : '')
+    end
+  end
end
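For illustration only (the milestone text below is invented), the new helper prefixes the existing milestone tooltip with a "Milestone" label and returns `nil` when no milestone is set, so the views below can pass its result straight into the tooltip `title`:

```ruby
# Hypothetical values, for illustration:
# milestone_tooltip_title(issue.milestone) #=> "Apr 1, 2018 (2 days remaining)"
issuable_milestone_tooltip_title(issue)                   #=> "Milestone: Apr 1, 2018 (2 days remaining)"
issuable_milestone_tooltip_title(issue_without_milestone) #=> nil, so no tooltip is rendered
```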
@@ -7,7 +7,9 @@
  .issue-main-info
    .issue-title.title
      %span.issue-title-text
-       = confidential_icon(issue)
+       - if issue.confidential?
+         %span.has-tooltip{ title: _('Confidential') }
+           = confidential_icon(issue)
      = link_to issue.title, issue_path(issue)
    - if issue.tasks?
      %span.task-status.hidden-xs
@@ -24,11 +26,11 @@
    - if issue.milestone
      %span.issuable-milestone.hidden-xs
        &nbsp;
-       = link_to project_issues_path(issue.project, milestone_title: issue.milestone.title), data: { html: 1, toggle: 'tooltip', title: milestone_tooltip_title(issue.milestone) } do
+       = link_to project_issues_path(issue.project, milestone_title: issue.milestone.title), data: { html: 1, toggle: 'tooltip', title: issuable_milestone_tooltip_title(issue) } do
          = icon('clock-o')
          = issue.milestone.title
    - if issue.due_date
-     %span.issuable-due-date.hidden-xs{ class: "#{'cred' if issue.overdue?}" }
+     %span.issuable-due-date.hidden-xs.has-tooltip{ class: "#{'cred' if issue.overdue?}", title: _('Due date') }
        &nbsp;
        = icon('calendar')
        = issue.due_date.to_s(:medium)
......
@@ -23,11 +23,11 @@
  - if merge_request.milestone
    %span.issuable-milestone.hidden-xs
      &nbsp;
-     = link_to project_merge_requests_path(merge_request.project, milestone_title: merge_request.milestone.title), data: { html: 1, toggle: 'tooltip', title: milestone_tooltip_title(merge_request.milestone) } do
+     = link_to project_merge_requests_path(merge_request.project, milestone_title: merge_request.milestone.title), data: { html: 1, toggle: 'tooltip', title: issuable_milestone_tooltip_title(merge_request) } do
        = icon('clock-o')
        = merge_request.milestone.title
  - if merge_request.target_project.default_branch != merge_request.target_branch
-   %span.project-ref-path
+   %span.project-ref-path.has-tooltip{ title: _('Target branch') }
      &nbsp;
      = link_to project_ref_path(merge_request.project, merge_request.target_branch), class: 'ref-name' do
        = sprite_icon('fork', size: 12, css_class: 'fork-sprite')
@@ -51,11 +51,11 @@
        = render_pipeline_status(merge_request.head_pipeline)
    - if merge_request.open? && merge_request.broken?
      %li.issuable-pipeline-broken.hidden-xs
-       = link_to merge_request_path(merge_request), class: "has-tooltip", title: "Cannot be merged automatically", data: { container: 'body' } do
+       = link_to merge_request_path(merge_request), class: "has-tooltip", title: _('Cannot be merged automatically') do
          = icon('exclamation-triangle')
    - if merge_request.assignee
      %li
-       = link_to_member(merge_request.source_project, merge_request.assignee, name: false, title: "Assigned to :name")
+       = link_to_member(merge_request.source_project, merge_request.assignee, name: false, title: _('Assigned to :name'))

  = render 'shared/issuable_meta_data', issuable: merge_request
......
@@ -79,7 +79,7 @@
  = render 'projects/find_file_link'
  = succeed " " do
-   = link_to ide_edit_path(@project, @id), class: 'btn btn-default' do
+   = link_to ide_edit_path(@project, @id, ""), class: 'btn btn-default' do
      = _('Web IDE')
  = render 'projects/buttons/download', project: @project, ref: @ref
@@ -5,21 +5,21 @@
 - issuable_mr = @issuable_meta_data[issuable.id].merge_requests_count
 - if issuable_mr > 0
-  %li.issuable-mr.hidden-xs
+  %li.issuable-mr.hidden-xs.has-tooltip{ title: _('Related merge requests') }
     = image_tag('icon-merge-request-unmerged.svg', class: 'icon-merge-request-unmerged')
     = issuable_mr

 - if upvotes > 0
-  %li.issuable-upvotes.hidden-xs
+  %li.issuable-upvotes.hidden-xs.has-tooltip{ title: _('Upvotes') }
     = icon('thumbs-up')
     = upvotes

 - if downvotes > 0
-  %li.issuable-downvotes.hidden-xs
+  %li.issuable-downvotes.hidden-xs.has-tooltip{ title: _('Downvotes') }
     = icon('thumbs-down')
     = downvotes

 %li.issuable-comments.hidden-xs
-  = link_to issuable_url, class: ('no-comments' if note_count.zero?) do
+  = link_to issuable_url, class: ['has-tooltip', ('no-comments' if note_count.zero?)], title: _('Comments') do
     = icon('comments')
     = note_count
@@ -4,7 +4,7 @@
  %header.board-header{ ":class" => '{ "has-border": list.label && list.label.color }', ":style" => "{ borderTopColor: (list.label && list.label.color ? list.label.color : null) }", "@click" => "toggleExpanded($event)" }
    %h3.board-title.js-board-handle{ ":class" => '{ "user-can-drag": (!disabled && !list.preset) }' }
      %i.fa.fa-fw.board-title-expandable-toggle{ "v-if": "list.isExpandable",
-       ":class": "{ \"fa-caret-down\": list.isExpanded, \"fa-caret-right\": !list.isExpanded && list.position === -1, \"fa-caret-left\": !list.isExpanded && list.position !== -1 }",
+       ":class": "{ \"fa-caret-down\": list.isExpanded, \"fa-caret-right\": !list.isExpanded }",
        "aria-hidden": "true" }
      %span.board-title-text.has-tooltip{ "v-if": "list.type !== \"label\"",
......
---
title: Apply NestingDepth (level 5) (framework/dropdowns.scss)
merge_request: 17820
author: Takuya Noguchi
type: other
---
title: Add tooltips to icons in lists of issues and merge requests
merge_request: 17700
author:
type: changed
---
title: Add indexes for user activity queries.
merge_request: 17890
author:
type: performance
---
title: Fixed IDE button opening the wrong URL in tree list
merge_request:
author:
type: fixed
---
title: Make project avatar in IDE consistent with the rest of GitLab
merge_request:
author:
type: changed
---
title: Move TimeTrackingCollapsedState vue component
merge_request: 17399
author: George Tsiolis
type: performance
-# Port ActiveRecord::Relation#in_batches from ActiveRecord 5.
-# https://github.com/rails/rails/blob/ac027338e4a165273607dccee49a3d38bc836794/activerecord/lib/active_record/relation/batches.rb#L184
-# TODO: this can be removed once we're using AR5.
-raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5
+# Remove this file when upgraded to rails 5.0.
+unless Gitlab.rails5?
  module ActiveRecord
    module Batches
      # Differences from upstream: enumerator support was removed, and custom
      # order/limit clauses are ignored without a warning.
      def in_batches(of: 1000, start: nil, finish: nil, load: false)
        raise "Must provide a block" unless block_given?

        relation = self.reorder(batch_order).limit(of)
        relation = relation.where(arel_table[primary_key].gteq(start)) if start
        relation = relation.where(arel_table[primary_key].lteq(finish)) if finish
        batch_relation = relation

        loop do
          if load
            records = batch_relation.records
            ids = records.map(&:id)
            yielded_relation = self.where(primary_key => ids)
            yielded_relation.load_records(records)
          else
            ids = batch_relation.pluck(primary_key)
            yielded_relation = self.where(primary_key => ids)
          end

          break if ids.empty?

          primary_key_offset = ids.last
          raise ArgumentError.new("Primary key not included in the custom select clause") unless primary_key_offset

          yield yielded_relation

          break if ids.length < of

          batch_relation = relation.where(arel_table[primary_key].gt(primary_key_offset))
        end
      end
    end
  end
+end
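The vendored method follows the Rails 5 `in_batches` API: it yields an `ActiveRecord::Relation` per batch rather than individual records. A usage sketch (the model and column names are invented for illustration):

```ruby
# Illustrative only; User/admin are stand-in names.
# Each iteration receives a relation covering up to 500 rows,
# so bulk operations like update_all run once per batch.
User.where(admin: false).in_batches(of: 500) do |relation|
  relation.update_all(updated_at: Time.zone.now)
end
```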
......
raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5 # Remove this file when upgraded to rails 5.0.
if !Gitlab.rails5? && Gitlab::Database.postgresql?
if Gitlab::Database.postgresql?
require 'active_record/connection_adapters/postgresql_adapter' require 'active_record/connection_adapters/postgresql_adapter'
require 'active_record/connection_adapters/postgresql/schema_statements' require 'active_record/connection_adapters/postgresql/schema_statements'
......
# Monkey patch lograge until https://github.com/roidrage/lograge/pull/241 is released
module Lograge
class RequestLogSubscriber < ActiveSupport::LogSubscriber
def strip_query_string(path)
index = path.index('?')
index ? path[0, index] : path
end
def extract_location
location = Thread.current[:lograge_location]
return {} unless location
Thread.current[:lograge_location] = nil
{ location: strip_query_string(location) }
end
end
end
# Only use Lograge for Rails
unless Sidekiq.server?
  filename = File.join(Rails.root, 'log', "#{Rails.env}_json.log")
......
@@ -154,7 +154,7 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration
  end

  def add_foreign_key_if_not_exists(source, target, column:)
-   return if foreign_key_exists?(source, column)
+   return if foreign_key_exists?(source, target, column: column)

    add_concurrent_foreign_key(source, target, column: column)
  end
@@ -175,12 +175,6 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration
  rescue ArgumentError
  end

-  def foreign_key_exists?(table, column)
-    foreign_keys(table).any? do |key|
-      key.options[:column] == column.to_s
-    end
-  end
-
  def connection
    # Rails memoizes connection objects, but this causes them to be shared
    # amongst threads; we don't want that.
......
@@ -10,13 +10,13 @@ class AddStageIdForeignKeyToBuilds < ActiveRecord::Migration
      add_concurrent_index(:ci_builds, :stage_id)
    end

-   unless foreign_key_exists?(:ci_builds, :stage_id)
+   unless foreign_key_exists?(:ci_builds, :ci_stages, column: :stage_id)
      add_concurrent_foreign_key(:ci_builds, :ci_stages, column: :stage_id, on_delete: :cascade)
    end
  end

  def down
-   if foreign_key_exists?(:ci_builds, :stage_id)
+   if foreign_key_exists?(:ci_builds, column: :stage_id)
      remove_foreign_key(:ci_builds, column: :stage_id)
    end
@@ -24,12 +24,4 @@ class AddStageIdForeignKeyToBuilds < ActiveRecord::Migration
      remove_concurrent_index(:ci_builds, :stage_id)
    end
  end
-
-  private
-
-  def foreign_key_exists?(table, column)
-    foreign_keys(:ci_builds).any? do |key|
-      key.options[:column] == column.to_s
-    end
-  end
end
@@ -23,23 +23,15 @@ class AddForeignKeyToMergeRequests < ActiveRecord::Migration
      merge_requests.update_all(head_pipeline_id: nil)
    end

-   unless foreign_key_exists?(:merge_requests, :head_pipeline_id)
+   unless foreign_key_exists?(:merge_requests, column: :head_pipeline_id)
      add_concurrent_foreign_key(:merge_requests, :ci_pipelines,
                                 column: :head_pipeline_id, on_delete: :nullify)
    end
  end

  def down
-   if foreign_key_exists?(:merge_requests, :head_pipeline_id)
+   if foreign_key_exists?(:merge_requests, column: :head_pipeline_id)
      remove_foreign_key(:merge_requests, column: :head_pipeline_id)
    end
  end
-
-  private
-
-  def foreign_key_exists?(table, column)
-    foreign_keys(table).any? do |key|
-      key.options[:column] == column.to_s
-    end
-  end
end
class AddIndexesForUserActivityQueries < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :events, [:author_id, :project_id] unless index_exists?(:events, [:author_id, :project_id])
add_concurrent_index :user_interacted_projects, :user_id unless index_exists?(:user_interacted_projects, :user_id)
end
def down
remove_concurrent_index :events, [:author_id, :project_id] if index_exists?(:events, [:author_id, :project_id])
patch_foreign_keys do
remove_concurrent_index :user_interacted_projects, :user_id if index_exists?(:user_interacted_projects, :user_id)
end
end
private
def patch_foreign_keys
return yield if Gitlab::Database.postgresql?
# MySQL doesn't like to remove the index with a foreign key using it.
remove_foreign_key :user_interacted_projects, :users if fk_exists?(:user_interacted_projects, :user_id)
yield
# Let's re-add the foreign key using the existing index on (user_id, project_id)
add_concurrent_foreign_key :user_interacted_projects, :users, column: :user_id unless fk_exists?(:user_interacted_projects, :user_id)
end
def fk_exists?(table, column)
foreign_keys(table).any? do |key|
key.options[:column] == column.to_s
end
end
end
@@ -26,11 +26,11 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
  def down
    execute "TRUNCATE user_interacted_projects"

-   if foreign_key_exists?(:user_interacted_projects, :user_id)
+   if foreign_key_exists?(:user_interacted_projects, :users)
      remove_foreign_key :user_interacted_projects, :users
    end

-   if foreign_key_exists?(:user_interacted_projects, :project_id)
+   if foreign_key_exists?(:user_interacted_projects, :projects)
      remove_foreign_key :user_interacted_projects, :projects
    end
@@ -115,7 +115,7 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
  end

  def create_fk(table, target, column)
-   return if foreign_key_exists?(table, column)
+   return if foreign_key_exists?(table, target, column: column)

    add_foreign_key table, target, column: column, on_delete: :cascade
  end
@@ -158,11 +158,11 @@ class BuildUserInteractedProjectsTable < ActiveRecord::Migration
      add_concurrent_index :user_interacted_projects, [:project_id, :user_id], unique: true, name: UNIQUE_INDEX_NAME
    end

-   unless foreign_key_exists?(:user_interacted_projects, :user_id)
+   unless foreign_key_exists?(:user_interacted_projects, :users, column: :user_id)
      add_concurrent_foreign_key :user_interacted_projects, :users, column: :user_id, on_delete: :cascade
    end

-   unless foreign_key_exists?(:user_interacted_projects, :project_id)
+   unless foreign_key_exists?(:user_interacted_projects, :projects, column: :project_id)
      add_concurrent_foreign_key :user_interacted_projects, :projects, column: :project_id, on_delete: :cascade
    end
  end
......
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20180314174825) do
+ActiveRecord::Schema.define(version: 20180320182229) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
@@ -876,6 +876,7 @@ ActiveRecord::Schema.define(version: 20180314174825) do
  end

  add_index "events", ["action"], name: "index_events_on_action", using: :btree
+ add_index "events", ["author_id", "project_id"], name: "index_events_on_author_id_and_project_id", using: :btree
  add_index "events", ["author_id"], name: "index_events_on_author_id", using: :btree
  add_index "events", ["project_id", "id"], name: "index_events_on_project_id_and_id", using: :btree
  add_index "events", ["target_type", "target_id"], name: "index_events_on_target_type_and_target_id", using: :btree
@@ -2425,6 +2426,7 @@ ActiveRecord::Schema.define(version: 20180314174825) do
  end

  add_index "user_interacted_projects", ["project_id", "user_id"], name: "index_user_interacted_projects_on_project_id_and_user_id", unique: true, using: :btree
+ add_index "user_interacted_projects", ["user_id"], name: "index_user_interacted_projects_on_user_id", using: :btree

  create_table "user_synced_attributes_metadata", force: :cascade do |t|
    t.boolean "name_synced", default: false
......
@@ -66,7 +66,8 @@ learn how to leverage its potential even more.
  environments and use them for different purposes like testing, building and
  deploying
- [Job artifacts](../user/project/pipelines/job_artifacts.md)
-- [Git submodules](git_submodules.md): How to run your CI jobs when Git
+- [Caching dependencies](caching/index.md)
+- [Git submodules](git_submodules.md) - How to run your CI jobs when Git
  submodules are involved
- [Use SSH keys in your build environment](ssh_keys/README.md)
  and status of each CI environment running on Kubernetes
......
@@ -146,24 +146,7 @@ To protect/unprotect Runners:
## Manually clearing the Runners cache

-> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/41249) in GitLab 10.4.
-
-GitLab Runners use [cache](../yaml/README.md#cache) to speed up the execution
-of your jobs by reusing existing data. This however, can sometimes lead to an
-inconsistent behavior.
-
-To start with a fresh copy of the cache, you can easily do it via GitLab's UI:
-
-1. Navigate to your project's **CI/CD > Pipelines** page.
-1. Click on the **Clear Runner caches** to clean up the cache.
-1. On the next push, your CI/CD job will use a new cache.
-
-That way, you don't have to change the [cache key](../yaml/README.md#cache-key)
-in your `.gitlab-ci.yml`.
-
-Behind the scenes, this works by increasing a counter in the database, and the
-value of that counter is used to create the key for the cache. After a push, a
-new key is generated and the old cache is not valid anymore.
+Read [clearing the cache](../caching/index.md#clearing-the-cache).

## How shared Runners pick jobs
@@ -227,15 +210,16 @@ that it may encounter on the projects it's shared over. This would be
problematic for large amounts of projects, if it wasn't for tags.

By tagging a Runner for the types of jobs it can handle, you can make sure
-shared Runners will only run the jobs they are equipped to run.
+shared Runners will [only run the jobs they are equipped to run](../yaml/README.md#tags).

For instance, at GitLab we have Runners tagged with "rails" if they contain
the appropriate dependencies to run Rails test suites.

### Preventing Runners with tags from picking jobs without tags

-You can configure a Runner to prevent it from picking jobs with tags when
-the Runner does not have tags assigned. This setting can be enabled the first
+You can configure a Runner to prevent it from picking
+[jobs with tags](../yaml/README.md#tags) when the Runner does not have tags
+assigned. This setting can be enabled the first
time you [register a Runner][register] and can be changed afterwards under
each Runner's settings.
......
@@ -674,6 +674,10 @@ as Review Apps. You can see a simple example using Review Apps at
  by default.
- From GitLab 9.2, caches are restored before [artifacts](#artifacts).

+TIP: **Learn more:**
+Read how caching works and find out some good practices in the
+[caching dependencies documentation](../caching/index.md).
+
`cache` is used to specify a list of files and directories which should be
cached between jobs. You can only use paths that are within the project
workspace.
@@ -681,35 +685,20 @@ workspace.
If `cache` is defined outside the scope of jobs, it means it is set
globally and all jobs will use that definition.
Cache all files in `binaries` and `.config`: ### `cache:paths`
```yaml
rspec:
script: test
cache:
paths:
- binaries/
- .config
```
Cache all Git untracked files:
```yaml Use the `paths` directive to choose which files or directories will be cached.
rspec: Wildcards can be used as well.
script: test
cache:
untracked: true
```
Cache all Git untracked files and files in `binaries`: Cache all files in `binaries` that end in `.apk` and the `.config` file:
```yaml ```yaml
rspec: rspec:
script: test script: test
cache: cache:
untracked: true
paths: paths:
- binaries/ - binaries/*.apk
- .config
``` ```
Locally defined cache overrides globally defined options. The following `rspec` Locally defined cache overrides globally defined options. The following `rspec`
...@@ -723,33 +712,26 @@ cache: ...@@ -723,33 +712,26 @@ cache:
rspec: rspec:
script: test script: test
cache: cache:
key: rspec
paths: paths:
- binaries/ - binaries/
``` ```
Note that since cache is shared between jobs, if you're using different
paths for different jobs, you should also set a different **cache:key**
otherwise cache content can be overwritten.
NOTE: **Note:**
The cache is provided on a best-effort basis, so don't expect that the cache
will be always present.
### `cache:key` ### `cache:key`
> Introduced in GitLab Runner v1.0.0. > Introduced in GitLab Runner v1.0.0.
The `key` directive allows you to define the affinity of caching Since the cache is shared between jobs, if you're using different
between jobs, allowing to have a single cache for all jobs, paths for different jobs, you should also set a different `cache:key`
cache per-job, cache per-branch or any other way that fits your needs. otherwise cache content can be overwritten.
This way, you can fine tune caching, allowing you to cache data between The `key` directive allows you to define the affinity of caching between jobs,
different jobs or even different branches. allowing to have a single cache for all jobs, cache per-job, cache per-branch
or any other way that fits your workflow. This way, you can fine tune caching,
allowing you to cache data between different jobs or even different branches.
The `cache:key` variable can use any of the The `cache:key` variable can use any of the
[predefined variables](../variables/README.md), and the default key, if not set, [predefined variables](../variables/README.md), and the default key, if not set,
is set as `$CI_JOB_NAME-$CI_COMMIT_REF_NAME` which translates as "per-job and is `$CI_JOB_NAME-$CI_COMMIT_REF_NAME` which translates as "per-job and
per-branch". It is the default across the project, therefore everything is per-branch". It is the default across the project, therefore everything is
shared between pipelines and jobs running on the same branch by default. shared between pipelines and jobs running on the same branch by default.
...@@ -757,56 +739,56 @@ NOTE: **Note:** ...@@ -757,56 +739,56 @@ NOTE: **Note:**
The `cache:key` variable cannot contain the `/` character, or the equivalent The `cache:key` variable cannot contain the `/` character, or the equivalent
URI-encoded `%2F`; a value made only of dots (`.`, `%2E`) is also forbidden. URI-encoded `%2F`; a value made only of dots (`.`, `%2E`) is also forbidden.
**Example configurations** For example, to enable per-branch caching:
To enable per-job caching:
```yaml
cache:
key: "$CI_JOB_NAME"
untracked: true
```
To enable per-branch caching:
```yaml ```yaml
cache: cache:
key: "$CI_COMMIT_REF_SLUG" key: "$CI_COMMIT_REF_SLUG"
untracked: true paths:
- binaries/
``` ```
To enable per-job and per-branch caching: If you use **Windows Batch** to run your shell scripts you need to replace
`$` with `%`:
```yaml ```yaml
cache: cache:
key: "$CI_JOB_NAME-$CI_COMMIT_REF_SLUG" key: "%CI_JOB_STAGE%-%CI_COMMIT_REF_SLUG%"
untracked: true paths:
- binaries/
``` ```
To enable per-branch and per-stage caching: If you use **Windows PowerShell** to run your shell scripts you need to replace
`$` with `$env:`:
```yaml ```yaml
cache: cache:
key: "$CI_JOB_STAGE-$CI_COMMIT_REF_SLUG" key: "$env:CI_JOB_STAGE-$env:CI_COMMIT_REF_SLUG"
untracked: true paths:
- binaries/
``` ```
If you use **Windows Batch** to run your shell scripts you need to replace ### `cache:untracked`
`$` with `%`:
Set `untracked: true` to cache all files that are untracked in your Git
repository:
```yaml ```yaml
cache: rspec:
key: "%CI_JOB_STAGE%-%CI_COMMIT_REF_SLUG%" script: test
untracked: true cache:
untracked: true
``` ```
If you use **Windows PowerShell** to run your shell scripts you need to replace Cache all Git untracked files and files in `binaries`:
`$` with `$env:`:
```yaml ```yaml
cache: rspec:
key: "$env:CI_JOB_STAGE-$env:CI_COMMIT_REF_SLUG" script: test
untracked: true cache:
untracked: true
paths:
- binaries/
``` ```
### `cache:policy` ### `cache:policy`
......
@@ -60,16 +60,10 @@ See the [Rails guides] for more info.
   As mentioned, the part after `+` is ignored, and this will end up in the mailbox for `gitlab-incoming@gmail.com`.

-1. Uncomment the `mail_room` line in your `Procfile`:
-
-   ```yaml
-   mail_room: bundle exec mail_room -q -c config/mail_room.yml
-   ```
-
-1. Restart GitLab:
+1. Run this command in the GitLab root directory to launch `mail_room`:

   ```sh
-   bundle exec foreman start
+   bundle exec mail_room -q -c config/mail_room.yml
   ```

1. Verify that everything is configured correctly:
......
@@ -136,11 +136,14 @@ class MyMigration < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
-   remove_concurrent_index :table_name, :column_name if index_exists?(:table_name, :column_name)
+   remove_concurrent_index :table_name, :column_name
  end
end
```

+Note that it is not necessary to check if the index exists prior to
+removing it.
+
## Adding indexes

If you need to add a unique index please keep in mind there is the possibility
......
@@ -6,7 +6,7 @@ class AddGroupBoardsIndexes < ActiveRecord::Migration
  DOWNTIME = false

  def up
-   return if foreign_key_exists?(:boards, :group_id)
+   return if foreign_key_exists?(:boards, :groups, column: :group_id)

    add_concurrent_foreign_key :boards, :namespaces, column: :group_id, on_delete: :cascade
@@ -14,18 +14,10 @@ class AddGroupBoardsIndexes < ActiveRecord::Migration
  end

  def down
-   return unless foreign_key_exists?(:boards, :group_id)
+   return unless foreign_key_exists?(:boards, :groups, column: :group_id)

    remove_foreign_key :boards, column: :group_id
    remove_concurrent_index :boards, :group_id
  end
-
-  private
-
-  def foreign_key_exists?(table, column)
-    foreign_keys(table).any? do |key|
-      key.options[:column] == column.to_s
-    end
-  end
end
@@ -109,7 +109,7 @@ module API
  # Return the Gitaly Address if it is enabled
  def gitaly_payload(action)
-   return unless %w[git-receive-pack git-upload-pack].include?(action)
+   return unless %w[git-receive-pack git-upload-pack git-upload-archive].include?(action)

    {
      repository: repository.gitaly_repository,
......
@@ -59,6 +59,11 @@ module Gitlab
        disable_statement_timeout
      end

+     if index_exists?(table_name, column_name, options)
+       Rails.logger.warn "Index not created because it already exists (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}"
+       return
+     end
+
      add_index(table_name, column_name, options)
    end
@@ -83,6 +88,11 @@ module Gitlab
        disable_statement_timeout
      end

+     unless index_exists?(table_name, column_name, options)
+       Rails.logger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}"
+       return
+     end
+
      remove_index(table_name, options.merge({ column: column_name }))
    end
@@ -107,6 +117,11 @@ module Gitlab
        disable_statement_timeout
      end

+     unless index_exists_by_name?(table_name, index_name)
+       Rails.logger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, index_name: #{index_name}"
+       return
+     end
+
      remove_index(table_name, options.merge({ name: index_name }))
    end
...@@ -140,6 +155,13 @@ module Gitlab ...@@ -140,6 +155,13 @@ module Gitlab
# of PostgreSQL's "VALIDATE CONSTRAINT". As a result we'll just fall # of PostgreSQL's "VALIDATE CONSTRAINT". As a result we'll just fall
# back to the normal foreign key procedure. # back to the normal foreign key procedure.
if Database.mysql? if Database.mysql?
if foreign_key_exists?(source, target, column: column)
Rails.logger.warn "Foreign key not created because it exists already " \
"(this may be due to an aborted migration or similar): " \
"source: #{source}, target: #{target}, column: #{column}"
return
end
return add_foreign_key(source, target, return add_foreign_key(source, target,
column: column, column: column,
on_delete: on_delete) on_delete: on_delete)
...@@ -151,25 +173,43 @@ module Gitlab ...@@ -151,25 +173,43 @@ module Gitlab
key_name = concurrent_foreign_key_name(source, column) key_name = concurrent_foreign_key_name(source, column)
# Using NOT VALID allows us to create a key without immediately unless foreign_key_exists?(source, target, column: column)
# validating it. This means we keep the ALTER TABLE lock only for a Rails.logger.warn "Foreign key not created because it exists already " \
# short period of time. The key _is_ enforced for any newly created "(this may be due to an aborted migration or similar): " \
# data. "source: #{source}, target: #{target}, column: #{column}"
execute <<-EOF.strip_heredoc
ALTER TABLE #{source} # Using NOT VALID allows us to create a key without immediately
ADD CONSTRAINT #{key_name} # validating it. This means we keep the ALTER TABLE lock only for a
FOREIGN KEY (#{column}) # short period of time. The key _is_ enforced for any newly created
REFERENCES #{target} (id) # data.
#{on_delete ? "ON DELETE #{on_delete.upcase}" : ''} execute <<-EOF.strip_heredoc
NOT VALID; ALTER TABLE #{source}
EOF ADD CONSTRAINT #{key_name}
FOREIGN KEY (#{column})
REFERENCES #{target} (id)
#{on_delete ? "ON DELETE #{on_delete.upcase}" : ''}
NOT VALID;
EOF
end
# Validate the existing constraint. This can potentially take a very
# long time to complete, but fortunately does not lock the source table
# while running.
#
# Note this is a no-op in case the constraint is VALID already
execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};")
end
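Because the helper performs a plain ALTER TABLE followed by a separate VALIDATE CONSTRAINT, it refuses to run inside a transaction (see the spec change further down). A minimal sketch of how a migration might call it, assuming hypothetical table and column names:

# Hypothetical post-deployment migration; names are illustrative only.
class AddForeignKeyToIssuesProjectId < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    # On PostgreSQL this adds the key as NOT VALID and then validates it;
    # on MySQL it falls back to a regular add_foreign_key. Re-running after
    # an aborted attempt only logs a warning.
    add_concurrent_foreign_key :issues, :projects, column: :project_id
  end

  def down
    remove_foreign_key :issues, column: :project_id if foreign_key_exists?(:issues, :projects, column: :project_id)
  end
end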
def foreign_key_exists?(source, target = nil, column: nil)
foreign_keys(source).any? do |key|
if column
key.options[:column].to_s == column.to_s
else
key.to_table.to_s == target.to_s
end
end
end
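The matching rules of the new helper are worth spelling out: when a column is given it is the only thing compared; otherwise the target table decides. Assuming projects has a single foreign key on user_id pointing at users (an illustrative setup, mirroring the spec further down):

foreign_key_exists?(:projects, :users)                    # => true,  matched by target table
foreign_key_exists?(:projects, nil, column: :user_id)     # => true,  matched by column
foreign_key_exists?(:projects, :users, column: :group_id) # => false, column is given and differs
foreign_key_exists?(:projects, :namespaces)               # => false, different target table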
# Returns the name for a concurrent foreign key.
#
# PostgreSQL constraint names have a limit of 63 bytes. The logic used
...@@ -860,12 +900,6 @@ into similar problems in the future (e.g. when new tables are created).
end
end
def foreign_key_exists?(table, column)
foreign_keys(table).any? do |key|
key.options[:column] == column.to_s
end
end
# Rails' index_exists? doesn't work when you only give it a table and index
# name. As such we have to use some extra code to check if an index exists for
# a given name.
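That limitation is why remove_concurrent_index_by_name pairs with index_exists_by_name? above. A minimal sketch of a migration dropping an index by name (the index and class names are hypothetical):

# Hypothetical cleanup migration; names are illustrative only.
class RemoveTemporaryIndexFromUsers < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INDEX_NAME = 'index_users_on_state_temporary'.freeze

  disable_ddl_transaction!

  def up
    # Safe to re-run: the helper checks index_exists_by_name? and only
    # warns when the index is already gone.
    remove_concurrent_index_by_name :users, INDEX_NAME
  end

  def down
    add_concurrent_index :users, :state, name: INDEX_NAME
  end
end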
......
...@@ -2,14 +2,5 @@ unless Rails.env.production?
require 'haml_lint/rake_task'
require 'haml_lint/inline_javascript'
# Workaround for warnings from parser/current
# TODO: Remove this after we update parser gem
task :haml_lint do
require 'parser'
def Parser.warn(*args)
puts(*args) # static-analysis ignores stdout if status is 0
end
end
HamlLint::RakeTask.new
end
...@@ -25,4 +25,18 @@ feature 'Projects tree' do
expect(page).to have_selector('.label-lfs', text: 'LFS')
end
end
context 'web IDE', :js do
before do
visit project_tree_path(project, File.join('master', 'bar'))
click_link 'Web IDE'
find('.ide-file-list')
end
it 'opens folder in IDE' do
expect(page).to have_selector('.is-open', text: 'bar')
end
end
end
import Vue from 'vue';
import ProjectTree from '~/ide/components/ide_project_tree.vue';
import createComponent from 'spec/helpers/vue_mount_component_helper';
describe('IDE project tree', () => {
const Component = Vue.extend(ProjectTree);
let vm;
beforeEach(() => {
vm = createComponent(Component, {
project: {
id: 1,
name: 'test',
web_url: gl.TEST_HOST,
avatar_url: '',
branches: [],
},
});
});
afterEach(() => {
vm.$destroy();
});
it('renders identicon when project has no avatar', () => {
expect(vm.$el.querySelector('.identicon')).not.toBeNull();
});
it('renders avatar image if project has avatar', done => {
vm.project.avatar_url = gl.TEST_HOST;
vm.$nextTick(() => {
expect(vm.$el.querySelector('.identicon')).toBeNull();
expect(vm.$el.querySelector('img.avatar')).not.toBeNull();
done();
});
});
});
...@@ -67,17 +67,35 @@ describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_index(:users, :foo, unique: true)
end
it 'does nothing if the index exists already' do
expect(model).to receive(:index_exists?)
.with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(true)
expect(model).not_to receive(:add_index)
model.add_concurrent_index(:users, :foo, unique: true)
end
end
context 'using MySQL' do
before do
allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
end
it 'creates a regular index' do
expect(model).to receive(:add_index)
.with(:users, :foo, {})
model.add_concurrent_index(:users, :foo)
end
it 'does nothing if the index exists already' do
expect(model).to receive(:index_exists?)
.with(:users, :foo, { unique: true }).and_return(true)
expect(model).not_to receive(:add_index)
model.add_concurrent_index(:users, :foo, unique: true)
end
end
end
...@@ -95,6 +113,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'outside a transaction' do
before do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:index_exists?).and_return(true)
end
context 'using PostgreSQL' do
...@@ -103,18 +122,41 @@ describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:disable_statement_timeout)
end
describe 'by column name' do
it 'removes the index concurrently' do
expect(model).to receive(:remove_index)
.with(:users, { algorithm: :concurrently, column: :foo })
model.remove_concurrent_index(:users, :foo)
end
it 'does nothing if the index does not exist' do
expect(model).to receive(:index_exists?)
.with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(false)
expect(model).not_to receive(:remove_index)
model.remove_concurrent_index(:users, :foo, unique: true)
end
end end
describe 'by index name' do
before do
allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true)
end
it 'removes the index concurrently by index name' do
expect(model).to receive(:remove_index)
.with(:users, { algorithm: :concurrently, name: "index_x_by_y" })
model.remove_concurrent_index_by_name(:users, "index_x_by_y")
end
it 'does nothing if the index does not exist' do
expect(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(false)
expect(model).not_to receive(:remove_index)
model.remove_concurrent_index_by_name(:users, "index_x_by_y")
end
end
end
...@@ -141,6 +183,10 @@ describe Gitlab::Database::MigrationHelpers do
end
describe '#add_concurrent_foreign_key' do
before do
allow(model).to receive(:foreign_key_exists?).and_return(false)
end
context 'inside a transaction' do
it 'raises an error' do
expect(model).to receive(:transaction_open?).and_return(true)
...@@ -157,14 +203,23 @@ describe Gitlab::Database::MigrationHelpers do
end
context 'using MySQL' do
before do
allow(Gitlab::Database).to receive(:mysql?).and_return(true)
end
it 'creates a regular foreign key' do
expect(model).to receive(:add_foreign_key)
.with(:projects, :users, column: :user_id, on_delete: :cascade)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
it 'does not create a foreign key if it exists already' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true)
expect(model).not_to receive(:add_foreign_key)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
end
context 'using PostgreSQL' do
...@@ -189,6 +244,14 @@ describe Gitlab::Database::MigrationHelpers do
column: :user_id,
on_delete: :nullify)
end
it 'does not create a foreign key if it exists already' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
end
end
end
...@@ -203,6 +266,29 @@ describe Gitlab::Database::MigrationHelpers do
end
end
describe '#foreign_key_exists?' do
before do
key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id })
allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
end
it 'finds existing foreign keys by column' do
expect(model.foreign_key_exists?(:projects, :users, column: :non_standard_id)).to be_truthy
end
it 'finds existing foreign keys by target table only' do
expect(model.foreign_key_exists?(:projects, :users)).to be_truthy
end
it 'compares by column name if given' do
expect(model.foreign_key_exists?(:projects, :users, column: :user_id)).to be_falsey
end
it 'compares by target if no column given' do
expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey
end
end
describe '#disable_statement_timeout' do
context 'using PostgreSQL' do
it 'disables statement timeouts' do
......
...@@ -927,7 +927,7 @@ describe Repository do
end
it 'returns nil when the content is not recognizable' do
repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license_key).to be_nil
...@@ -971,7 +971,7 @@ describe Repository do
end
it 'returns nil when the content is not recognizable' do
repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
message: 'Add LICENSE', branch_name: 'master')
expect(repository.license).to be_nil
......
...@@ -447,6 +447,12 @@ describe API::Internal do
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
end
end
......
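The new assertions above pin down the Gitaly connection details that /internal/allowed now returns. Roughly, the JSON is expected to have the following shape (only the key structure is asserted by the spec; every value below is illustrative):

# Illustrative payload shape only; real values come from the project's
# repository storage configuration.
{
  "status" => true,
  "gitaly" => {
    "repository" => {
      "storage_name"  => "default",
      "relative_path" => "group/project.git"
    },
    "address" => "unix:/var/opt/gitlab/gitaly/gitaly.socket",
    "token"   => "gitaly-secret-token"
  }
}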
...@@ -65,7 +65,7 @@ describe API::Templates do
expect(json_response['description']).to include('A short and simple permissive license with conditions')
expect(json_response['conditions']).to eq(%w[include-copyright])
expect(json_response['permissions']).to eq(%w[commercial-use modifications distribution private-use])
expect(json_response['limitations']).to eq(%w[liability warranty])
expect(json_response['content']).to include('MIT License')
end
end
......
...@@ -57,7 +57,7 @@ describe API::V3::Templates do
expect(json_response['description']).to include('A short and simple permissive license with conditions')
expect(json_response['conditions']).to eq(%w[include-copyright])
expect(json_response['permissions']).to eq(%w[commercial-use modifications distribution private-use])
expect(json_response['limitations']).to eq(%w[liability warranty])
expect(json_response['content']).to include('MIT License')
end
end
......