Commit 93410c60 authored by Valery Sizov

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce into ce_upstream[ci skip]

parents ab42a638 bfb5107a
# Documentation
- source: /doc/(.+?)\.md/ # doc/administration/build_artifacts.md
public: '\1.html' # doc/administration/build_artifacts.html
...@@ -121,7 +121,8 @@ linters: ...@@ -121,7 +121,8 @@ linters:
# Avoid nesting selectors too deeply. # Avoid nesting selectors too deeply.
NestingDepth: NestingDepth:
enabled: false enabled: true
max_depth: 6
# Always use placeholder selectors in @extend. # Always use placeholder selectors in @extend.
PlaceholderInExtend: PlaceholderInExtend:
......
0.49.0 0.50.0
\ No newline at end of file
import autosize from 'vendor/autosize'; import Autosize from 'autosize';
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', () => {
const autosizeEls = document.querySelectorAll('.js-autosize'); const autosizeEls = document.querySelectorAll('.js-autosize');
autosize(autosizeEls); Autosize(autosizeEls);
autosize.update(autosizeEls); Autosize.update(autosizeEls);
}); });
...@@ -25,11 +25,14 @@ gl.issueBoards.BoardsStore = { ...@@ -25,11 +25,14 @@ gl.issueBoards.BoardsStore = {
this.state.lists = []; this.state.lists = [];
this.filter.path = getUrlParamsArray().join('&'); this.filter.path = getUrlParamsArray().join('&');
this.detail = { issue: {} }; this.detail = { issue: {} };
<<<<<<< HEAD
}, },
createNewListDropdownData() { createNewListDropdownData() {
this.state.currentBoard = {}; this.state.currentBoard = {};
this.state.currentPage = ''; this.state.currentPage = '';
this.state.reload = false; this.state.reload = false;
=======
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
}, },
addList (listObj, defaultAvatar) { addList (listObj, defaultAvatar) {
const list = new List(listObj, defaultAvatar); const list = new List(listObj, defaultAvatar);
......
...@@ -64,19 +64,16 @@ export default class Clusters { ...@@ -64,19 +64,16 @@ export default class Clusters {
this.poll = new Poll({ this.poll = new Poll({
resource: this.service, resource: this.service,
method: 'fetchData', method: 'fetchData',
successCallback: (data) => { successCallback: data => this.handleSuccess(data),
const { status, status_reason } = data.data; errorCallback: () => Clusters.handleError(),
this.updateContainer(status, status_reason);
},
errorCallback: () => {
Flash(s__('ClusterIntegration|Something went wrong on our end.'));
},
}); });
if (!Visibility.hidden()) { if (!Visibility.hidden()) {
this.poll.makeRequest(); this.poll.makeRequest();
} else { } else {
this.service.fetchData(); this.service.fetchData()
.then(data => this.handleSuccess(data))
.catch(() => Clusters.handleError());
} }
Visibility.change(() => { Visibility.change(() => {
...@@ -88,6 +85,15 @@ export default class Clusters { ...@@ -88,6 +85,15 @@ export default class Clusters {
}); });
} }
static handleError() {
Flash(s__('ClusterIntegration|Something went wrong on our end.'));
}
handleSuccess(data) {
const { status, status_reason } = data.data;
this.updateContainer(status, status_reason);
}
hideAll() { hideAll() {
this.errorContainer.classList.add('hidden'); this.errorContainer.classList.add('hidden');
this.successContainer.classList.add('hidden'); this.successContainer.classList.add('hidden');
......
...@@ -256,12 +256,16 @@ import initGroupAnalytics from './init_group_analytics'; ...@@ -256,12 +256,16 @@ import initGroupAnalytics from './init_group_analytics';
case 'projects:milestones:new': case 'projects:milestones:new':
case 'projects:milestones:edit': case 'projects:milestones:edit':
case 'projects:milestones:update': case 'projects:milestones:update':
new ZenMode();
new DueDateSelectors();
new GLForm($('.milestone-form'), true);
break;
case 'groups:milestones:new': case 'groups:milestones:new':
case 'groups:milestones:edit': case 'groups:milestones:edit':
case 'groups:milestones:update': case 'groups:milestones:update':
new ZenMode(); new ZenMode();
new DueDateSelectors(); new DueDateSelectors();
new GLForm($('.milestone-form'), true); new GLForm($('.milestone-form'), false);
break; break;
case 'projects:compare:show': case 'projects:compare:show':
new Diff(); new Diff();
......
/* eslint-disable no-new */ /* eslint-disable no-new */
/* global MilestoneSelect */ /* global MilestoneSelect */
/* global LabelsSelect */ /* global LabelsSelect */
<<<<<<< HEAD
/* global WeightSelect */ /* global WeightSelect */
=======
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
import IssuableContext from './issuable_context'; import IssuableContext from './issuable_context';
/* global Sidebar */ /* global Sidebar */
......
...@@ -20,7 +20,10 @@ export default class IssuableForm { ...@@ -20,7 +20,10 @@ export default class IssuableForm {
new GfmAutoComplete(gl.GfmAutoComplete && gl.GfmAutoComplete.dataSources).setup(); new GfmAutoComplete(gl.GfmAutoComplete && gl.GfmAutoComplete.dataSources).setup();
new UsersSelect(); new UsersSelect();
<<<<<<< HEAD
groupsSelect(); groupsSelect();
=======
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
new ZenMode(); new ZenMode();
this.titleField = this.form.find('input[name*="[title]"]'); this.titleField = this.form.find('input[name*="[title]"]');
......
...@@ -148,6 +148,7 @@ import _ from 'underscore'; ...@@ -148,6 +148,7 @@ import _ from 'underscore';
$(`[data-milestone-id="${selectedMilestone}"] > a`, $el).addClass('is-active'); $(`[data-milestone-id="${selectedMilestone}"] > a`, $el).addClass('is-active');
}, },
vue: $dropdown.hasClass('js-issue-board-sidebar'), vue: $dropdown.hasClass('js-issue-board-sidebar'),
<<<<<<< HEAD
hideRow: function(milestone) { hideRow: function(milestone) {
if ($('html').hasClass('issue-boards-page') && !$dropdown.hasClass('js-issue-board-sidebar') && if ($('html').hasClass('issue-boards-page') && !$dropdown.hasClass('js-issue-board-sidebar') &&
!$dropdown.closest('.add-issues-modal').length && gl.issueBoards.BoardsStore.state.currentBoard.milestone) { !$dropdown.closest('.add-issues-modal').length && gl.issueBoards.BoardsStore.state.currentBoard.milestone) {
...@@ -164,6 +165,8 @@ import _ from 'underscore'; ...@@ -164,6 +165,8 @@ import _ from 'underscore';
return true; return true;
}, },
=======
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
clicked: function(options) { clicked: function(options) {
const { $el, e } = options; const { $el, e } = options;
let selected = options.selectedObj; let selected = options.selectedObj;
......
...@@ -12,7 +12,7 @@ newline-per-chained-call, no-useless-escape, class-methods-use-this */ ...@@ -12,7 +12,7 @@ newline-per-chained-call, no-useless-escape, class-methods-use-this */
import $ from 'jquery'; import $ from 'jquery';
import _ from 'underscore'; import _ from 'underscore';
import Cookies from 'js-cookie'; import Cookies from 'js-cookie';
import autosize from 'vendor/autosize'; import Autosize from 'autosize';
import 'vendor/jquery.caret'; // required by jquery.atwho import 'vendor/jquery.caret'; // required by jquery.atwho
import 'vendor/jquery.atwho'; import 'vendor/jquery.atwho';
import AjaxCache from '~/lib/utils/ajax_cache'; import AjaxCache from '~/lib/utils/ajax_cache';
...@@ -25,7 +25,7 @@ import TaskList from './task_list'; ...@@ -25,7 +25,7 @@ import TaskList from './task_list';
import { ajaxPost, isInViewport, getPagePath, scrollToElement, isMetaKey } from './lib/utils/common_utils'; import { ajaxPost, isInViewport, getPagePath, scrollToElement, isMetaKey } from './lib/utils/common_utils';
import imageDiffHelper from './image_diff/helpers/index'; import imageDiffHelper from './image_diff/helpers/index';
window.autosize = autosize; window.autosize = Autosize;
function normalizeNewlines(str) { function normalizeNewlines(str) {
return str.replace(/\r\n/g, '\n'); return str.replace(/\r\n/g, '\n');
......
<script> <script>
import { mapActions, mapGetters } from 'vuex'; import { mapActions, mapGetters } from 'vuex';
import _ from 'underscore'; import _ from 'underscore';
import autosize from 'vendor/autosize'; import Autosize from 'autosize';
import Flash from '../../flash'; import Flash from '../../flash';
import Autosave from '../../autosave'; import Autosave from '../../autosave';
import TaskList from '../../task_list'; import TaskList from '../../task_list';
...@@ -219,7 +219,7 @@ ...@@ -219,7 +219,7 @@
}, },
resizeTextarea() { resizeTextarea() {
this.$nextTick(() => { this.$nextTick(() => {
autosize.update(this.$refs.textarea); Autosize.update(this.$refs.textarea);
}); });
}, },
}, },
......
...@@ -7,9 +7,14 @@ import ProjectSelectComboButton from './project_select_combo_button'; ...@@ -7,9 +7,14 @@ import ProjectSelectComboButton from './project_select_combo_button';
function ProjectSelect() { function ProjectSelect() {
$('.ajax-project-select').each(function(i, select) { $('.ajax-project-select').each(function(i, select) {
var placeholder; var placeholder;
const simpleFilter = $(select).data('simple-filter') || false;
this.groupId = $(select).data('group-id'); this.groupId = $(select).data('group-id');
this.includeGroups = $(select).data('include-groups'); this.includeGroups = $(select).data('include-groups');
<<<<<<< HEAD
this.allProjects = $(select).data('allprojects') || false; this.allProjects = $(select).data('allprojects') || false;
=======
this.allProjects = $(select).data('all-projects') || false;
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
this.orderBy = $(select).data('order-by') || 'id'; this.orderBy = $(select).data('order-by') || 'id';
this.withIssuesEnabled = $(select).data('with-issues-enabled'); this.withIssuesEnabled = $(select).data('with-issues-enabled');
this.withMergeRequestsEnabled = $(select).data('with-merge-requests-enabled'); this.withMergeRequestsEnabled = $(select).data('with-merge-requests-enabled');
...@@ -52,12 +57,17 @@ import ProjectSelectComboButton from './project_select_combo_button'; ...@@ -52,12 +57,17 @@ import ProjectSelectComboButton from './project_select_combo_button';
order_by: _this.orderBy, order_by: _this.orderBy,
with_issues_enabled: _this.withIssuesEnabled, with_issues_enabled: _this.withIssuesEnabled,
with_merge_requests_enabled: _this.withMergeRequestsEnabled, with_merge_requests_enabled: _this.withMergeRequestsEnabled,
<<<<<<< HEAD
membership: !_this.allProjects membership: !_this.allProjects
=======
membership: !_this.allProjects,
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
}, projectsCallback); }, projectsCallback);
} }
}; };
})(this), })(this),
id: function(project) { id: function(project) {
if (simpleFilter) return project.id;
return JSON.stringify({ return JSON.stringify({
name: project.name, name: project.name,
url: project.web_url, url: project.web_url,
...@@ -68,7 +78,7 @@ import ProjectSelectComboButton from './project_select_combo_button'; ...@@ -68,7 +78,7 @@ import ProjectSelectComboButton from './project_select_combo_button';
}, },
dropdownCssClass: "ajax-project-dropdown" dropdownCssClass: "ajax-project-dropdown"
}); });
if (simpleFilter) return select;
return new ProjectSelectComboButton(select); return new ProjectSelectComboButton(select);
}); });
} }
......
...@@ -3,10 +3,12 @@ ...@@ -3,10 +3,12 @@
import RepoHelper from '../../helpers/repo_helper'; import RepoHelper from '../../helpers/repo_helper';
import eventHub from '../../event_hub'; import eventHub from '../../event_hub';
import newModal from './modal.vue'; import newModal from './modal.vue';
import upload from './upload.vue';
export default { export default {
components: { components: {
newModal, newModal,
upload,
}, },
data() { data() {
return { return {
...@@ -23,10 +25,12 @@ ...@@ -23,10 +25,12 @@
toggleModalOpen() { toggleModalOpen() {
this.openModal = !this.openModal; this.openModal = !this.openModal;
}, },
createNewEntryInStore(name, type) { createNewEntryInStore(options, openEditMode = true) {
RepoHelper.createNewEntry(name, type); RepoHelper.createNewEntry(options, openEditMode);
if (options.toggleModal) {
this.toggleModalOpen(); this.toggleModalOpen();
}
}, },
}, },
created() { created() {
...@@ -64,6 +68,11 @@ ...@@ -64,6 +68,11 @@
{{ __('New file') }} {{ __('New file') }}
</a> </a>
</li> </li>
<li>
<upload
:current-path="currentPath"
/>
</li>
<li> <li>
<a <a
href="#" href="#"
......
...@@ -24,7 +24,11 @@ ...@@ -24,7 +24,11 @@
}, },
methods: { methods: {
createEntryInStore() { createEntryInStore() {
eventHub.$emit('createNewEntry', this.entryName, this.type); eventHub.$emit('createNewEntry', {
name: this.entryName,
type: this.type,
toggleModal: true,
});
}, },
toggleModalOpen() { toggleModalOpen() {
this.$emit('toggle'); this.$emit('toggle');
......
<script>
import eventHub from '../../event_hub';
export default {
props: {
currentPath: {
type: String,
required: true,
},
},
methods: {
createFile(target, file, isText) {
const { name } = file;
const nameWithPath = `${this.currentPath !== '' ? `${this.currentPath}/` : ''}${name}`;
let { result } = target;
if (!isText) {
result = result.split('base64,')[1];
}
eventHub.$emit('createNewEntry', {
name: nameWithPath,
type: 'blob',
content: result,
toggleModal: false,
base64: !isText,
}, isText);
},
readFile(file) {
const reader = new FileReader();
const isText = file.type.match(/text.*/) !== null;
reader.addEventListener('load', e => this.createFile(e.target, file, isText), { once: true });
if (isText) {
reader.readAsText(file);
} else {
reader.readAsDataURL(file);
}
},
openFile() {
Array.from(this.$refs.fileUpload.files).forEach(file => this.readFile(file));
},
},
mounted() {
this.$refs.fileUpload.addEventListener('change', this.openFile);
},
beforeDestroy() {
this.$refs.fileUpload.removeEventListener('change', this.openFile);
},
};
</script>
<template>
<label
role="button"
class="menu-item"
>
{{ __('Upload file') }}
<input
id="file-upload"
type="file"
class="hidden"
ref="fileUpload"
/>
</label>
</template>
...@@ -52,6 +52,7 @@ export default { ...@@ -52,6 +52,7 @@ export default {
action: f.tempFile ? 'create' : 'update', action: f.tempFile ? 'create' : 'update',
file_path: f.path, file_path: f.path,
content: f.newContent, content: f.newContent,
encoding: f.base64 ? 'base64' : 'text',
})); }));
const branch = newBranch ? `${this.currentBranch}-${this.currentShortHash}` : this.currentBranch; const branch = newBranch ? `${this.currentBranch}-${this.currentShortHash}` : this.currentBranch;
const payload = { const payload = {
......
...@@ -49,6 +49,13 @@ export default { ...@@ -49,6 +49,13 @@ export default {
v-if="!activeFile.render_error" v-if="!activeFile.render_error"
v-html="activeFile.html"> v-html="activeFile.html">
</div> </div>
<div
v-else-if="activeFile.tempFile"
class="vertical-center render-error">
<p class="text-center">
The source could not be displayed for this temporary file.
</p>
</div>
<div <div
v-else-if="activeFile.tooLarge" v-else-if="activeFile.tooLarge"
class="vertical-center render-error"> class="vertical-center render-error">
......
...@@ -155,7 +155,7 @@ const RepoHelper = { ...@@ -155,7 +155,7 @@ const RepoHelper = {
if (newFile.render_error === 'too_large' || newFile.render_error === 'collapsed') { if (newFile.render_error === 'too_large' || newFile.render_error === 'collapsed') {
newFile.tooLarge = true; newFile.tooLarge = true;
} }
newFile.newContent = ''; newFile.newContent = file.newContent ? file.newContent : '';
Store.addToOpenedFiles(newFile); Store.addToOpenedFiles(newFile);
Store.setActiveFiles(newFile); Store.setActiveFiles(newFile);
...@@ -276,7 +276,13 @@ const RepoHelper = { ...@@ -276,7 +276,13 @@ const RepoHelper = {
removeAllTmpFiles(storeFilesKey) { removeAllTmpFiles(storeFilesKey) {
Store[storeFilesKey] = Store[storeFilesKey].filter(f => !f.tempFile); Store[storeFilesKey] = Store[storeFilesKey].filter(f => !f.tempFile);
}, },
createNewEntry(name, type) { createNewEntry(options, openEditMode = true) {
const {
name,
type,
content = '',
base64 = false,
} = options;
const originalPath = Store.path; const originalPath = Store.path;
let entryName = name; let entryName = name;
...@@ -304,9 +310,24 @@ const RepoHelper = { ...@@ -304,9 +310,24 @@ const RepoHelper = {
if ((type === 'tree' && tree.tempFile) || type === 'blob') { if ((type === 'tree' && tree.tempFile) || type === 'blob') {
const file = this.findOrCreateEntry('blob', tree, fileName); const file = this.findOrCreateEntry('blob', tree, fileName);
if (!file.exists) { if (file.exists) {
this.setFile(file.entry, file.entry); Flash(`The name "${file.entry.name}" is already taken in this directory.`);
} else {
const { entry } = file;
entry.newContent = content;
entry.base64 = base64;
if (entry.base64) {
entry.render_error = true;
}
this.setFile(entry, entry);
if (openEditMode) {
this.openEditMode(); this.openEditMode();
} else {
file.entry.render_error = 'asdsad';
}
} }
} }
......
...@@ -19,7 +19,7 @@ const RepoService = { ...@@ -19,7 +19,7 @@ const RepoService = {
getRaw(file) { getRaw(file) {
if (file.tempFile) { if (file.tempFile) {
return Promise.resolve({ return Promise.resolve({
data: '', data: file.newContent ? file.newContent : '',
}); });
} }
......
...@@ -776,12 +776,15 @@ ...@@ -776,12 +776,15 @@
a, a,
button, button,
.menu-item { .menu-item {
margin-bottom: 0;
border-radius: 0; border-radius: 0;
box-shadow: none; box-shadow: none;
padding: 8px 16px; padding: 8px 16px;
text-align: left; text-align: left;
white-space: normal; white-space: normal;
width: 100%; width: 100%;
font-weight: $gl-font-weight-normal;
line-height: normal;
&.dropdown-menu-user-link { &.dropdown-menu-user-link {
white-space: nowrap; white-space: nowrap;
......
...@@ -216,15 +216,12 @@ body { ...@@ -216,15 +216,12 @@ body {
color: $theme-gray-900; color: $theme-gray-900;
} }
&.active > a { &.active > a,
color: $white-light; &.active > a:hover {
&:hover {
color: $white-light; color: $white-light;
} }
} }
} }
}
.container-fluid { .container-fluid {
.navbar-toggle, .navbar-toggle,
......
...@@ -239,13 +239,11 @@ ...@@ -239,13 +239,11 @@
fill: currentColor; fill: currentColor;
} }
&.header-user-dropdown-toggle { &.header-user-dropdown-toggle .header-user-avatar {
.header-user-avatar {
border-color: $white-light; border-color: $white-light;
} }
} }
} }
}
.header-new-dropdown-toggle { .header-new-dropdown-toggle {
margin-right: 0; margin-right: 0;
......
...@@ -269,7 +269,7 @@ ul.notes { ...@@ -269,7 +269,7 @@ ul.notes {
display: none; display: none;
} }
&.system-note-commit-list { &.system-note-commit-list:not(.hide-shade) {
max-height: 70px; max-height: 70px;
overflow: hidden; overflow: hidden;
display: block; display: block;
...@@ -291,16 +291,6 @@ ul.notes { ...@@ -291,16 +291,6 @@ ul.notes {
bottom: 0; bottom: 0;
background: linear-gradient(rgba($white-light, 0.1) -100px, $white-light 100%); background: linear-gradient(rgba($white-light, 0.1) -100px, $white-light 100%);
} }
&.hide-shade {
max-height: 100%;
overflow: auto;
&::after {
display: none;
background: transparent;
}
}
} }
} }
} }
......
...@@ -467,7 +467,7 @@ class ApplicationSetting < ActiveRecord::Base ...@@ -467,7 +467,7 @@ class ApplicationSetting < ActiveRecord::Base
# the enabling/disabling is `performance_bar_allowed_group_id` # the enabling/disabling is `performance_bar_allowed_group_id`
# - If `enable` is false, we set `performance_bar_allowed_group_id` to `nil` # - If `enable` is false, we set `performance_bar_allowed_group_id` to `nil`
def performance_bar_enabled=(enable) def performance_bar_enabled=(enable)
return if enable return if Gitlab::Utils.to_boolean(enable)
self.performance_bar_allowed_group_id = nil self.performance_bar_allowed_group_id = nil
end end
......
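The guard change above is subtle: the admin form submits `performance_bar_enabled` as a string, and with the old `return if enable` a string such as `"0"` or `"false"` is truthy, so the allowed group was never cleared and the bar could not be disabled. A minimal sketch of the difference, assuming `Gitlab::Utils.to_boolean` coerces the usual form-param strings:

```ruby
# Sketch only – the param arrives from the admin form as a String.
enable = '0'

!!enable                          # => true  (old guard returns early, nothing is cleared)
Gitlab::Utils.to_boolean(enable)  # => false (new guard falls through and clears
                                  #           performance_bar_allowed_group_id)
```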
...@@ -404,6 +404,10 @@ class MergeRequest < ActiveRecord::Base ...@@ -404,6 +404,10 @@ class MergeRequest < ActiveRecord::Base
end end
def merge_ongoing? def merge_ongoing?
# While the MergeRequest is locked, it should present itself as 'merge ongoing'.
# The unlocking process is handled by StuckMergeJobsWorker scheduled in Cron.
return true if locked?
!!merge_jid && !merged? && Gitlab::SidekiqStatus.running?(merge_jid) !!merge_jid && !merged? && Gitlab::SidekiqStatus.running?(merge_jid)
end end
...@@ -899,7 +903,7 @@ class MergeRequest < ActiveRecord::Base ...@@ -899,7 +903,7 @@ class MergeRequest < ActiveRecord::Base
# #
def all_commit_shas def all_commit_shas
if persisted? if persisted?
column_shas = MergeRequestDiffCommit.where(merge_request_diff: merge_request_diffs).pluck('DISTINCT(sha)') column_shas = MergeRequestDiffCommit.where(merge_request_diff: merge_request_diffs).limit(10_000).pluck('sha')
serialised_shas = merge_request_diffs.where.not(st_commits: nil).flat_map(&:commit_shas) serialised_shas = merge_request_diffs.where.not(st_commits: nil).flat_map(&:commit_shas)
(column_shas + serialised_shas).uniq (column_shas + serialised_shas).uniq
......
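The new short-circuit in `merge_ongoing?` above means a locked merge request always reports an ongoing merge until `StuckMergeJobsWorker` unlocks it. A small worked example of the resulting truth table (a standalone sketch with plain arguments, not the model itself):

```ruby
# Sketch mirroring the logic above with plain arguments.
def merge_ongoing?(locked:, merge_jid:, merged:, job_running:)
  return true if locked # StuckMergeJobsWorker will unlock the MR later

  !!merge_jid && !merged && job_running
end

merge_ongoing?(locked: true,  merge_jid: nil,   merged: false, job_running: false) # => true
merge_ongoing?(locked: false, merge_jid: 'jid', merged: false, job_running: true)  # => true
merge_ongoing?(locked: false, merge_jid: 'jid', merged: true,  job_running: true)  # => false
```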
...@@ -29,7 +29,15 @@ class Project < ActiveRecord::Base ...@@ -29,7 +29,15 @@ class Project < ActiveRecord::Base
NUMBER_OF_PERMITTED_BOARDS = 1 NUMBER_OF_PERMITTED_BOARDS = 1
UNKNOWN_IMPORT_URL = 'http://unknown.git'.freeze UNKNOWN_IMPORT_URL = 'http://unknown.git'.freeze
LATEST_STORAGE_VERSION = 1 # Hashed Storage versions handle rolling out new storage to project and dependents models:
# nil: legacy
# 1: repository
# 2: attachments
LATEST_STORAGE_VERSION = 2
HASHED_STORAGE_FEATURES = {
repository: 1,
attachments: 2
}.freeze
cache_markdown_field :description, pipeline: :description cache_markdown_field :description, pipeline: :description
...@@ -124,6 +132,7 @@ class Project < ActiveRecord::Base ...@@ -124,6 +132,7 @@ class Project < ActiveRecord::Base
has_one :mock_deployment_service has_one :mock_deployment_service
has_one :mock_monitoring_service has_one :mock_monitoring_service
has_one :microsoft_teams_service has_one :microsoft_teams_service
has_one :packagist_service
# TODO: replace these relations with the fork network versions # TODO: replace these relations with the fork network versions
has_one :forked_project_link, foreign_key: "forked_to_project_id" has_one :forked_project_link, foreign_key: "forked_to_project_id"
...@@ -1394,6 +1403,19 @@ class Project < ActiveRecord::Base ...@@ -1394,6 +1403,19 @@ class Project < ActiveRecord::Base
end end
end end
def after_rename_repo
path_before_change = previous_changes['path'].first
# We need to check whether the project has been rolled out to hashed storage for attachments
# and decide whether we need to move the uploads or no-op.
unless hashed_storage?(:attachments)
Gitlab::UploadsTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
end
Gitlab::PagesTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
end
def rename_repo_notify! def rename_repo_notify!
send_move_instructions(full_path_was) send_move_instructions(full_path_was)
expires_full_path_cache expires_full_path_cache
...@@ -1404,13 +1426,6 @@ class Project < ActiveRecord::Base ...@@ -1404,13 +1426,6 @@ class Project < ActiveRecord::Base
reload_repository! reload_repository!
end end
def after_rename_repo
path_before_change = previous_changes['path'].first
Gitlab::UploadsTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
Gitlab::PagesTransfer.new.rename_project(path_before_change, self.path, namespace.full_path)
end
def running_or_pending_build_count(force: false) def running_or_pending_build_count(force: false)
Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
builds.running_or_pending.count(:all) builds.running_or_pending.count(:all)
...@@ -1600,8 +1615,13 @@ class Project < ActiveRecord::Base ...@@ -1600,8 +1615,13 @@ class Project < ActiveRecord::Base
[nil, 0].include?(self.storage_version) [nil, 0].include?(self.storage_version)
end end
def hashed_storage? # Check if Hashed Storage is enabled for the project with at least the given feature rolled out
self.storage_version && self.storage_version >= 1 #
# @param [Symbol] feature that needs to be rolled out for the project (:repository, :attachments)
def hashed_storage?(feature)
raise ArgumentError, "Invalid feature" unless HASHED_STORAGE_FEATURES.include?(feature)
self.storage_version && self.storage_version >= HASHED_STORAGE_FEATURES[feature]
end end
def renamed? def renamed?
...@@ -1637,7 +1657,7 @@ class Project < ActiveRecord::Base ...@@ -1637,7 +1657,7 @@ class Project < ActiveRecord::Base
end end
def migrate_to_hashed_storage! def migrate_to_hashed_storage!
return if hashed_storage? return if hashed_storage?(:repository)
update!(repository_read_only: true) update!(repository_read_only: true)
...@@ -1662,7 +1682,7 @@ class Project < ActiveRecord::Base ...@@ -1662,7 +1682,7 @@ class Project < ActiveRecord::Base
def storage def storage
@storage ||= @storage ||=
if hashed_storage? if hashed_storage?(:repository)
Storage::HashedProject.new(self) Storage::HashedProject.new(self)
else else
Storage::LegacyProject.new(self) Storage::LegacyProject.new(self)
......
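To make the new storage-version gating concrete, here is an illustrative standalone sketch (not the model itself) of how `storage_version` maps onto the `HASHED_STORAGE_FEATURES` constants introduced above:

```ruby
# Illustration only – mirrors Project#hashed_storage?(feature) above.
HASHED_STORAGE_FEATURES = { repository: 1, attachments: 2 }.freeze

def hashed_storage?(storage_version, feature)
  raise ArgumentError, "Invalid feature" unless HASHED_STORAGE_FEATURES.include?(feature)

  storage_version && storage_version >= HASHED_STORAGE_FEATURES[feature]
end

hashed_storage?(nil, :repository)  # => nil (falsy) – legacy project
hashed_storage?(1, :repository)    # => true  – repository is hashed
hashed_storage?(1, :attachments)   # => false – attachments still on legacy storage
hashed_storage?(2, :attachments)   # => true  – fully rolled out
```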
class PackagistService < Service
include HTTParty
prop_accessor :username, :token, :server
validates :username, presence: true, if: :activated?
validates :token, presence: true, if: :activated?
default_value_for :push_events, true
default_value_for :tag_push_events, true
after_save :compose_service_hook, if: :activated?
def title
'Packagist'
end
def description
'Update your project on Packagist, the main Composer repository'
end
def self.to_param
'packagist'
end
def fields
[
{ type: 'text', name: 'username', placeholder: '', required: true },
{ type: 'text', name: 'token', placeholder: '', required: true },
{ type: 'text', name: 'server', placeholder: 'https://packagist.org', required: false }
]
end
def self.supported_events
%w(push merge_request tag_push)
end
def execute(data)
return unless supported_events.include?(data[:object_kind])
service_hook.execute(data)
end
def test(data)
begin
result = execute(data)
return { success: false, result: result[:message] } if result[:http_status] != 202
rescue StandardError => error
return { success: false, result: error }
end
{ success: true, result: result[:message] }
end
def compose_service_hook
hook = service_hook || build_service_hook
hook.url = hook_url
hook.save
end
def hook_url
base_url = server.present? ? server : 'https://packagist.org'
"#{base_url}/api/update-package?username=#{username}&apiToken=#{token}"
end
end
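For reference, the hook URL that `PackagistService#hook_url` composes for a hypothetical configuration (the username, token, and blank server below are invented for illustration, and a plain emptiness check stands in for ActiveSupport's `present?`):

```ruby
# Hypothetical values, illustration only.
username = 'composer-user'
token    = 'secret-token'
server   = '' # a blank server falls back to packagist.org

base_url = server.to_s.empty? ? 'https://packagist.org' : server
"#{base_url}/api/update-package?username=#{username}&apiToken=#{token}"
# => "https://packagist.org/api/update-package?username=composer-user&apiToken=secret-token"
```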
...@@ -240,6 +240,7 @@ class Service < ActiveRecord::Base ...@@ -240,6 +240,7 @@ class Service < ActiveRecord::Base
kubernetes kubernetes
mattermost_slash_commands mattermost_slash_commands
mattermost mattermost
packagist
pipelines_email pipelines_email
pivotaltracker pivotaltracker
prometheus prometheus
......
...@@ -9,6 +9,7 @@ module Issues ...@@ -9,6 +9,7 @@ module Issues
notification_service.reopen_issue(issue, current_user) notification_service.reopen_issue(issue, current_user)
execute_hooks(issue, 'reopen') execute_hooks(issue, 'reopen')
invalidate_cache_counts(issue, users: issue.assignees) invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
end end
issue issue
......
...@@ -106,16 +106,9 @@ module MergeRequests ...@@ -106,16 +106,9 @@ module MergeRequests
@merge_request.can_remove_source_branch?(branch_deletion_user) @merge_request.can_remove_source_branch?(branch_deletion_user)
end end
# Logs merge error message and cleans `MergeRequest#merge_jid`.
#
def handle_merge_error(log_message:, save_message_on_model: false) def handle_merge_error(log_message:, save_message_on_model: false)
Rails.logger.error("MergeService ERROR: #{merge_request_info} - #{log_message}") Rails.logger.error("MergeService ERROR: #{merge_request_info} - #{log_message}")
@merge_request.update(merge_error: log_message) if save_message_on_model
if save_message_on_model
@merge_request.update(merge_error: log_message, merge_jid: nil)
else
clean_merge_jid
end
end end
def merge_request_info def merge_request_info
......
...@@ -11,6 +11,7 @@ module MergeRequests ...@@ -11,6 +11,7 @@ module MergeRequests
merge_request.reload_diff(current_user) merge_request.reload_diff(current_user)
merge_request.mark_as_unchecked merge_request.mark_as_unchecked
invalidate_cache_counts(merge_request, users: merge_request.assignees) invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
end end
merge_request merge_request
......
...@@ -12,7 +12,7 @@ module Projects ...@@ -12,7 +12,7 @@ module Projects
end end
def execute def execute
return if project.hashed_storage? return if project.hashed_storage?(:repository)
@old_disk_path = project.disk_path @old_disk_path = project.disk_path
has_wiki = project.wiki.repository_exists? has_wiki = project.wiki.repository_exists?
......
...@@ -30,7 +30,7 @@ class FileUploader < GitlabUploader ...@@ -30,7 +30,7 @@ class FileUploader < GitlabUploader
# #
# Returns a String without a trailing slash # Returns a String without a trailing slash
def self.dynamic_path_segment(model) def self.dynamic_path_segment(model)
File.join(CarrierWave.root, base_dir, model.full_path) File.join(CarrierWave.root, base_dir, model.disk_path)
end end
attr_accessor :model attr_accessor :model
......
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
= f.label :description, "Description", class: "control-label" = f.label :description, "Description", class: "control-label"
.col-sm-10 .col-sm-10
= render layout: 'projects/md_preview', locals: { url: group_preview_markdown_path } do = render layout: 'projects/md_preview', locals: { url: group_preview_markdown_path } do
= render 'projects/zen', f: f, attr: :description, classes: 'note-textarea', placeholder: 'Write milestone description...' = render 'projects/zen', f: f, attr: :description, classes: 'note-textarea', placeholder: 'Write milestone description...', supports_autocomplete: false
.clearfix .clearfix
.error-alert .error-alert
......
...@@ -23,7 +23,7 @@ class StuckMergeJobsWorker ...@@ -23,7 +23,7 @@ class StuckMergeJobsWorker
merge_requests = MergeRequest.where(id: completed_ids) merge_requests = MergeRequest.where(id: completed_ids)
merge_requests.where.not(merge_commit_sha: nil).update_all(state: :merged) merge_requests.where.not(merge_commit_sha: nil).update_all(state: :merged)
merge_requests.where(merge_commit_sha: nil).update_all(state: :opened) merge_requests.where(merge_commit_sha: nil).update_all(state: :opened, merge_jid: nil)
Rails.logger.info("Updated state of locked merge jobs. JIDs: #{completed_jids.join(', ')}") Rails.logger.info("Updated state of locked merge jobs. JIDs: #{completed_jids.join(', ')}")
end end
......
---
title: Hashed Storage support for Attachments
merge_request: 15068
author:
type: added
---
title: Stop merge requests with thousands of commits from timing out
merge_request: 15063
author:
type: performance
---
title: Allow to disable the Performance Bar
merge_request: 15084
author:
type: fixed
---
title: Enable NestingDepth (level 6) on scss-lint
merge_request: 15073
author: Takuya Noguchi
type: other
---
title: Refresh open Issue and Merge Request project counter caches when re-opening.
merge_request: 15085
author: Rob Ede @robjtede
type: fixed
---
title: Only set Auto-Submitted header once for emails on push
merge_request:
author:
type: fixed
---
title: Fix namespacing for MergeWhenPipelineSucceedsService in MR API
merge_request:
author:
type: fixed
---
title: Adds callback functions for initial request in clusters page
merge_request:
author:
type: fixed
---
title: Add Packagist project service
merge_request: 14493
author: Matt Coleman
type: added
---
title: Fix 500 errors caused by empty diffs in some discussions
merge_request: 14945
author: Alexander Popov
type: fixed
---
title: Use project select dropdown not only as a combobutton
merge_request: 15043
author:
type: fixed
---
title: Fix widget of locked merge requests not being presented
merge_request:
author:
type: fixed
---
title: Allow files to be uploaded in the multi-file editor
merge_request:
author:
type: added
---
title: Fix broken Members link when relative URL root paths are used
merge_request:
author:
type: fixed
---
title: Update i18n section in FE docs for marking and interpolation
merge_request:
author:
type: changed
...@@ -31,7 +31,10 @@ scope(constraints: { username: Gitlab::PathRegex.root_namespace_route_regex }) d ...@@ -31,7 +31,10 @@ scope(constraints: { username: Gitlab::PathRegex.root_namespace_route_regex }) d
get :contributed, as: :contributed_projects get :contributed, as: :contributed_projects
get :snippets get :snippets
get :exists get :exists
<<<<<<< HEAD
get :pipelines_quota get :pipelines_quota
=======
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
get '/', to: redirect('%{username}'), as: nil get '/', to: redirect('%{username}'), as: nil
end end
......
...@@ -28,6 +28,12 @@ will be allowed to display the Performance Bar. ...@@ -28,6 +28,12 @@ will be allowed to display the Performance Bar.
Make sure _Enable the Performance Bar_ is checked and hit Make sure _Enable the Performance Bar_ is checked and hit
**Save** to save the changes. **Save** to save the changes.
Once the Performance Bar is enabled, you will need to press the [<kbd>p</kbd> +
<kbd>b</kbd> keyboard shortcut](../../../workflow/shortcuts.md) to actually
display it.
You can toggle the Bar using the same shortcut.
--- ---
![GitLab Performance Bar Admin Settings](img/performance_bar_configuration_settings.png) ![GitLab Performance Bar Admin Settings](img/performance_bar_configuration_settings.png)
......
...@@ -45,6 +45,8 @@ In this experimental phase, only a few metrics are available: ...@@ -45,6 +45,8 @@ In this experimental phase, only a few metrics are available:
| redis_ping_success | Gauge | 9.4 | Whether or not the last redis ping succeeded | | redis_ping_success | Gauge | 9.4 | Whether or not the last redis ping succeeded |
| redis_ping_latency_seconds | Gauge | 9.4 | Round trip time of the redis ping | | redis_ping_latency_seconds | Gauge | 9.4 | Round trip time of the redis ping |
| user_session_logins_total | Counter | 9.4 | Counter of how many users have logged in | | user_session_logins_total | Counter | 9.4 | Counter of how many users have logged in |
| filesystem_circuitbreaker_latency_seconds | Histogram | 9.5 | Latency of the stat check the circuitbreaker uses to probe a shard |
| filesystem_circuitbreaker | Gauge | 9.5 | Whether or not the circuit for a certain shard is broken |
## Metrics shared directory ## Metrics shared directory
......
...@@ -27,6 +27,9 @@ of load in big installations, and can be even worst if they are using any type o ...@@ -27,6 +27,9 @@ of load in big installations, and can be even worst if they are using any type o
Last, for GitLab Geo, this storage type means we have to synchronize the disk state, replicate renames in the correct Last, for GitLab Geo, this storage type means we have to synchronize the disk state, replicate renames in the correct
order or we may end-up with wrong repository or missing data temporarily. order or we may end-up with wrong repository or missing data temporarily.
This pattern also exists in other objects stored in GitLab, like issue Attachments, GitLab Pages artifacts,
Docker Containers for the integrated Registry, etc.
## Hashed Storage ## Hashed Storage
Hashed Storage is the new storage behavior we are rolling out with 10.0. It's not enabled by default yet, but we Hashed Storage is the new storage behavior we are rolling out with 10.0. It's not enabled by default yet, but we
...@@ -67,3 +70,23 @@ To migrate your existing projects to the new storage type, check the specific [r ...@@ -67,3 +70,23 @@ To migrate your existing projects to the new storage type, check the specific [r
[ce-28283]: https://gitlab.com/gitlab-org/gitlab-ce/issues/28283 [ce-28283]: https://gitlab.com/gitlab-org/gitlab-ce/issues/28283
[rake tasks]: raketasks/storage.md#migrate-existing-projects-to-hashed-storage [rake tasks]: raketasks/storage.md#migrate-existing-projects-to-hashed-storage
[storage-paths]: repository_storage_types.md [storage-paths]: repository_storage_types.md
### Hashed Storage coverage
We are incrementally moving every storable object in GitLab to the Hashed Storage pattern. You can check the current
coverage status below.
Note that objects stored in an S3-compatible endpoint do not have the downsides mentioned earlier, because they
are not prefixed with `#{namespace}/#{project_name}`; this is true for CI Cache and LFS Objects. A sketch of how hashed paths avoid this prefix follows the coverage table below.
| Storable Object | Legacy Storage | Hashed Storage | S3 Compatible | GitLab Version |
| ----------------| -------------- | -------------- | ------------- | -------------- |
| Repository | Yes | Yes | - | 10.0 |
| Attachments | Yes | Yes | - | 10.2 |
| Avatars | Yes | No | - | - |
| Pages | Yes | No | - | - |
| Docker Registry | Yes | No | - | - |
| CI Build Logs | No | No | - | - |
| CI Artifacts | No | No | - | - |
| CI Cache | No | No | Yes | - |
| LFS Objects | Yes | No | Yes (EEP) | - |
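As promised above, a sketch of the idea behind Hashed Storage paths: the on-disk location is derived from the immutable project ID instead of `#{namespace}/#{project_name}`, so renames and transfers never move data. This is illustrative only, not the exact production scheme:

```ruby
require 'digest'

# Illustrative only – derive a rename-proof disk path from the project ID.
def hashed_disk_path(project_id)
  hash = Digest::SHA2.hexdigest(project_id.to_s) # SHA-256 hex digest
  "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
end

hashed_disk_path(42) # path depends only on the ID, not on namespace or project name
```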
...@@ -57,7 +57,7 @@ GET /projects/:id/pipelines/:pipeline_id ...@@ -57,7 +57,7 @@ GET /projects/:id/pipelines/:pipeline_id
| `pipeline_id` | integer | yes | The ID of a pipeline | | `pipeline_id` | integer | yes | The ID of a pipeline |
``` ```
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/projects/1/pipeline/46" curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/projects/1/pipelines/46"
``` ```
Example of response Example of response
......
...@@ -582,6 +582,40 @@ Delete Mattermost slash command service for a project. ...@@ -582,6 +582,40 @@ Delete Mattermost slash command service for a project.
DELETE /projects/:id/services/mattermost-slash-commands DELETE /projects/:id/services/mattermost-slash-commands
``` ```
## Packagist
Update your project on Packagist, the main Composer repository, when commits or tags are pushed to GitLab.
### Create/Edit Packagist service
Set Packagist service for a project.
```
PUT /projects/:id/services/packagist
```
Parameters:
- `username` (**required**)
- `token` (**required**)
- `server` (optional)
### Delete Packagist service
Delete Packagist service for a project.
```
DELETE /projects/:id/services/packagist
```
### Get Packagist service settings
Get Packagist service settings for a project.
```
GET /projects/:id/services/packagist
```
## Pipeline-Emails ## Pipeline-Emails
Get emails for GitLab CI pipelines. Get emails for GitLab CI pipelines.
......
...@@ -106,6 +106,10 @@ Frontend security practices. ...@@ -106,6 +106,10 @@ Frontend security practices.
## [Accessibility](accessibility.md) ## [Accessibility](accessibility.md)
Our accessibility standards and resources. Our accessibility standards and resources.
## [Internationalization (i18n) and Translations](../i18n/externalization.md)
Frontend internationalization support is described in [this document](../i18n/).
The [externalization part of the guide](../i18n/externalization.md) explains the helpers/methods available.
[rails]: http://rubyonrails.org/ [rails]: http://rubyonrails.org/
[haml]: http://haml.info/ [haml]: http://haml.info/
......
...@@ -180,15 +180,43 @@ aren't in the message with id `1 pipeline`. ...@@ -180,15 +180,43 @@ aren't in the message with id `1 pipeline`.
## Working with special content ## Working with special content
### Just marking content for parsing
- In Ruby/HAML:
```ruby
_('Subscribe')
```
- In JavaScript:
```js
import { __ } from '../../../locale';
const label = __('Subscribe');
```
Sometimes there are some dynamic translations that can't be found by the
parser when running `bundle exec rake gettext:find`. For these scenarios you can
use the [`_N` method](https://github.com/grosser/gettext_i18n_rails/blob/c09e38d481e0899ca7d3fc01786834fa8e7aab97/Readme.md#unfound-translations-with-rake-gettextfind).
There is also an alternative method to [translate messages from validation errors](https://github.com/grosser/gettext_i18n_rails/blob/c09e38d481e0899ca7d3fc01786834fa8e7aab97/Readme.md#option-a).
### Interpolation ### Interpolation
- In Ruby/HAML: - In Ruby/HAML:
```ruby ```ruby
_("Hello %{name}") % { name: 'Joe' } _("Hello %{name}") % { name: 'Joe' } => 'Hello Joe'
``` ```
- In JavaScript: Not supported at this moment. - In JavaScript:
```js
import { __, sprintf } from '../../../locale';
sprintf(__('Hello %{username}'), { username: 'Joe' }) => 'Hello Joe'
```
### Plurals ### Plurals
...@@ -234,14 +262,6 @@ Sometimes you need to add some context to the text that you want to translate ...@@ -234,14 +262,6 @@ Sometimes you need to add some context to the text that you want to translate
s__('OpenedNDaysAgo|Opened') s__('OpenedNDaysAgo|Opened')
``` ```
### Just marking content for parsing
Sometimes there are some dynamic translations that can't be found by the
parser when running `bundle exec rake gettext:find`. For these scenarios you can
use the [`_N` method](https://github.com/grosser/gettext_i18n_rails/blob/c09e38d481e0899ca7d3fc01786834fa8e7aab97/Readme.md#unfound-translations-with-rake-gettextfind).
There is also and alternative method to [translate messages from validation errors](https://github.com/grosser/gettext_i18n_rails/blob/c09e38d481e0899ca7d3fc01786834fa8e7aab97/Readme.md#option-a).
## Adding a new language ## Adding a new language
Let's suppose you want to add translations for a new language, let's say French. Let's suppose you want to add translations for a new language, let's say French.
......
...@@ -136,25 +136,33 @@ In the example below we use Amazon S3 for storage, but Fog also lets you use ...@@ -136,25 +136,33 @@ In the example below we use Amazon S3 for storage, but Fog also lets you use
for AWS, Google, OpenStack Swift, Rackspace and Aliyun as well. A local driver is for AWS, Google, OpenStack Swift, Rackspace and Aliyun as well. A local driver is
[also available](#uploading-to-locally-mounted-shares). [also available](#uploading-to-locally-mounted-shares).
For omnibus packages, add the following to `/etc/gitlab/gitlab.rb`: #### Using Amazon S3
```ruby For Omnibus GitLab packages:
gitlab_rails['backup_upload_connection'] = {
1. Add the following to `/etc/gitlab/gitlab.rb`:
```ruby
gitlab_rails['backup_upload_connection'] = {
'provider' => 'AWS', 'provider' => 'AWS',
'region' => 'eu-west-1', 'region' => 'eu-west-1',
'aws_access_key_id' => 'AKIAKIAKI', 'aws_access_key_id' => 'AKIAKIAKI',
'aws_secret_access_key' => 'secret123' 'aws_secret_access_key' => 'secret123'
# If using an IAM Profile, don't configure aws_access_key_id & aws_secret_access_key # If using an IAM Profile, don't configure aws_access_key_id & aws_secret_access_key
# 'use_iam_profile' => true # 'use_iam_profile' => true
} }
gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket' gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket'
``` ```
1. [Reconfigure GitLab] for the changes to take effect
Make sure to run `sudo gitlab-ctl reconfigure` after editing `/etc/gitlab/gitlab.rb` to reflect the changes. ---
For installations from source: For installations from source:
```yaml 1. Edit `home/git/gitlab/config/gitlab.yml`:
```yaml
backup: backup:
# snip # snip
upload: upload:
...@@ -173,7 +181,9 @@ For installations from source: ...@@ -173,7 +181,9 @@ For installations from source:
# encryption: 'AES256' # encryption: 'AES256'
# Specifies Amazon S3 storage class to use for backups, this is optional # Specifies Amazon S3 storage class to use for backups, this is optional
# storage_class: 'STANDARD' # storage_class: 'STANDARD'
``` ```
1. [Restart GitLab] for the changes to take effect
If you are uploading your backups to S3 you will probably want to create a new If you are uploading your backups to S3 you will probably want to create a new
IAM user with restricted access rights. To give the upload user access only for IAM user with restricted access rights. To give the upload user access only for
...@@ -226,6 +236,50 @@ with the name of your bucket: ...@@ -226,6 +236,50 @@ with the name of your bucket:
} }
``` ```
#### Using Google Cloud Storage
If you want to use Google Cloud Storage to save backups, you'll have to create
an access key from the Google console first:
1. Go to the storage settings page https://console.cloud.google.com/storage/settings
1. Select "Interoperability" and create an access key
1. Make note of the "Access Key" and "Secret" and replace them in the
configurations below
1. Make sure you already have a bucket created
For Omnibus GitLab packages:
1. Edit `/etc/gitlab/gitlab.rb`:
```ruby
gitlab_rails['backup_upload_connection'] = {
'provider' => 'Google',
'google_storage_access_key_id' => 'Access Key',
'google_storage_secret_access_key' => 'Secret'
}
gitlab_rails['backup_upload_remote_directory'] = 'my.google.bucket'
```
1. [Reconfigure GitLab] for the changes to take effect
---
For installations from source:
1. Edit `/home/git/gitlab/config/gitlab.yml`:
```yaml
backup:
upload:
connection:
provider: 'Google'
google_storage_access_key_id: 'Access Key'
google_storage_secret_access_key: 'Secret'
remote_directory: 'my.google.bucket'
```
1. [Restart GitLab] for the changes to take effect
### Uploading to locally mounted shares ### Uploading to locally mounted shares
You may also send backups to a mounted share (`NFS` / `CIFS` / `SMB` / etc.) by You may also send backups to a mounted share (`NFS` / `CIFS` / `SMB` / etc.) by
...@@ -554,3 +608,6 @@ The rake task runs this as the `gitlab` user which does not have the superuser a ...@@ -554,3 +608,6 @@ The rake task runs this as the `gitlab` user which does not have the superuser a
Those objects have no influence on the database backup/restore but they give this annoying warning. Those objects have no influence on the database backup/restore but they give this annoying warning.
For more information see similar questions on postgresql issue tracker[here](http://www.postgresql.org/message-id/201110220712.30886.adrian.klaver@gmail.com) and [here](http://www.postgresql.org/message-id/2039.1177339749@sss.pgh.pa.us) as well as [stack overflow](http://stackoverflow.com/questions/4368789/error-must-be-owner-of-language-plpgsql). For more information see similar questions on postgresql issue tracker[here](http://www.postgresql.org/message-id/201110220712.30886.adrian.klaver@gmail.com) and [here](http://www.postgresql.org/message-id/2039.1177339749@sss.pgh.pa.us) as well as [stack overflow](http://stackoverflow.com/questions/4368789/error-must-be-owner-of-language-plpgsql).
[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
[restart GitLab]: ../administration/restart_gitlab.md#installations-from-source
...@@ -44,6 +44,7 @@ Click on the service links to see further configuration instructions and details ...@@ -44,6 +44,7 @@ Click on the service links to see further configuration instructions and details
| [Mattermost slash commands](mattermost_slash_commands.md) | Mattermost chat and ChatOps slash commands | | [Mattermost slash commands](mattermost_slash_commands.md) | Mattermost chat and ChatOps slash commands |
| [Mattermost Notifications](mattermost.md) | Receive event notifications in Mattermost | | [Mattermost Notifications](mattermost.md) | Receive event notifications in Mattermost |
| [Microsoft teams](microsoft_teams.md) | Receive notifications for actions that happen on GitLab into a room on Microsoft Teams using Office 365 Connectors | | [Microsoft teams](microsoft_teams.md) | Receive notifications for actions that happen on GitLab into a room on Microsoft Teams using Office 365 Connectors |
| Packagist | Update your project on Packagist, the main Composer repository |
| Pipelines emails | Email the pipeline status to a list of recipients | | Pipelines emails | Email the pipeline status to a list of recipients |
| [Slack Notifications](slack.md) | Send GitLab events (e.g. issue created) to Slack as notifications | | [Slack Notifications](slack.md) | Send GitLab events (e.g. issue created) to Slack as notifications |
| [Slack slash commands](slack_slash_commands.md) | Use slash commands in Slack to control GitLab | | [Slack slash commands](slack_slash_commands.md) | Use slash commands in Slack to control GitLab |
......
...@@ -9,7 +9,7 @@ You can see GitLab's keyboard shortcuts by using 'shift + ?' ...@@ -9,7 +9,7 @@ You can see GitLab's keyboard shortcuts by using 'shift + ?'
| <kbd>n</kbd> | Main navigation | | <kbd>n</kbd> | Main navigation |
| <kbd>s</kbd> | Focus search | | <kbd>s</kbd> | Focus search |
| <kbd>f</kbd> | Focus filter | | <kbd>f</kbd> | Focus filter |
| <kbd>p b</kbd> | Show/hide the Performance Bar | | <kbd>p</kbd> + <kbd>b</kbd> | Show/hide the Performance Bar |
| <kbd>?</kbd> | Show/hide this dialog | | <kbd>?</kbd> | Show/hide this dialog |
| <kbd></kbd> + <kbd>shift</kbd> + <kbd>p</kbd> | Toggle markdown preview | | <kbd></kbd> + <kbd>shift</kbd> + <kbd>p</kbd> | Toggle markdown preview |
| <kbd></kbd> | Edit last comment (when focused on an empty textarea) | | <kbd></kbd> | Edit last comment (when focused on an empty textarea) |
......
class AdditionalEmailHeadersInterceptor class AdditionalEmailHeadersInterceptor
def self.delivering_email(message) def self.delivering_email(message)
message.headers( message.header['Auto-Submitted'] ||= 'auto-generated'
'Auto-Submitted' => 'auto-generated', message.header['X-Auto-Response-Suppress'] ||= 'All'
'X-Auto-Response-Suppress' => 'All'
)
end end
end end
...@@ -317,7 +317,7 @@ module API ...@@ -317,7 +317,7 @@ module API
unauthorized! unless merge_request.can_cancel_merge_when_pipeline_succeeds?(current_user) unauthorized! unless merge_request.can_cancel_merge_when_pipeline_succeeds?(current_user)
::MergeRequest::MergeWhenPipelineSucceedsService ::MergeRequests::MergeWhenPipelineSucceedsService
.new(merge_request.target_project, current_user) .new(merge_request.target_project, current_user)
.cancel(merge_request) .cancel(merge_request)
end end
......
...@@ -373,6 +373,26 @@ module API ...@@ -373,6 +373,26 @@ module API
desc: 'The Slack token' desc: 'The Slack token'
} }
], ],
'packagist' => [
{
required: true,
name: :username,
type: String,
desc: 'The username'
},
{
required: true,
name: :token,
type: String,
desc: 'The Packagist API token'
},
{
required: false,
name: :server,
type: String,
desc: 'The server'
}
],
'pipelines-email' => [ 'pipelines-email' => [
{ {
required: true, required: true,
...@@ -597,6 +617,7 @@ module API ...@@ -597,6 +617,7 @@ module API
KubernetesService, KubernetesService,
MattermostSlashCommandsService, MattermostSlashCommandsService,
SlackSlashCommandsService, SlackSlashCommandsService,
PackagistService,
PipelinesEmailService, PipelinesEmailService,
PivotaltrackerService, PivotaltrackerService,
PrometheusService, PrometheusService,
......
...@@ -394,6 +394,26 @@ module API ...@@ -394,6 +394,26 @@ module API
desc: 'The Slack token' desc: 'The Slack token'
} }
], ],
'packagist' => [
{
required: true,
name: :username,
type: String,
desc: 'The username'
},
{
required: true,
name: :token,
type: String,
desc: 'The Packagist API token'
},
{
required: false,
name: :server,
type: String,
desc: 'The server'
}
],
'pipelines-email' => [ 'pipelines-email' => [
{ {
required: true, required: true,
......
...@@ -94,7 +94,9 @@ module Gitlab ...@@ -94,7 +94,9 @@ module Gitlab
end end
def diff_file(repository) def diff_file(repository)
@diff_file ||= begin return @diff_file if defined?(@diff_file)
@diff_file = begin
if RequestStore.active? if RequestStore.active?
key = { key = {
project_id: repository.project.id, project_id: repository.project.id,
...@@ -122,8 +124,8 @@ module Gitlab ...@@ -122,8 +124,8 @@ module Gitlab
def find_diff_file(repository) def find_diff_file(repository)
return unless diff_refs.complete? return unless diff_refs.complete?
return unless comparison = diff_refs.compare_in(repository.project)
diff_refs.compare_in(repository.project).diffs(paths: paths, expanded: true).diff_files.first comparison.diffs(paths: paths, expanded: true).diff_files.first
end end
def get_formatter_class(type) def get_formatter_class(type)
......
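The memoization change above (from `@diff_file ||= …` to `return @diff_file if defined?(@diff_file)`) matters because `||=` re-runs the block whenever the cached value is `nil` or `false`. A generic illustration, not GitLab code:

```ruby
# Generic illustration – `||=` keeps re-running the lookup when the memoized
# result is nil; `defined?` caches it after the first call.
class Lookup
  attr_reader :calls

  def initialize
    @calls = 0
  end

  def with_or_equals
    @memo_a ||= expensive # re-invoked on every call, because expensive returns nil
  end

  def with_defined
    return @memo_b if defined?(@memo_b)

    @memo_b = expensive # invoked once, even though the result is nil
  end

  private

  def expensive
    @calls += 1
    nil
  end
end

lookup = Lookup.new
3.times { lookup.with_or_equals }
lookup.calls # => 3

3.times { lookup.with_defined }
lookup.calls # => 4 (only one additional call)
```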
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
"webpack-prod": "NODE_ENV=production webpack --config config/webpack.config.js" "webpack-prod": "NODE_ENV=production webpack --config config/webpack.config.js"
}, },
"dependencies": { "dependencies": {
"autosize": "^4.0.0",
"axios": "^0.16.2", "axios": "^0.16.2",
"babel-core": "^6.22.1", "babel-core": "^6.22.1",
"babel-eslint": "^7.2.1", "babel-eslint": "^7.2.1",
......
...@@ -119,7 +119,7 @@ FactoryGirl.define do ...@@ -119,7 +119,7 @@ FactoryGirl.define do
finished_at nil finished_at nil
end end
factory :ci_build_tag do trait :tag do
tag true tag true
end end
......
...@@ -118,6 +118,29 @@ feature 'Admin updates settings' do ...@@ -118,6 +118,29 @@ feature 'Admin updates settings' do
expect(find_field('ED25519 SSH keys').value).to eq(forbidden) expect(find_field('ED25519 SSH keys').value).to eq(forbidden)
end end
scenario 'Change Performance Bar settings' do
group = create(:group)
check 'Enable the Performance Bar'
fill_in 'Allowed group', with: group.path
click_on 'Save'
expect(page).to have_content 'Application settings saved successfully'
expect(find_field('Enable the Performance Bar')).to be_checked
expect(find_field('Allowed group').value).to eq group.path
uncheck 'Enable the Performance Bar'
click_on 'Save'
expect(page).to have_content 'Application settings saved successfully'
expect(find_field('Enable the Performance Bar')).not_to be_checked
expect(find_field('Allowed group').value).to be_nil
end
def check_all_events def check_all_events
page.check('Active') page.check('Active')
page.check('Push') page.check('Push')
......
...@@ -19,9 +19,9 @@ feature 'Group milestones', :js do ...@@ -19,9 +19,9 @@ feature 'Group milestones', :js do
end end
it 'renders description preview' do it 'renders description preview' do
form = find('.gfm-form') description = find('.note-textarea')
form.fill_in(:milestone_description, with: '') description.native.send_keys('')
click_link('Preview') click_link('Preview')
...@@ -31,7 +31,7 @@ feature 'Group milestones', :js do ...@@ -31,7 +31,7 @@ feature 'Group milestones', :js do
click_link('Write') click_link('Write')
form.fill_in(:milestone_description, with: ':+1: Nice') description.native.send_keys(':+1: Nice')
click_link('Preview') click_link('Preview')
...@@ -51,6 +51,13 @@ feature 'Group milestones', :js do ...@@ -51,6 +51,13 @@ feature 'Group milestones', :js do
expect(find('.start_date')).to have_content(Date.today.at_beginning_of_month.strftime('%b %-d, %Y')) expect(find('.start_date')).to have_content(Date.today.at_beginning_of_month.strftime('%b %-d, %Y'))
end end
it 'description input does not support autocomplete' do
description = find('.note-textarea')
description.native.send_keys('!')
expect(page).not_to have_selector('.atwho-view')
end
end end
context 'milestones list' do context 'milestones list' do
......
require 'spec_helper'
describe 'User activates Packagist' do
let(:project) { create(:project) }
let(:user) { create(:user) }
before do
project.add_master(user)
sign_in(user)
visit(project_settings_integrations_path(project))
click_link('Packagist')
end
it 'activates service' do
check('Active')
fill_in('Username', with: 'theUser')
fill_in('Token', with: 'verySecret')
click_button('Save')
expect(page).to have_content('Packagist activated.')
end
end
...@@ -21,5 +21,6 @@ describe 'User views services' do ...@@ -21,5 +21,6 @@ describe 'User views services' do
expect(page).to have_content('JetBrains TeamCity') expect(page).to have_content('JetBrains TeamCity')
expect(page).to have_content('Asana') expect(page).to have_content('Asana')
expect(page).to have_content('Irker (IRC gateway)') expect(page).to have_content('Irker (IRC gateway)')
expect(page).to have_content('Packagist')
end end
end end
require 'spec_helper'
feature 'Multi-file editor upload file', :js do
include WaitForRequests
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:txt_file) { File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt') }
let(:img_file) { File.join(Rails.root, 'spec', 'fixtures', 'dk.png') }
before do
project.add_master(user)
sign_in(user)
page.driver.set_cookie('new_repo', 'true')
visit project_tree_path(project, :master)
wait_for_requests
end
it 'uploads text file' do
find('.add-to-tree').click
# make the field visible so capybara can use it
execute_script('document.querySelector("#file-upload").classList.remove("hidden")')
attach_file('file-upload', txt_file)
find('.add-to-tree').click
expect(page).to have_selector('.repo-tab', text: 'doc_sample.txt')
expect(page).to have_content(File.open(txt_file, &:readline))
end
it 'uploads image file' do
find('.add-to-tree').click
# make the field visible so capybara can use it
execute_script('document.querySelector("#file-upload").classList.remove("hidden")')
attach_file('file-upload', img_file)
find('.add-to-tree').click
expect(page).to have_selector('.repo-tab', text: 'dk.png')
expect(page).not_to have_selector('.monaco-editor')
expect(page).to have_content('The source could not be displayed for this temporary file.')
end
end
import autosize from 'vendor/autosize'; import Autosize from 'autosize';
import GLForm from '~/gl_form'; import GLForm from '~/gl_form';
import '~/lib/utils/text_utility'; import '~/lib/utils/text_utility';
import '~/lib/utils/common_utils'; import '~/lib/utils/common_utils';
window.autosize = autosize; window.autosize = Autosize;
describe('GLForm', () => { describe('GLForm', () => {
describe('when instantiated', function () { describe('when instantiated', function () {
......
/* global Notes */ /* global Notes */
import 'vendor/autosize'; import 'autosize';
import '~/gl_form'; import '~/gl_form';
import '~/lib/utils/text_utility'; import '~/lib/utils/text_utility';
import '~/render_gfm'; import '~/render_gfm';
......
import Vue from 'vue'; import Vue from 'vue';
import autosize from 'vendor/autosize'; import Autosize from 'autosize';
import store from '~/notes/stores'; import store from '~/notes/stores';
import issueCommentForm from '~/notes/components/issue_comment_form.vue'; import issueCommentForm from '~/notes/components/issue_comment_form.vue';
import { loggedOutIssueData, notesDataMock, userDataMock, issueDataMock } from '../mock_data'; import { loggedOutIssueData, notesDataMock, userDataMock, issueDataMock } from '../mock_data';
...@@ -97,14 +97,14 @@ describe('issue_comment_form component', () => { ...@@ -97,14 +97,14 @@ describe('issue_comment_form component', () => {
}); });
it('should resize textarea after note discarded', (done) => { it('should resize textarea after note discarded', (done) => {
spyOn(autosize, 'update'); spyOn(Autosize, 'update');
spyOn(vm, 'discard').and.callThrough(); spyOn(vm, 'discard').and.callThrough();
vm.note = 'foo'; vm.note = 'foo';
vm.discard(); vm.discard();
Vue.nextTick(() => { Vue.nextTick(() => {
expect(autosize.update).toHaveBeenCalled(); expect(Autosize.update).toHaveBeenCalled();
done(); done();
}); });
}); });
......
/* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */ /* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */
/* global Notes */ /* global Notes */
import 'vendor/autosize'; import 'autosize';
import '~/gl_form'; import '~/gl_form';
import '~/lib/utils/text_utility'; import '~/lib/utils/text_utility';
import '~/render_gfm'; import '~/render_gfm';
......
...@@ -74,25 +74,38 @@ describe('new dropdown component', () => { ...@@ -74,25 +74,38 @@ describe('new dropdown component', () => {
it('closes modal after creating file', () => { it('closes modal after creating file', () => {
vm.openModal = true; vm.openModal = true;
eventHub.$emit('createNewEntry', 'testing', type); eventHub.$emit('createNewEntry', {
name: 'testing',
type,
toggleModal: true,
});
expect(vm.openModal).toBeFalsy(); expect(vm.openModal).toBeFalsy();
}); });
it('sets editMode to true', () => { it('sets editMode to true', () => {
eventHub.$emit('createNewEntry', 'testing', type); eventHub.$emit('createNewEntry', {
name: 'testing',
type,
});
expect(RepoStore.editMode).toBeTruthy(); expect(RepoStore.editMode).toBeTruthy();
}); });
it('toggles blob view', () => { it('toggles blob view', () => {
eventHub.$emit('createNewEntry', 'testing', type); eventHub.$emit('createNewEntry', {
name: 'testing',
type,
});
expect(RepoStore.isPreviewView()).toBeFalsy(); expect(RepoStore.isPreviewView()).toBeFalsy();
}); });
it('adds file into activeFiles', () => { it('adds file into activeFiles', () => {
eventHub.$emit('createNewEntry', 'testing', type); eventHub.$emit('createNewEntry', {
name: 'testing',
type,
});
expect(RepoStore.openedFiles.length).toBe(1); expect(RepoStore.openedFiles.length).toBe(1);
}); });
...@@ -100,7 +113,10 @@ describe('new dropdown component', () => { ...@@ -100,7 +113,10 @@ describe('new dropdown component', () => {
it(`creates ${type} in the current stores path`, () => { it(`creates ${type} in the current stores path`, () => {
RepoStore.path = 'testing'; RepoStore.path = 'testing';
eventHub.$emit('createNewEntry', 'testing/app', type); eventHub.$emit('createNewEntry', {
name: 'testing/app',
type,
});
expect(RepoStore.files[0].path).toBe('testing/app'); expect(RepoStore.files[0].path).toBe('testing/app');
expect(RepoStore.files[0].name).toBe('app'); expect(RepoStore.files[0].name).toBe('app');
...@@ -116,7 +132,10 @@ describe('new dropdown component', () => { ...@@ -116,7 +132,10 @@ describe('new dropdown component', () => {
describe('file', () => { describe('file', () => {
it('creates new file', () => { it('creates new file', () => {
eventHub.$emit('createNewEntry', 'testing', 'blob'); eventHub.$emit('createNewEntry', {
name: 'testing',
type: 'blob',
});
expect(RepoStore.files.length).toBe(1); expect(RepoStore.files.length).toBe(1);
expect(RepoStore.files[0].name).toBe('testing'); expect(RepoStore.files[0].name).toBe('testing');
...@@ -129,7 +148,10 @@ describe('new dropdown component', () => { ...@@ -129,7 +148,10 @@ describe('new dropdown component', () => {
name: 'testing', name: 'testing',
})); }));
eventHub.$emit('createNewEntry', 'testing', 'blob'); eventHub.$emit('createNewEntry', {
name: 'testing',
type: 'blob',
});
expect(RepoStore.files.length).toBe(1); expect(RepoStore.files.length).toBe(1);
expect(RepoStore.files[0].name).toBe('testing'); expect(RepoStore.files[0].name).toBe('testing');
...@@ -140,7 +162,10 @@ describe('new dropdown component', () => { ...@@ -140,7 +162,10 @@ describe('new dropdown component', () => {
describe('tree', () => { describe('tree', () => {
it('creates new tree', () => { it('creates new tree', () => {
eventHub.$emit('createNewEntry', 'testing', 'tree'); eventHub.$emit('createNewEntry', {
name: 'testing',
type: 'tree',
});
expect(RepoStore.files.length).toBe(1); expect(RepoStore.files.length).toBe(1);
expect(RepoStore.files[0].name).toBe('testing'); expect(RepoStore.files[0].name).toBe('testing');
...@@ -151,7 +176,10 @@ describe('new dropdown component', () => { ...@@ -151,7 +176,10 @@ describe('new dropdown component', () => {
}); });
it('creates multiple trees when entryName has slashes', () => { it('creates multiple trees when entryName has slashes', () => {
eventHub.$emit('createNewEntry', 'app/test', 'tree'); eventHub.$emit('createNewEntry', {
name: 'app/test',
type: 'tree',
});
expect(RepoStore.files.length).toBe(1); expect(RepoStore.files.length).toBe(1);
expect(RepoStore.files[0].name).toBe('app'); expect(RepoStore.files[0].name).toBe('app');
...@@ -164,7 +192,10 @@ describe('new dropdown component', () => { ...@@ -164,7 +192,10 @@ describe('new dropdown component', () => {
name: 'app', name: 'app',
})); }));
eventHub.$emit('createNewEntry', 'app/test', 'tree'); eventHub.$emit('createNewEntry', {
name: 'app/test',
type: 'tree',
});
expect(RepoStore.files.length).toBe(1); expect(RepoStore.files.length).toBe(1);
expect(RepoStore.files[0].name).toBe('app'); expect(RepoStore.files[0].name).toBe('app');
...@@ -179,7 +210,10 @@ describe('new dropdown component', () => { ...@@ -179,7 +210,10 @@ describe('new dropdown component', () => {
name: 'app', name: 'app',
})); }));
eventHub.$emit('createNewEntry', 'app', 'tree'); eventHub.$emit('createNewEntry', {
name: 'app',
type: 'tree',
});
expect(RepoStore.files.length).toBe(1); expect(RepoStore.files.length).toBe(1);
expect(RepoStore.files[0].name).toBe('app'); expect(RepoStore.files[0].name).toBe('app');
......
...@@ -70,7 +70,11 @@ describe('new file modal component', () => { ...@@ -70,7 +70,11 @@ describe('new file modal component', () => {
vm.createEntryInStore(); vm.createEntryInStore();
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', 'testing', 'tree'); expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'testing',
type: 'tree',
toggleModal: true,
});
}); });
}); });
}); });
import Vue from 'vue';
import upload from '~/repo/components/new_dropdown/upload.vue';
import eventHub from '~/repo/event_hub';
import createComponent from '../../../helpers/vue_mount_component_helper';
describe('new dropdown upload', () => {
let vm;
beforeEach(() => {
const Component = Vue.extend(upload);
vm = createComponent(Component, {
currentPath: '',
});
});
afterEach(() => {
vm.$destroy();
});
describe('readFile', () => {
beforeEach(() => {
spyOn(FileReader.prototype, 'readAsText');
spyOn(FileReader.prototype, 'readAsDataURL');
});
it('calls readAsText for text files', () => {
const file = {
type: 'text/html',
};
vm.readFile(file);
expect(FileReader.prototype.readAsText).toHaveBeenCalledWith(file);
});
it('calls readAsDataURL for non-text files', () => {
const file = {
type: 'images/png',
};
vm.readFile(file);
expect(FileReader.prototype.readAsDataURL).toHaveBeenCalledWith(file);
});
});
describe('createFile', () => {
const target = {
result: 'content',
};
const binaryTarget = {
result: 'base64,base64content',
};
const file = {
name: 'file',
};
beforeEach(() => {
spyOn(eventHub, '$emit');
});
it('emits createNewEntry event', () => {
vm.createFile(target, file, true);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'file',
type: 'blob',
content: 'content',
toggleModal: false,
base64: false,
}, true);
});
it('createNewEntry event name contains current path', () => {
vm.currentPath = 'testing';
vm.createFile(target, file, true);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'testing/file',
type: 'blob',
content: 'content',
toggleModal: false,
base64: false,
}, true);
});
it('splits content on base64 if binary', () => {
vm.createFile(binaryTarget, file, false);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'file',
type: 'blob',
content: 'base64content',
toggleModal: false,
base64: true,
}, false);
});
});
});
require 'spec_helper' require 'spec_helper'
describe AdditionalEmailHeadersInterceptor do describe AdditionalEmailHeadersInterceptor do
it 'adds Auto-Submitted header' do let(:mail) do
mail = ActionMailer::Base.mail(to: 'test@mail.com', from: 'info@mail.com', body: 'hello').deliver ActionMailer::Base.mail(to: 'test@mail.com', from: 'info@mail.com', body: 'hello')
end
before do
mail.deliver_now
end
it 'adds Auto-Submitted header' do
expect(mail.header['To'].value).to eq('test@mail.com') expect(mail.header['To'].value).to eq('test@mail.com')
expect(mail.header['From'].value).to eq('info@mail.com') expect(mail.header['From'].value).to eq('info@mail.com')
expect(mail.header['Auto-Submitted'].value).to eq('auto-generated') expect(mail.header['Auto-Submitted'].value).to eq('auto-generated')
expect(mail.header['X-Auto-Response-Suppress'].value).to eq('All') expect(mail.header['X-Auto-Response-Suppress'].value).to eq('All')
end end
context 'when the same mail object is sent twice' do
before do
mail.deliver_now
end
it 'does not add the Auto-Submitted header twice' do
expect(mail.header['Auto-Submitted'].value).to eq('auto-generated')
expect(mail.header['X-Auto-Response-Suppress'].value).to eq('All')
end
end
end end
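For context, the interceptor exercised above stamps outgoing mail with auto-response headers and has to stay idempotent when the same Mail object is delivered twice. A minimal sketch of such an interceptor, assuming ActionMailer's standard interceptor hook and not necessarily matching the implementation under test (the class name here is illustrative):

# Illustrative sketch only.
class AutoSubmittedHeadersInterceptor
  def self.delivering_email(message)
    # Skip if an earlier delivery of this Mail object already added the headers.
    return if message.header['Auto-Submitted']

    message.header['Auto-Submitted'] = 'auto-generated'
    message.header['X-Auto-Response-Suppress'] = 'All'
  end
end

# Registered once at boot, e.g. in an initializer:
#   ActionMailer::Base.register_interceptor(AutoSubmittedHeadersInterceptor)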
...@@ -364,6 +364,43 @@ describe Gitlab::Diff::Position do ...@@ -364,6 +364,43 @@ describe Gitlab::Diff::Position do
end end
end end
describe "position for a missing ref" do
let(:diff_refs) do
Gitlab::Diff::DiffRefs.new(
base_sha: "not_existing_sha",
head_sha: "existing_sha"
)
end
subject do
described_class.new(
old_path: "files/ruby/feature.rb",
new_path: "files/ruby/feature.rb",
old_line: 3,
new_line: nil,
diff_refs: diff_refs
)
end
describe "#diff_file" do
it "does not raise exception" do
expect { subject.diff_file(project.repository) }.not_to raise_error
end
end
describe "#diff_line" do
it "does not raise exception" do
expect { subject.diff_line(project.repository) }.not_to raise_error
end
end
describe "#line_code" do
it "does not raise exception" do
expect { subject.line_code(project.repository) }.not_to raise_error
end
end
end
describe "position for a file in the initial commit" do describe "position for a file in the initial commit" do
let(:commit) { project.commit("1a0b36b3cdad1d2ee32457c102a8c0b7056fa863") } let(:commit) { project.commit("1a0b36b3cdad1d2ee32457c102a8c0b7056fa863") }
......
...@@ -208,6 +208,7 @@ project: ...@@ -208,6 +208,7 @@ project:
- slack_slash_commands_service - slack_slash_commands_service
- gitlab_slack_application_service - gitlab_slack_application_service
- irker_service - irker_service
- packagist_service
- pivotaltracker_service - pivotaltracker_service
- prometheus_service - prometheus_service
- hipchat_service - hipchat_service
......
...@@ -1887,6 +1887,12 @@ describe MergeRequest do ...@@ -1887,6 +1887,12 @@ describe MergeRequest do
end end
describe '#merge_ongoing?' do describe '#merge_ongoing?' do
it 'returns true when the merge request is locked' do
merge_request = build_stubbed(:merge_request, state: :locked)
expect(merge_request.merge_ongoing?).to be(true)
end
it 'returns true when merge_id, MR is not merged and it has no running job' do it 'returns true when merge_id, MR is not merged and it has no running job' do
merge_request = build_stubbed(:merge_request, state: :open, merge_jid: 'foo') merge_request = build_stubbed(:merge_request, state: :open, merge_jid: 'foo')
allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { true } allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { true }
......
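The new examples above pin down #merge_ongoing?: a locked merge request counts as ongoing, and otherwise an unmerged request with a merge_jid counts as ongoing while Gitlab::SidekiqStatus still reports its job as running. A sketch consistent with those expectations (the model's real method may differ):

def merge_ongoing?
  # A locked MR is already in the middle of a merge.
  return true if locked?

  !!merge_jid && !merged? && Gitlab::SidekiqStatus.running?(merge_jid)
end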
require 'spec_helper'
describe PackagistService do
describe "Associations" do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
end
let(:project) { create(:project) }
let(:packagist_server) { 'https://packagist.example.com' }
let(:packagist_username) { 'theUser' }
let(:packagist_token) { 'verySecret' }
let(:packagist_hook_url) do
"#{packagist_server}/api/update-package?username=#{packagist_username}&apiToken=#{packagist_token}"
end
let(:packagist_params) do
{
active: true,
project: project,
properties: {
username: packagist_username,
token: packagist_token,
server: packagist_server
}
}
end
describe '#execute' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:push_sample_data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
let(:packagist_service) { described_class.create(packagist_params) }
before do
stub_request(:post, packagist_hook_url)
end
it 'calls Packagist API' do
packagist_service.execute(push_sample_data)
expect(a_request(:post, packagist_hook_url)).to have_been_made.once
end
end
end
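The packagist_hook_url let block above spells out the webhook the service is expected to call. The same construction in isolation, purely for illustration:

require 'uri'

# Mirrors the URL asserted in the spec: <server>/api/update-package?username=...&apiToken=...
def packagist_hook_url(server:, username:, token:)
  "#{server}/api/update-package?#{URI.encode_www_form(username: username, apiToken: token)}"
end

packagist_hook_url(server: 'https://packagist.example.com',
                   username: 'theUser',
                   token: 'verySecret')
# => "https://packagist.example.com/api/update-package?username=theUser&apiToken=verySecret"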
...@@ -24,6 +24,7 @@ describe Project do ...@@ -24,6 +24,7 @@ describe Project do
it { is_expected.to have_one(:slack_service) } it { is_expected.to have_one(:slack_service) }
it { is_expected.to have_one(:microsoft_teams_service) } it { is_expected.to have_one(:microsoft_teams_service) }
it { is_expected.to have_one(:mattermost_service) } it { is_expected.to have_one(:mattermost_service) }
it { is_expected.to have_one(:packagist_service) }
it { is_expected.to have_one(:pushover_service) } it { is_expected.to have_one(:pushover_service) }
it { is_expected.to have_one(:asana_service) } it { is_expected.to have_one(:asana_service) }
it { is_expected.to have_many(:boards) } it { is_expected.to have_many(:boards) }
...@@ -2886,6 +2887,7 @@ describe Project do ...@@ -2886,6 +2887,7 @@ describe Project do
context 'legacy storage' do context 'legacy storage' do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
let(:gitlab_shell) { Gitlab::Shell.new } let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
before do before do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell) allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
...@@ -2927,7 +2929,7 @@ describe Project do ...@@ -2927,7 +2929,7 @@ describe Project do
describe '#hashed_storage?' do describe '#hashed_storage?' do
it 'returns false' do it 'returns false' do
expect(project.hashed_storage?).to be_falsey expect(project.hashed_storage?(:repository)).to be_falsey
end end
end end
...@@ -2986,6 +2988,30 @@ describe Project do ...@@ -2986,6 +2988,30 @@ describe Project do
it { expect { subject }.to raise_error(StandardError) } it { expect { subject }.to raise_error(StandardError) }
end end
context 'gitlab pages' do
before do
expect(project_storage).to receive(:rename_repo) { true }
end
it 'moves pages folder to new location' do
expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project)
project.rename_repo
end
end
context 'attachments' do
before do
expect(project_storage).to receive(:rename_repo) { true }
end
it 'moves uploads folder to new location' do
expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
project.rename_repo
end
end
end end
describe '#pages_path' do describe '#pages_path' do
...@@ -3045,8 +3071,14 @@ describe Project do ...@@ -3045,8 +3071,14 @@ describe Project do
end end
describe '#hashed_storage?' do describe '#hashed_storage?' do
it 'returns true' do it 'returns true if rolled out' do
expect(project.hashed_storage?).to be_truthy expect(project.hashed_storage?(:attachments)).to be_truthy
end
it 'returns false when not rolled out yet' do
project.storage_version = 1
expect(project.hashed_storage?(:attachments)).to be_falsey
end end
end end
...@@ -3089,10 +3121,6 @@ describe Project do ...@@ -3089,10 +3121,6 @@ describe Project do
.to receive(:execute_hooks_for) .to receive(:execute_hooks_for)
.with(project, :rename) .with(project, :rename)
expect_any_instance_of(Gitlab::UploadsTransfer)
.to receive(:rename_project)
.with('foo', project.path, project.namespace.full_path)
expect(project).to receive(:expire_caches_before_rename) expect(project).to receive(:expire_caches_before_rename)
expect(project).to receive(:expires_full_path_cache) expect(project).to receive(:expires_full_path_cache)
...@@ -3113,6 +3141,32 @@ describe Project do ...@@ -3113,6 +3141,32 @@ describe Project do
it { expect { subject }.to raise_error(StandardError) } it { expect { subject }.to raise_error(StandardError) }
end end
context 'gitlab pages' do
it 'moves pages folder to new location' do
expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project)
project.rename_repo
end
end
context 'attachments' do
it 'keeps uploads folder location unchanged' do
expect_any_instance_of(Gitlab::UploadsTransfer).not_to receive(:rename_project)
project.rename_repo
end
context 'when not rolled out' do
let(:project) { create(:project, :repository, storage_version: 1) }
it 'moves pages folder to new location' do
expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
project.rename_repo
end
end
end
end end
describe '#pages_path' do describe '#pages_path' do
......
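Several changes in this file move #hashed_storage? from a plain boolean to a per-feature check keyed on the project's storage_version: version 1 covers the repository and version 2 also covers attachments, judging by the examples. A hedged sketch of that kind of gate (the constant name and exact versions are inferred from the specs, not taken from the implementation):

# Inferred from the spec expectations above; illustrative only.
HASHED_STORAGE_FEATURES = {
  repository: 1,
  attachments: 2
}.freeze

def hashed_storage?(feature)
  raise ArgumentError, "Invalid feature: #{feature}" unless HASHED_STORAGE_FEATURES.key?(feature)

  storage_version.to_i >= HASHED_STORAGE_FEATURES[feature]
end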
...@@ -1171,6 +1171,7 @@ describe API::MergeRequests do ...@@ -1171,6 +1171,7 @@ describe API::MergeRequests do
end end
end end
<<<<<<< HEAD
describe 'GET :id/merge_requests/:merge_request_iid/approvals' do describe 'GET :id/merge_requests/:merge_request_iid/approvals' do
it 'retrieves the approval status' do it 'retrieves the approval status' do
approver = create :user approver = create :user
...@@ -1287,6 +1288,29 @@ describe API::MergeRequests do ...@@ -1287,6 +1288,29 @@ describe API::MergeRequests do
expect(json_response['user_has_approved']).to be false expect(json_response['user_has_approved']).to be false
expect(json_response['user_can_approve']).to be true expect(json_response['user_can_approve']).to be true
end end
=======
describe 'POST :id/merge_requests/:merge_request_iid/cancel_merge_when_pipeline_succeeds' do
before do
::MergeRequests::MergeWhenPipelineSucceedsService.new(merge_request.target_project, user).execute(merge_request)
end
it 'removes the merge_when_pipeline_succeeds status' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/cancel_merge_when_pipeline_succeeds", user)
expect(response).to have_gitlab_http_status(201)
end
it 'returns 404 if the merge request is not found' do
post api("/projects/#{project.id}/merge_requests/123/merge_when_pipeline_succeeds", user)
expect(response).to have_gitlab_http_status(404)
end
it 'returns 404 if the merge request id is used instead of iid' do
post api("/projects/#{project.id}/merge_requests/#{merge_request.id}/merge_when_pipeline_succeeds", user)
expect(response).to have_gitlab_http_status(404)
>>>>>>> bfb5107ae720232a15060ee55feba213ee7dd097
end end
end end
......
...@@ -387,7 +387,7 @@ describe API::Runner do ...@@ -387,7 +387,7 @@ describe API::Runner do
end end
context 'when job is made for tag' do context 'when job is made for tag' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
it 'sets branch as ref_type' do it 'sets branch as ref_type' do
request_job request_job
...@@ -438,8 +438,8 @@ describe API::Runner do ...@@ -438,8 +438,8 @@ describe API::Runner do
end end
context 'when project and pipeline have multiple jobs' do context 'when project and pipeline have multiple jobs' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) } let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
before do before do
...@@ -460,7 +460,7 @@ describe API::Runner do ...@@ -460,7 +460,7 @@ describe API::Runner do
end end
context 'when pipeline have jobs with artifacts' do context 'when pipeline have jobs with artifacts' do
let!(:job) { create(:ci_build_tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) } let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
before do before do
...@@ -480,8 +480,8 @@ describe API::Runner do ...@@ -480,8 +480,8 @@ describe API::Runner do
end end
context 'when explicit dependencies are defined' do context 'when explicit dependencies are defined' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:test_job) do let!(:test_job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy', create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
stage: 'deploy', stage_idx: 1, stage: 'deploy', stage_idx: 1,
...@@ -504,8 +504,8 @@ describe API::Runner do ...@@ -504,8 +504,8 @@ describe API::Runner do
end end
context 'when dependencies is an empty array' do context 'when dependencies is an empty array' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) } let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) } let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:empty_dependencies_job) do let!(:empty_dependencies_job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job', create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
stage: 'deploy', stage_idx: 1, stage: 'deploy', stage_idx: 1,
......
...@@ -12,55 +12,6 @@ describe MergeRequests::MergeService do ...@@ -12,55 +12,6 @@ describe MergeRequests::MergeService do
end end
describe '#execute' do describe '#execute' do
context 'MergeRequest#merge_jid' do
let(:service) do
described_class.new(project, user, commit_message: 'Awesome message')
end
before do
merge_request.update_column(:merge_jid, 'hash-123')
end
it 'is cleaned when no error is raised' do
service.execute(merge_request)
expect(merge_request.reload.merge_jid).to be_nil
end
it 'is cleaned when expected error is raised' do
allow(service).to receive(:commit).and_raise(described_class::MergeError)
service.execute(merge_request)
expect(merge_request.reload.merge_jid).to be_nil
end
it 'is cleaned when merge request is not mergeable' do
allow(merge_request).to receive(:mergeable?).and_return(false)
service.execute(merge_request)
expect(merge_request.reload.merge_jid).to be_nil
end
it 'is cleaned when no source is found' do
allow(merge_request).to receive(:diff_head_sha).and_return(nil)
service.execute(merge_request)
expect(merge_request.reload.merge_jid).to be_nil
end
it 'is not cleaned when unexpected error is raised' do
service = described_class.new(project, user, commit_message: 'Awesome message')
allow(service).to receive(:commit).and_raise(StandardError)
expect { service.execute(merge_request) }.to raise_error(StandardError)
expect(merge_request.reload.merge_jid).to be_present
end
end
context 'valid params' do context 'valid params' do
let(:service) { described_class.new(project, user, commit_message: 'Awesome message') } let(:service) { described_class.new(project, user, commit_message: 'Awesome message') }
......
...@@ -23,7 +23,7 @@ describe Projects::HashedStorageMigrationService do ...@@ -23,7 +23,7 @@ describe Projects::HashedStorageMigrationService do
it 'updates project to be hashed and not read-only' do it 'updates project to be hashed and not read-only' do
service.execute service.execute
expect(project.hashed_storage?).to be_truthy expect(project.hashed_storage?(:repository)).to be_truthy
expect(project.repository_read_only).to be_falsey expect(project.repository_read_only).to be_falsey
end end
......
...@@ -3,12 +3,38 @@ require 'spec_helper' ...@@ -3,12 +3,38 @@ require 'spec_helper'
describe FileUploader do describe FileUploader do
let(:uploader) { described_class.new(build_stubbed(:project)) } let(:uploader) { described_class.new(build_stubbed(:project)) }
context 'legacy storage' do
let(:project) { build_stubbed(:project) }
describe '.absolute_path' do
it 'returns the correct absolute path by building it dynamically' do
upload = double(model: project, path: 'secret/foo.jpg')
dynamic_segment = project.full_path
expect(described_class.absolute_path(upload))
.to end_with("#{dynamic_segment}/secret/foo.jpg")
end
end
describe "#store_dir" do
it "stores in the namespace path" do
uploader = described_class.new(project)
expect(uploader.store_dir).to include(project.full_path)
expect(uploader.store_dir).not_to include("system")
end
end
end
context 'hashed storage' do
let(:project) { build_stubbed(:project, :hashed) }
describe '.absolute_path' do describe '.absolute_path' do
it 'returns the correct absolute path by building it dynamically' do it 'returns the correct absolute path by building it dynamically' do
project = build_stubbed(:project)
upload = double(model: project, path: 'secret/foo.jpg') upload = double(model: project, path: 'secret/foo.jpg')
dynamic_segment = project.path_with_namespace dynamic_segment = project.disk_path
expect(described_class.absolute_path(upload)) expect(described_class.absolute_path(upload))
.to end_with("#{dynamic_segment}/secret/foo.jpg") .to end_with("#{dynamic_segment}/secret/foo.jpg")
...@@ -17,13 +43,13 @@ describe FileUploader do ...@@ -17,13 +43,13 @@ describe FileUploader do
describe "#store_dir" do describe "#store_dir" do
it "stores in the namespace path" do it "stores in the namespace path" do
project = build_stubbed(:project)
uploader = described_class.new(project) uploader = described_class.new(project)
expect(uploader.store_dir).to include(project.path_with_namespace) expect(uploader.store_dir).to include(project.disk_path)
expect(uploader.store_dir).not_to include("system") expect(uploader.store_dir).not_to include("system")
end end
end end
end
describe 'initialize' do describe 'initialize' do
it 'generates a secret if none is provided' do it 'generates a secret if none is provided' do
......
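The uploader examples above separate legacy storage, whose on-disk directory follows the project's full_path, from hashed storage, which follows disk_path. Assuming the hashed layout is a SHA256 of the project ID split into two-character prefixes (the usual description of hashed storage; the real scheme may differ), it could be reconstructed like this:

require 'digest'

# Hypothetical reconstruction of a hashed disk path; illustrative only.
def hashed_disk_path(project_id)
  hash = Digest::SHA2.hexdigest(project_id.to_s)
  "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
end

hashed_disk_path(1)
# => "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b"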
...@@ -12,8 +12,13 @@ describe StuckMergeJobsWorker do ...@@ -12,8 +12,13 @@ describe StuckMergeJobsWorker do
worker.perform worker.perform
expect(mr_with_sha.reload).to be_merged mr_with_sha.reload
expect(mr_without_sha.reload).to be_opened mr_without_sha.reload
expect(mr_with_sha).to be_merged
expect(mr_without_sha).to be_opened
expect(mr_with_sha.merge_jid).to be_present
expect(mr_without_sha.merge_jid).to be_nil
end end
it 'updates merge request to opened when locked but has not been merged' do it 'updates merge request to opened when locked but has not been merged' do
......
/*!
Autosize 3.0.14
license: MIT
http://www.jacklmoore.com/autosize
*/
(function (global, factory) {
if (typeof define === 'function' && define.amd) {
define(['exports', 'module'], factory);
} else if (typeof exports !== 'undefined' && typeof module !== 'undefined') {
factory(exports, module);
} else {
var mod = {
exports: {}
};
factory(mod.exports, mod);
global.autosize = mod.exports;
}
})(this, function (exports, module) {
'use strict';
var set = typeof Set === 'function' ? new Set() : (function () {
var list = [];
return {
has: function has(key) {
return Boolean(list.indexOf(key) > -1);
},
add: function add(key) {
list.push(key);
},
'delete': function _delete(key) {
list.splice(list.indexOf(key), 1);
} };
})();
function assign(ta) {
var _ref = arguments[1] === undefined ? {} : arguments[1];
var _ref$setOverflowX = _ref.setOverflowX;
var setOverflowX = _ref$setOverflowX === undefined ? true : _ref$setOverflowX;
var _ref$setOverflowY = _ref.setOverflowY;
var setOverflowY = _ref$setOverflowY === undefined ? true : _ref$setOverflowY;
if (!ta || !ta.nodeName || ta.nodeName !== 'TEXTAREA' || set.has(ta)) return;
var heightOffset = null;
var overflowY = null;
var clientWidth = ta.clientWidth;
function init() {
var style = window.getComputedStyle(ta, null);
overflowY = style.overflowY;
if (style.resize === 'vertical') {
ta.style.resize = 'none';
} else if (style.resize === 'both') {
ta.style.resize = 'horizontal';
}
if (style.boxSizing === 'content-box') {
heightOffset = -(parseFloat(style.paddingTop) + parseFloat(style.paddingBottom));
} else {
heightOffset = parseFloat(style.borderTopWidth) + parseFloat(style.borderBottomWidth);
}
// Fix when a textarea is not on document body and heightOffset is Not a Number
if (isNaN(heightOffset)) {
heightOffset = 0;
}
update();
}
function changeOverflow(value) {
{
// Chrome/Safari-specific fix:
// When the textarea y-overflow is hidden, Chrome/Safari do not reflow the text to account for the space
// made available by removing the scrollbar. The following forces the necessary text reflow.
var width = ta.style.width;
ta.style.width = '0px';
// Force reflow:
/* jshint ignore:start */
ta.offsetWidth;
/* jshint ignore:end */
ta.style.width = width;
}
overflowY = value;
if (setOverflowY) {
ta.style.overflowY = value;
}
resize();
}
function resize() {
var htmlTop = window.pageYOffset;
var bodyTop = document.body.scrollTop;
var originalHeight = ta.style.height;
ta.style.height = 'auto';
var endHeight = ta.scrollHeight + heightOffset;
if (ta.scrollHeight === 0) {
// If the scrollHeight is 0, then the element probably has display:none or is detached from the DOM.
ta.style.height = originalHeight;
return;
}
ta.style.height = endHeight + 'px';
// used to check if an update is actually necessary on window.resize
clientWidth = ta.clientWidth;
// prevents scroll-position jumping
document.documentElement.scrollTop = htmlTop;
document.body.scrollTop = bodyTop;
}
function update() {
var startHeight = ta.style.height;
resize();
var style = window.getComputedStyle(ta, null);
if (style.height !== ta.style.height) {
if (overflowY !== 'visible') {
changeOverflow('visible');
}
} else {
if (overflowY !== 'hidden') {
changeOverflow('hidden');
}
}
if (startHeight !== ta.style.height) {
var evt = document.createEvent('Event');
evt.initEvent('autosize:resized', true, false);
ta.dispatchEvent(evt);
}
}
var pageResize = function pageResize() {
if (ta.clientWidth !== clientWidth) {
update();
}
};
var destroy = (function (style) {
window.removeEventListener('resize', pageResize, false);
ta.removeEventListener('input', update, false);
ta.removeEventListener('keyup', update, false);
ta.removeEventListener('autosize:destroy', destroy, false);
ta.removeEventListener('autosize:update', update, false);
set['delete'](ta);
Object.keys(style).forEach(function (key) {
ta.style[key] = style[key];
});
}).bind(ta, {
height: ta.style.height,
resize: ta.style.resize,
overflowY: ta.style.overflowY,
overflowX: ta.style.overflowX,
wordWrap: ta.style.wordWrap });
ta.addEventListener('autosize:destroy', destroy, false);
// IE9 does not fire onpropertychange or oninput for deletions,
// so binding to onkeyup to catch most of those events.
// There is no way that I know of to detect something like 'cut' in IE9.
if ('onpropertychange' in ta && 'oninput' in ta) {
ta.addEventListener('keyup', update, false);
}
window.addEventListener('resize', pageResize, false);
ta.addEventListener('input', update, false);
ta.addEventListener('autosize:update', update, false);
set.add(ta);
if (setOverflowX) {
ta.style.overflowX = 'hidden';
ta.style.wordWrap = 'break-word';
}
init();
}
function destroy(ta) {
if (!(ta && ta.nodeName && ta.nodeName === 'TEXTAREA')) return;
var evt = document.createEvent('Event');
evt.initEvent('autosize:destroy', true, false);
ta.dispatchEvent(evt);
}
function update(ta) {
if (!(ta && ta.nodeName && ta.nodeName === 'TEXTAREA')) return;
var evt = document.createEvent('Event');
evt.initEvent('autosize:update', true, false);
ta.dispatchEvent(evt);
}
var autosize = null;
// Do nothing in Node.js environment and IE8 (or lower)
if (typeof window === 'undefined' || typeof window.getComputedStyle !== 'function') {
autosize = function (el) {
return el;
};
autosize.destroy = function (el) {
return el;
};
autosize.update = function (el) {
return el;
};
} else {
autosize = function (el, options) {
if (el) {
Array.prototype.forEach.call(el.length ? el : [el], function (x) {
return assign(x, options);
});
}
return el;
};
autosize.destroy = function (el) {
if (el) {
Array.prototype.forEach.call(el.length ? el : [el], destroy);
}
return el;
};
autosize.update = function (el) {
if (el) {
Array.prototype.forEach.call(el.length ? el : [el], update);
}
return el;
};
}
module.exports = autosize;
});
\ No newline at end of file
...@@ -248,6 +248,10 @@ autoprefixer@^6.3.1: ...@@ -248,6 +248,10 @@ autoprefixer@^6.3.1:
postcss "^5.2.16" postcss "^5.2.16"
postcss-value-parser "^3.2.3" postcss-value-parser "^3.2.3"
autosize@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/autosize/-/autosize-4.0.0.tgz#7a0599b1ba84d73bd7589b0d9da3870152c69237"
aws-sign2@~0.6.0: aws-sign2@~0.6.0:
version "0.6.0" version "0.6.0"
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f"
......