Commit 3468ca83 authored by Eric Eastwood

Merge branch 'master' into ff_port_from_ee

Conflicts:
	app/models/project.rb
	db/schema.rb
parents c8596aa9 3d899a7d
......@@ -8,4 +8,4 @@
karma.config.js
webpack.config.js
svg.config.js
/app/assets/javascripts/locale/**/*.js
/app/assets/javascripts/locale/**/app.js
......@@ -2,6 +2,13 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 10.0.2 (2017-09-27)
- [FIXED] Notes will not show an empty bubble when the author isn't a member. !14450
- [FIXED] Some checks in `rake gitlab:check` were failing with 'undefined method `run_command`'. !14469
- [FIXED] Make locked setting of Runner to not affect jobs scheduling. !14483
- [FIXED] Re-allow `name` attribute on user-provided anchor HTML.
## 10.0.1 (2017-09-23)
- [FIXED] Fix duplicate key errors in PostDeployMigrateUserExternalMailData migration.
......
......@@ -40,10 +40,10 @@ export default () => {
class="text-center"
v-if="error">
<span v-if="loadError">
An error occured whilst loading the file. Please try again later.
An error occurred whilst loading the file. Please try again later.
</span>
<span v-else>
An error occured whilst parsing the file.
An error occurred whilst parsing the file.
</span>
</p>
</div>
......
......@@ -48,10 +48,10 @@ export default () => {
class="text-center"
v-if="error">
<span v-if="loadError">
An error occured whilst loading the file. Please try again later.
An error occurred whilst loading the file. Please try again later.
</span>
<span v-else>
An error occured whilst decoding the file.
An error occurred whilst decoding the file.
</span>
</p>
</div>
......
......@@ -68,7 +68,7 @@ export default {
<div class="flash-container"
v-if="error">
<div class="flash-alert">
An error occured. Please try again.
An error occurred. Please try again.
</div>
</div>
<label class="label-light"
......
......@@ -163,7 +163,7 @@ export default {
this.service.postAction(endpoint)
.then(() => this.fetchEnvironments())
.catch(() => new Flash('An error occured while making the request.'));
.catch(() => new Flash('An error occurred while making the request.'));
}
},
......
......@@ -158,7 +158,7 @@ export default {
this.service.postAction(endpoint)
.then(() => this.fetchEnvironments())
.catch(() => new Flash('An error occured while making the request.'));
.catch(() => new Flash('An error occurred while making the request.'));
}
},
},
......
......@@ -7,6 +7,8 @@
* causes reflows, visit https://gist.github.com/paulirish/5d52fb081b3570c81e3a
*/
import Cookies from 'js-cookie';
const LINE_NUMBER_CLASS = 'diff-line-num';
const UNFOLDABLE_LINE_CLASS = 'js-unfold';
const NO_COMMENT_CLASS = 'no-comment-btn';
......@@ -27,9 +29,7 @@ export default {
this.userCanCreateNote = $diffFile.closest(DIFF_CONTAINER_SELECTOR).data('can-create-note') === '';
}
if (typeof notes !== 'undefined' && !this.isParallelView) {
this.isParallelView = notes.isParallelView && notes.isParallelView();
}
this.isParallelView = Cookies.get('diff_view') === 'parallel';
if (this.userCanCreateNote) {
$diffFile.on('mouseover', LINE_COLUMN_CLASSES, e => this.showButton(this.isParallelView, e))
......
......@@ -14,7 +14,7 @@ class DropdownEmoji extends gl.FilteredSearchDropdown {
loadingTemplate: this.loadingTemplate,
onError() {
/* eslint-disable no-new */
new Flash('An error occured fetching the dropdown data.');
new Flash('An error occurred fetching the dropdown data.');
/* eslint-enable no-new */
},
},
......
......@@ -17,7 +17,7 @@ class DropdownNonUser extends gl.FilteredSearchDropdown {
preprocessing,
onError() {
/* eslint-disable no-new */
new Flash('An error occured fetching the dropdown data.');
new Flash('An error occurred fetching the dropdown data.');
/* eslint-enable no-new */
},
},
......
......@@ -26,7 +26,7 @@ class DropdownUser extends gl.FilteredSearchDropdown {
},
onError() {
/* eslint-disable no-new */
new Flash('An error occured fetching the dropdown data.');
new Flash('An error occurred fetching the dropdown data.');
/* eslint-enable no-new */
},
},
......
......@@ -36,7 +36,7 @@ class FilteredSearchManager {
.catch((error) => {
if (error.name === 'RecentSearchesServiceError') return undefined;
// eslint-disable-next-line no-new
new window.Flash('An error occured while parsing recent searches');
new window.Flash('An error occurred while parsing recent searches');
// Gracefully fail to empty array
return [];
})
......
......@@ -28,148 +28,149 @@
// </div>
// </div>
//
(function() {
this.LineHighlighter = (function() {
// CSS class applied to highlighted lines
LineHighlighter.prototype.highlightClass = 'hll';
// Internal copy of location.hash so we're not dependent on `location` in tests
LineHighlighter.prototype._hash = '';
function LineHighlighter(hash) {
if (hash == null) {
// Initialize a LineHighlighter object
//
// hash - String URL hash for dependency injection in tests
hash = location.hash;
}
this.setHash = this.setHash.bind(this);
this.highlightLine = this.highlightLine.bind(this);
this.clickHandler = this.clickHandler.bind(this);
this.highlightHash = this.highlightHash.bind(this);
this._hash = hash;
this.bindEvents();
this.highlightHash();
}
LineHighlighter.prototype.bindEvents = function() {
const $fileHolder = $('.file-holder');
$fileHolder.on('click', 'a[data-line-number]', this.clickHandler);
$fileHolder.on('highlight:line', this.highlightHash);
};
LineHighlighter.prototype.highlightHash = function() {
var range;
if (this._hash !== '') {
range = this.hashToRange(this._hash);
if (range[0]) {
this.highlightRange(range);
$.scrollTo("#L" + range[0], {
// Scroll to the first highlighted line on initial load
// Offset -50 for the sticky top bar, and another -100 for some context
offset: -150
});
}
}
};
LineHighlighter.prototype.clickHandler = function(event) {
var current, lineNumber, range;
event.preventDefault();
this.clearHighlight();
lineNumber = $(event.target).closest('a').data('line-number');
current = this.hashToRange(this._hash);
if (!(current[0] && event.shiftKey)) {
// If there's no current selection, or there is but Shift wasn't held,
// treat this like a single-line selection.
this.setHash(lineNumber);
return this.highlightLine(lineNumber);
} else if (event.shiftKey) {
if (lineNumber < current[0]) {
range = [lineNumber, current[0]];
} else {
range = [current[0], lineNumber];
}
this.setHash(range[0], range[1]);
return this.highlightRange(range);
}
};
LineHighlighter.prototype.clearHighlight = function() {
return $("." + this.highlightClass).removeClass(this.highlightClass);
// Unhighlight previously highlighted lines
};
// Convert a URL hash String into line numbers
//
// hash - Hash String
//
// Examples:
//
// hashToRange('#L5') # => [5, null]
// hashToRange('#L5-15') # => [5, 15]
// hashToRange('#foo') # => [null, null]
//
// Returns an Array
LineHighlighter.prototype.hashToRange = function(hash) {
var first, last, matches;
// ?L(\d+)(?:-(\d+))?$/)
matches = hash.match(/^#?L(\d+)(?:-(\d+))?$/);
if (matches && matches.length) {
first = parseInt(matches[1], 10);
last = matches[2] ? parseInt(matches[2], 10) : null;
return [first, last];
} else {
return [null, null];
}
};
// Highlight a single line
//
// lineNumber - Line number to highlight
LineHighlighter.prototype.highlightLine = function(lineNumber) {
return $("#LC" + lineNumber).addClass(this.highlightClass);
};
// Highlight all lines within a range
//
// range - Array containing the starting and ending line numbers
LineHighlighter.prototype.highlightRange = function(range) {
var i, lineNumber, ref, ref1, results;
if (range[1]) {
results = [];
for (lineNumber = i = ref = range[0], ref1 = range[1]; ref <= ref1 ? i <= ref1 : i >= ref1; lineNumber = ref <= ref1 ? (i += 1) : (i -= 1)) {
results.push(this.highlightLine(lineNumber));
}
return results;
} else {
return this.highlightLine(range[0]);
}
};
const LineHighlighter = function(options = {}) {
options.highlightLineClass = options.highlightLineClass || 'hll';
options.fileHolderSelector = options.fileHolderSelector || '.file-holder';
options.scrollFileHolder = options.scrollFileHolder || false;
options.hash = options.hash || location.hash;
// Set the URL hash string
LineHighlighter.prototype.setHash = function(firstLineNumber, lastLineNumber) {
var hash;
if (lastLineNumber) {
hash = "#L" + firstLineNumber + "-" + lastLineNumber;
this.options = options;
this._hash = options.hash;
this.highlightLineClass = options.highlightLineClass;
this.setHash = this.setHash.bind(this);
this.highlightLine = this.highlightLine.bind(this);
this.clickHandler = this.clickHandler.bind(this);
this.highlightHash = this.highlightHash.bind(this);
this.bindEvents();
this.highlightHash();
};
LineHighlighter.prototype.bindEvents = function() {
const $fileHolder = $(this.options.fileHolderSelector);
$fileHolder.on('click', 'a[data-line-number]', this.clickHandler);
$fileHolder.on('highlight:line', this.highlightHash);
};
LineHighlighter.prototype.highlightHash = function() {
var range;
if (this._hash !== '') {
range = this.hashToRange(this._hash);
if (range[0]) {
this.highlightRange(range);
const lineSelector = `#L${range[0]}`;
const scrollOptions = {
// Scroll to the first highlighted line on initial load
// Offset -50 for the sticky top bar, and another -100 for some context
offset: -150
};
if (this.options.scrollFileHolder) {
$(this.options.fileHolderSelector).scrollTo(lineSelector, scrollOptions);
} else {
hash = "#L" + firstLineNumber;
$.scrollTo(lineSelector, scrollOptions);
}
this._hash = hash;
return this.__setLocationHash__(hash);
};
// Make the actual hash change in the browser
//
// This method is stubbed in tests.
LineHighlighter.prototype.__setLocationHash__ = function(value) {
return history.pushState({
url: value
// We're using pushState instead of assigning location.hash directly to
// prevent the page from scrolling on the hashchange event
}, document.title, value);
};
return LineHighlighter;
})();
}).call(window);
}
}
};
LineHighlighter.prototype.clickHandler = function(event) {
var current, lineNumber, range;
event.preventDefault();
this.clearHighlight();
lineNumber = $(event.target).closest('a').data('line-number');
current = this.hashToRange(this._hash);
if (!(current[0] && event.shiftKey)) {
// If there's no current selection, or there is but Shift wasn't held,
// treat this like a single-line selection.
this.setHash(lineNumber);
return this.highlightLine(lineNumber);
} else if (event.shiftKey) {
if (lineNumber < current[0]) {
range = [lineNumber, current[0]];
} else {
range = [current[0], lineNumber];
}
this.setHash(range[0], range[1]);
return this.highlightRange(range);
}
};
LineHighlighter.prototype.clearHighlight = function() {
return $("." + this.highlightLineClass).removeClass(this.highlightLineClass);
};
// Convert a URL hash String into line numbers
//
// hash - Hash String
//
// Examples:
//
// hashToRange('#L5') # => [5, null]
// hashToRange('#L5-15') # => [5, 15]
// hashToRange('#foo') # => [null, null]
//
// Returns an Array
LineHighlighter.prototype.hashToRange = function(hash) {
var first, last, matches;
// ?L(\d+)(?:-(\d+))?$/)
matches = hash.match(/^#?L(\d+)(?:-(\d+))?$/);
if (matches && matches.length) {
first = parseInt(matches[1], 10);
last = matches[2] ? parseInt(matches[2], 10) : null;
return [first, last];
} else {
return [null, null];
}
};
// Highlight a single line
//
// lineNumber - Line number to highlight
LineHighlighter.prototype.highlightLine = function(lineNumber) {
return $("#LC" + lineNumber).addClass(this.highlightLineClass);
};
// Highlight all lines within a range
//
// range - Array containing the starting and ending line numbers
LineHighlighter.prototype.highlightRange = function(range) {
var i, lineNumber, ref, ref1, results;
if (range[1]) {
results = [];
for (lineNumber = i = ref = range[0], ref1 = range[1]; ref <= ref1 ? i <= ref1 : i >= ref1; lineNumber = ref <= ref1 ? (i += 1) : (i -= 1)) {
results.push(this.highlightLine(lineNumber));
}
return results;
} else {
return this.highlightLine(range[0]);
}
};
// Set the URL hash string
LineHighlighter.prototype.setHash = function(firstLineNumber, lastLineNumber) {
var hash;
if (lastLineNumber) {
hash = "#L" + firstLineNumber + "-" + lastLineNumber;
} else {
hash = "#L" + firstLineNumber;
}
this._hash = hash;
return this.__setLocationHash__(hash);
};
// Make the actual hash change in the browser
//
// This method is stubbed in tests.
LineHighlighter.prototype.__setLocationHash__ = function(value) {
return history.pushState({
url: value
// We're using pushState instead of assigning location.hash directly to
// prevent the page from scrolling on the hashchange event
}, document.title, value);
};
window.LineHighlighter = LineHighlighter;
......@@ -16,9 +16,8 @@ const locales = allLocales.reduce((d, obj) => {
return data;
}, {});
let lang = document.querySelector('html').getAttribute('lang') || 'en';
lang = lang.replace(/-/g, '_');
const langAttribute = document.querySelector('html').getAttribute('lang');
const lang = (langAttribute || 'en').replace(/-/g, '_');
const locale = new Jed(locales[lang]);
/**
......
......@@ -302,7 +302,10 @@ $(function () {
return $container.remove();
// Commit show suppressed diff
});
$('.navbar-toggle').on('click', () => $('.header-content').toggleClass('menu-expanded'));
$('.navbar-toggle').on('click', () => {
$('.header-content').toggleClass('menu-expanded');
gl.lazyLoader.loadCheck();
});
// Show/hide comments on diff
$body.on('click', '.js-toggle-diff-comments', function (e) {
var $this = $(this);
......
......@@ -352,7 +352,7 @@ import {
}
expandViewContainer() {
const $wrapper = $('.content-wrapper .container-fluid');
const $wrapper = $('.content-wrapper .container-fluid').not('.breadcrumbs');
if (this.fixedLayoutPref === null) {
this.fixedLayoutPref = $wrapper.hasClass('container-limited');
}
......
......@@ -97,7 +97,7 @@ export default {
postAction(endpoint) {
this.service.postAction(endpoint)
.then(() => eventHub.$emit('refreshPipelines'))
.catch(() => new Flash('An error occured while making the request.'));
.catch(() => new Flash('An error occurred while making the request.'));
},
},
};
......@@ -73,7 +73,8 @@ import _ from 'underscore';
aspectRatio: 1,
modal: true,
scalable: false,
rotatable: false,
rotatable: true,
checkOrientation: true,
zoomable: true,
dragMode: 'move',
guides: false,
......
......@@ -44,7 +44,7 @@ export default {
Store.submitCommitsLoading = true;
Service.commitFiles(payload)
.then(this.resetCommitState)
.catch(() => Flash('An error occured while committing your changes'));
.catch(() => Flash('An error occurred while committing your changes'));
},
resetCommitState() {
......
<script>
/* global LineHighlighter */
import Store from '../stores/repo_store';
export default {
data: () => Store,
mounted() {
this.highlightFile();
},
computed: {
html() {
return this.activeFile.html;
},
},
methods: {
highlightFile() {
$(this.$el).find('.file-content').syntaxHighlight();
},
},
mounted() {
this.highlightFile();
this.lineHighlighter = new LineHighlighter({
fileHolderSelector: '.blob-viewer-container',
scrollFileHolder: true,
});
},
watch: {
html() {
this.$nextTick(() => {
......@@ -45,7 +49,7 @@ export default {
v-else
class="vertical-center render-error">
<p class="text-center">
The source could not be displayed because a rendering error occured. You can <a :href="activeFile.raw_path">download</a> it instead.
The source could not be displayed because a rendering error occurred. You can <a :href="activeFile.raw_path">download</a> it instead.
</p>
</div>
</div>
......
......@@ -37,17 +37,24 @@ export default {
let file = clickedFile;
if (file.loading) return;
file.loading = true;
if (file.type === 'tree' && file.opened) {
file = Store.removeChildFilesOfTree(file);
file.loading = false;
} else {
Service.url = file.url;
Helper.getContent(file)
.then(() => {
file.loading = false;
Helper.scrollTabsRight();
})
.catch(Helper.loadingError);
const openFile = Helper.getFileFromPath(file.url);
if (openFile) {
file.loading = false;
Store.setActiveFiles(openFile);
} else {
Service.url = file.url;
Helper.getContent(file)
.then(() => {
file.loading = false;
Helper.scrollTabsRight();
})
.catch(Helper.loadingError);
}
}
},
......
......@@ -263,6 +263,10 @@ const RepoHelper = {
return Store.openedFiles.find(openedFile => Store.activeFile.url === openedFile.url);
},
getFileFromPath(path) {
return Store.openedFiles.find(file => file.url === path);
},
loadingError() {
Flash('Unable to load this content at this time.');
},
......
......@@ -43,6 +43,8 @@ import Cookies from 'js-cookie';
$allGutterToggleIcons.removeClass('fa-angle-double-left').addClass('fa-angle-double-right');
$('aside.right-sidebar').removeClass('right-sidebar-collapsed').addClass('right-sidebar-expanded');
$('.page-with-sidebar').removeClass('right-sidebar-collapsed').addClass('right-sidebar-expanded');
if (gl.lazyLoader) gl.lazyLoader.loadCheck();
}
if (!triggered) {
return Cookies.set("collapsed_gutter", $('.right-sidebar').hasClass('right-sidebar-collapsed'));
......
......@@ -38,7 +38,7 @@ class SidebarMoveIssue {
data: (searchTerm, callback) => {
this.mediator.fetchAutocompleteProjects(searchTerm)
.then(callback)
.catch(() => new Flash('An error occured while fetching projects autocomplete.'));
.catch(() => new Flash('An error occurred while fetching projects autocomplete.'));
},
renderRow: project => `
<li>
......@@ -73,7 +73,7 @@ class SidebarMoveIssue {
this.mediator.moveIssue()
.catch(() => {
Flash('An error occured while moving the issue.');
Flash('An error occurred while moving the issue.');
this.$confirmButton
.enable()
.removeClass('is-loading');
......
......@@ -41,7 +41,7 @@ export default class SidebarMediator {
this.store.setAssigneeData(data);
this.store.setTimeTrackingData(data);
})
.catch(() => new Flash('Error occured when fetching sidebar data'));
.catch(() => new Flash('Error occurred when fetching sidebar data'));
}
fetchAutocompleteProjects(searchTerm) {
......
......@@ -306,6 +306,8 @@ header.navbar-gitlab-new {
display: flex;
width: 100%;
position: relative;
padding-top: $gl-padding / 2;
padding-bottom: $gl-padding / 2;
align-items: center;
border-bottom: 1px solid $border-color;
}
......@@ -346,6 +348,7 @@ header.navbar-gitlab-new {
display: flex;
align-items: center;
position: relative;
padding: 2px 0;
&:not(:last-child) {
margin-right: 20px;
......@@ -381,7 +384,7 @@ header.navbar-gitlab-new {
margin: 0;
font-size: 12px;
font-weight: 600;
line-height: 1;
line-height: 16px;
a {
color: $gl-text-color;
......
......@@ -535,7 +535,6 @@
}
.diff-notes-collapse {
position: relative;
width: 19px;
height: 19px;
padding: 0;
......@@ -543,11 +542,7 @@
z-index: 100;
svg {
position: absolute;
left: 50%;
top: 50%;
margin-left: -5.5px;
margin-top: -5.5px;
vertical-align: text-top;
}
path {
......
......@@ -54,6 +54,10 @@
border-radius: $border-radius-default;
color: $almost-black;
.code.white pre .hll {
background-color: $well-light-border !important;
}
.tree-content-holder {
display: flex;
min-height: 300px;
......
......@@ -22,8 +22,7 @@ class Admin::ApplicationsController < Admin::ApplicationController
@application = Doorkeeper::Application.new(application_params)
if @application.save
flash[:notice] = I18n.t(:notice, scope: [:doorkeeper, :flash, :applications, :create])
redirect_to admin_application_url(@application)
redirect_to_admin_page
else
render :new
end
......@@ -42,6 +41,13 @@ class Admin::ApplicationsController < Admin::ApplicationController
redirect_to admin_applications_url, status: 302, notice: 'Application was successfully destroyed.'
end
protected
def redirect_to_admin_page
flash[:notice] = I18n.t(:notice, scope: [:doorkeeper, :flash, :applications, :create])
redirect_to admin_application_url(@application)
end
private
def set_application
......
......@@ -128,7 +128,7 @@ class Admin::UsersController < Admin::ApplicationController
end
respond_to do |format|
result = Users::UpdateService.new(user, user_params_with_pass).execute do |user|
result = Users::UpdateService.new(current_user, user_params_with_pass.merge(user: user)).execute do |user|
user.skip_reconfirmation!
end
......@@ -155,7 +155,7 @@ class Admin::UsersController < Admin::ApplicationController
def remove_email
email = user.emails.find(params[:email_id])
success = Emails::DestroyService.new(user, email: email.email).execute
success = Emails::DestroyService.new(current_user, user: user, email: email.email).execute
respond_to do |format|
if success
......@@ -219,7 +219,7 @@ class Admin::UsersController < Admin::ApplicationController
end
def update_user(&block)
result = Users::UpdateService.new(user).execute(&block)
result = Users::UpdateService.new(current_user, user: user).execute(&block)
result[:status] == :success
end
......
......@@ -12,10 +12,14 @@ class ConfirmationsController < Devise::ConfirmationsController
def after_confirmation_path_for(resource_name, resource)
if signed_in?(resource_name)
after_sign_in_path_for(resource)
after_sign_in(resource)
else
flash[:notice] += " Please sign in."
new_session_path(resource_name)
end
end
def after_sign_in(resource)
after_sign_in_path_for(resource)
end
end
......@@ -21,14 +21,20 @@ class Oauth::ApplicationsController < Doorkeeper::ApplicationsController
@application.owner = current_user
if @application.save
flash[:notice] = I18n.t(:notice, scope: [:doorkeeper, :flash, :applications, :create])
redirect_to oauth_application_url(@application)
redirect_to_oauth_application_page
else
set_index_vars
render :index
end
end
protected
def redirect_to_oauth_application_page
flash[:notice] = I18n.t(:notice, scope: [:doorkeeper, :flash, :applications, :create])
redirect_to oauth_application_url(@application)
end
private
def verify_user_oauth_applications_enabled
......
......@@ -2,7 +2,7 @@ class Profiles::AvatarsController < Profiles::ApplicationController
def destroy
@user = current_user
Users::UpdateService.new(@user).execute { |user| user.remove_avatar! }
Users::UpdateService.new(current_user, user: @user).execute { |user| user.remove_avatar! }
redirect_to profile_path, status: 302
end
......
......@@ -5,7 +5,7 @@ class Profiles::EmailsController < Profiles::ApplicationController
end
def create
@email = Emails::CreateService.new(current_user, email_params).execute
@email = Emails::CreateService.new(current_user, email_params.merge(user: current_user)).execute
if @email.errors.blank?
NotificationService.new.new_email(@email)
......@@ -19,7 +19,7 @@ class Profiles::EmailsController < Profiles::ApplicationController
def destroy
@email = current_user.emails.find(params[:id])
Emails::DestroyService.new(current_user, email: @email.email).execute
Emails::DestroyService.new(current_user, user: current_user, email: @email.email).execute
respond_to do |format|
format.html { redirect_to profile_emails_url, status: 302 }
......
......@@ -14,7 +14,7 @@ class Profiles::KeysController < Profiles::ApplicationController
@key = Keys::CreateService.new(current_user, key_params).execute
if @key.persisted?
redirect_to profile_key_path(@key)
redirect_to_profile_key_path
else
@keys = current_user.keys.select(&:persisted?)
render :index
......@@ -50,6 +50,12 @@ class Profiles::KeysController < Profiles::ApplicationController
end
end
protected
def redirect_to_profile_key_path
redirect_to profile_key_path(@key)
end
private
def key_params
......
......@@ -7,7 +7,7 @@ class Profiles::NotificationsController < Profiles::ApplicationController
end
def update
result = Users::UpdateService.new(current_user, user_params).execute
result = Users::UpdateService.new(current_user, user_params.merge(user: current_user)).execute
if result[:status] == :success
flash[:notice] = "Notification settings saved"
......
......@@ -21,10 +21,10 @@ class Profiles::PasswordsController < Profiles::ApplicationController
password_automatically_set: false
}
result = Users::UpdateService.new(@user, password_attributes).execute
result = Users::UpdateService.new(current_user, password_attributes.merge(user: @user)).execute
if result[:status] == :success
Users::UpdateService.new(@user, password_expires_at: nil).execute
Users::UpdateService.new(current_user, user: @user, password_expires_at: nil).execute
redirect_to root_path, notice: 'Password successfully changed'
else
......@@ -46,7 +46,7 @@ class Profiles::PasswordsController < Profiles::ApplicationController
return
end
result = Users::UpdateService.new(@user, password_attributes).execute
result = Users::UpdateService.new(current_user, password_attributes.merge(user: @user)).execute
if result[:status] == :success
flash[:notice] = "Password was successfully updated. Please login with it"
......
......@@ -6,7 +6,7 @@ class Profiles::PreferencesController < Profiles::ApplicationController
def update
begin
result = Users::UpdateService.new(user, preferences_params).execute
result = Users::UpdateService.new(current_user, preferences_params.merge(user: user)).execute
if result[:status] == :success
flash[:notice] = 'Preferences saved.'
......
......@@ -10,7 +10,7 @@ class Profiles::TwoFactorAuthsController < Profiles::ApplicationController
current_user.otp_grace_period_started_at = Time.current
end
Users::UpdateService.new(current_user).execute!
Users::UpdateService.new(current_user, user: current_user).execute!
if two_factor_authentication_required? && !current_user.two_factor_enabled?
two_factor_authentication_reason(
......@@ -41,7 +41,7 @@ class Profiles::TwoFactorAuthsController < Profiles::ApplicationController
def create
if current_user.validate_and_consume_otp!(params[:pin_code])
Users::UpdateService.new(current_user, otp_required_for_login: true).execute! do |user|
Users::UpdateService.new(current_user, user: current_user, otp_required_for_login: true).execute! do |user|
@codes = user.generate_otp_backup_codes!
end
......@@ -70,7 +70,7 @@ class Profiles::TwoFactorAuthsController < Profiles::ApplicationController
end
def codes
Users::UpdateService.new(current_user).execute! do |user|
Users::UpdateService.new(current_user, user: current_user).execute! do |user|
@codes = user.generate_otp_backup_codes!
end
end
......
......@@ -10,7 +10,7 @@ class ProfilesController < Profiles::ApplicationController
def update
respond_to do |format|
result = Users::UpdateService.new(@user, user_params).execute
result = Users::UpdateService.new(current_user, user_params.merge(user: @user)).execute
if result[:status] == :success
message = "Profile was successfully updated"
......@@ -25,7 +25,7 @@ class ProfilesController < Profiles::ApplicationController
end
def reset_private_token
Users::UpdateService.new(@user).execute! do |user|
Users::UpdateService.new(current_user, user: @user).execute! do |user|
user.reset_authentication_token!
end
......@@ -35,7 +35,7 @@ class ProfilesController < Profiles::ApplicationController
end
def reset_incoming_email_token
Users::UpdateService.new(@user).execute! do |user|
Users::UpdateService.new(current_user, user: @user).execute! do |user|
user.reset_incoming_email_token!
end
......@@ -45,7 +45,7 @@ class ProfilesController < Profiles::ApplicationController
end
def reset_rss_token
Users::UpdateService.new(@user).execute! do |user|
Users::UpdateService.new(current_user, user: @user).execute! do |user|
user.reset_rss_token!
end
......@@ -61,7 +61,7 @@ class ProfilesController < Profiles::ApplicationController
end
def update_username
result = Users::UpdateService.new(@user, username: user_params[:username]).execute
result = Users::UpdateService.new(current_user, user: @user, username: user_params[:username]).execute
options = if result[:status] == :success
{ notice: "Username successfully changed" }
......
......@@ -55,7 +55,7 @@ class SessionsController < Devise::SessionsController
return unless user && user.require_password_creation?
Users::UpdateService.new(user).execute do |user|
Users::UpdateService.new(current_user, user: user).execute do |user|
@token = user.generate_reset_token
end
......
module CustomAttributesFilter
def by_custom_attributes(items)
return items unless params[:custom_attributes].is_a?(Hash)
return items unless Ability.allowed?(current_user, :read_custom_attribute)
association = items.reflect_on_association(:custom_attributes)
attributes_table = association.klass.arel_table
attributable_table = items.model.arel_table
custom_attributes = association.klass.select('true').where(
attributes_table[association.foreign_key]
.eq(attributable_table[association.association_primary_key])
)
# perform a subquery for each attribute to be filtered
params[:custom_attributes].inject(items) do |scope, (key, value)|
scope.where('EXISTS (?)', custom_attributes.where(key: key, value: value))
end
end
end
......@@ -15,6 +15,7 @@
#
class UsersFinder
include CreatedAtFilter
include CustomAttributesFilter
attr_accessor :current_user, :params
......@@ -32,6 +33,7 @@ class UsersFinder
users = by_external_identity(users)
users = by_external(users)
users = by_created_at(users)
users = by_custom_attributes(users)
users
end
......
......@@ -79,6 +79,6 @@ module BoardsHelper
end
def boards_link_text
_("Board")
s_("IssueBoards|Board")
end
end
......@@ -248,16 +248,25 @@ module IssuablesHelper
Gitlab::IssuablesCountForState.new(finder)[state]
end
def close_issuable_url(issuable)
issuable_url(issuable, close_reopen_params(issuable, :close))
def close_issuable_path(issuable)
issuable_path(issuable, close_reopen_params(issuable, :close))
end
def reopen_issuable_url(issuable)
issuable_url(issuable, close_reopen_params(issuable, :reopen))
def reopen_issuable_path(issuable)
issuable_path(issuable, close_reopen_params(issuable, :reopen))
end
def close_reopen_issuable_url(issuable, should_inverse = false)
issuable.closed? ^ should_inverse ? reopen_issuable_url(issuable) : close_issuable_url(issuable)
def close_reopen_issuable_path(issuable, should_inverse = false)
issuable.closed? ^ should_inverse ? reopen_issuable_path(issuable) : close_issuable_path(issuable)
end
def issuable_path(issuable, *options)
case issuable
when Issue
issue_path(issuable, *options)
when MergeRequest
merge_request_path(issuable, *options)
end
end
def issuable_url(issuable, *options)
......
......@@ -434,7 +434,7 @@ module Ci
def update_duration
return unless started_at
self.duration = Gitlab::Ci::PipelineDuration.from_pipeline(self)
self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
end
def execute_hooks
......
......@@ -245,6 +245,9 @@ class Project < ActiveRecord::Base
scope :pending_delete, -> { where(pending_delete: true) }
scope :without_deleted, -> { where(pending_delete: false) }
scope :with_hashed_storage, -> { where('storage_version >= 1') }
scope :with_legacy_storage, -> { where(storage_version: [nil, 0]) }
scope :sorted_by_activity, -> { reorder(last_activity_at: :desc) }
scope :sorted_by_stars, -> { reorder('projects.star_count DESC') }
......@@ -1550,7 +1553,11 @@ class Project < ActiveRecord::Base
end
def legacy_storage?
self.storage_version.nil?
[nil, 0].include?(self.storage_version)
end
def hashed_storage?
self.storage_version && self.storage_version >= 1
end
def renamed?
......@@ -1573,11 +1580,33 @@ class Project < ActiveRecord::Base
self.merge_requests_ff_only_enabled
end
def migrate_to_hashed_storage!
return if hashed_storage?
update!(repository_read_only: true)
if repo_reference_count > 0 || wiki_reference_count > 0
ProjectMigrateHashedStorageWorker.perform_in(Gitlab::ReferenceCounter::REFERENCE_EXPIRE_TIME, id)
else
ProjectMigrateHashedStorageWorker.perform_async(id)
end
end
def storage_version=(value)
super
@storage = nil if storage_version_changed?
end
def gl_repository(is_wiki:)
Gitlab::GlRepository.gl_repository(self, is_wiki)
end
private
def storage
@storage ||=
if self.storage_version && self.storage_version >= 1
if hashed_storage?
Storage::HashedProject.new(self)
else
Storage::LegacyProject.new(self)
......@@ -1590,6 +1619,14 @@ class Project < ActiveRecord::Base
end
end
def repo_reference_count
Gitlab::ReferenceCounter.new(gl_repository(is_wiki: false)).value
end
def wiki_reference_count
Gitlab::ReferenceCounter.new(gl_repository(is_wiki: true)).value
end
# set last_activity_at to the same as created_at
def set_last_activity_at
update_column(:last_activity_at, self.created_at)
......
......@@ -489,13 +489,7 @@ class Repository
def exists?
return false unless full_path
Gitlab::GitalyClient.migrate(:repository_exists) do |enabled|
if enabled
raw_repository.exists?
else
refs_directory_exists?
end
end
raw_repository.exists?
end
cache_method :exists?
......@@ -1082,12 +1076,6 @@ class Repository
blob.data
end
def refs_directory_exists?
circuit_breaker.perform do
File.exist?(File.join(path_to_repo, 'refs'))
end
end
def cache
# TODO: should we use UUIDs here? We could move repositories without clearing this cache
@cache ||= RepositoryCache.new(full_path, @project.id)
......@@ -1139,10 +1127,6 @@ class Repository
Gitlab::Git::Repository.new(project.repository_storage, disk_path + '.git', Gitlab::GlRepository.gl_repository(project, false))
end
def circuit_breaker
@circuit_breaker ||= Gitlab::Git::Storage::CircuitBreaker.for_storage(project.repository_storage)
end
def find_commits_by_message_by_shelling_out(query, ref, path, limit, offset)
ref ||= root_ref
......
......@@ -4,6 +4,7 @@ module Storage
delegate :gitlab_shell, :repository_storage_path, to: :project
ROOT_PATH_PREFIX = '@hashed'.freeze
STORAGE_VERSION = 1
def initialize(project)
@project = project
......
......@@ -60,7 +60,7 @@ class User < ActiveRecord::Base
lease = Gitlab::ExclusiveLease.new("user_update_tracked_fields:#{id}", timeout: 1.hour.to_i)
return unless lease.try_obtain
Users::UpdateService.new(self).execute(validate: false)
Users::UpdateService.new(self, user: self).execute(validate: false)
end
attr_accessor :force_random_password
......@@ -130,6 +130,8 @@ class User < ActiveRecord::Base
has_many :assigned_issues, class_name: "Issue", through: :issue_assignees, source: :issue
has_many :assigned_merge_requests, dependent: :nullify, foreign_key: :assignee_id, class_name: "MergeRequest" # rubocop:disable Cop/ActiveRecordDependent
has_many :custom_attributes, class_name: 'UserCustomAttribute'
#
# Validations
#
......@@ -526,8 +528,8 @@ class User < ActiveRecord::Base
def update_emails_with_primary_email
primary_email_record = emails.find_by(email: email)
if primary_email_record
Emails::DestroyService.new(self, email: email).execute
Emails::CreateService.new(self, email: email_was).execute
Emails::DestroyService.new(self, user: self, email: email).execute
Emails::CreateService.new(self, user: self, email: email_was).execute
end
end
......@@ -1000,7 +1002,7 @@ class User < ActiveRecord::Base
if attempts_exceeded?
lock_access! unless access_locked?
else
Users::UpdateService.new(self).execute(validate: false)
Users::UpdateService.new(self, user: self).execute(validate: false)
end
end
......@@ -1186,7 +1188,7 @@ class User < ActiveRecord::Base
&creation_block
)
Users::UpdateService.new(user).execute(validate: false)
Users::UpdateService.new(user, user: user).execute(validate: false)
user
ensure
Gitlab::ExclusiveLease.cancel(lease_key, uuid)
......
class UserCustomAttribute < ActiveRecord::Base
belongs_to :user
validates :user_id, :key, :value, presence: true
validates :key, uniqueness: { scope: [:user_id] }
end
......@@ -47,4 +47,9 @@ class GlobalPolicy < BasePolicy
rule { ~(anonymous & restricted_public_level) }.policy do
enable :read_users_list
end
rule { admin }.policy do
enable :read_custom_attribute
enable :update_custom_attribute
end
end
......@@ -2,110 +2,55 @@ module Ci
class CreatePipelineService < BaseService
attr_reader :pipeline
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil)
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
Gitlab::Ci::Pipeline::Chain::Validate::Repository,
Gitlab::Ci::Pipeline::Chain::Validate::Config,
Gitlab::Ci::Pipeline::Chain::Skip,
Gitlab::Ci::Pipeline::Chain::Create].freeze
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, &block)
@pipeline = Ci::Pipeline.new(
source: source,
project: project,
ref: ref,
sha: sha,
before_sha: before_sha,
tag: tag?,
tag: tag_exists?,
trigger_requests: Array(trigger_request),
user: current_user,
pipeline_schedule: schedule,
protected: project.protected_for?(ref)
)
result = validate_project_and_git_items ||
validate_pipeline(ignore_skip_ci: ignore_skip_ci,
save_on_errors: save_on_errors)
command = OpenStruct.new(ignore_skip_ci: ignore_skip_ci,
save_incompleted: save_on_errors,
seeds_block: block,
project: project,
current_user: current_user)
return result if result
sequence = Gitlab::Ci::Pipeline::Chain::Sequence
.new(pipeline, command, SEQUENCE)
begin
Ci::Pipeline.transaction do
pipeline.save!
sequence.build! do |pipeline, sequence|
update_merge_requests_head_pipeline if pipeline.persisted?
yield(pipeline) if block_given?
if sequence.complete?
cancel_pending_pipelines if project.auto_cancel_pending_pipelines?
pipeline_created_counter.increment(source: source)
Ci::CreatePipelineStagesService
.new(project, current_user)
.execute(pipeline)
pipeline.process!
end
rescue ActiveRecord::RecordInvalid => e
return error("Failed to persist the pipeline: #{e}")
end
update_merge_requests_head_pipeline
cancel_pending_pipelines if project.auto_cancel_pending_pipelines?
pipeline_created_counter.increment(source: source)
pipeline.tap(&:process!)
end
private
def validate_project_and_git_items
unless project.builds_enabled?
return error('Pipeline is disabled')
end
unless allowed_to_trigger_pipeline?
if can?(current_user, :create_pipeline, project)
return error("Insufficient permissions for protected ref '#{ref}'")
else
return error('Insufficient permissions to create a new pipeline')
end
end
unless branch? || tag?
return error('Reference not found')
end
unless commit
return error('Commit not found')
end
end
def validate_pipeline(ignore_skip_ci:, save_on_errors:)
unless pipeline.config_processor
unless pipeline.ci_yaml_file
return error("Missing #{pipeline.ci_yaml_file_path} file")
end
return error(pipeline.yaml_errors, save: save_on_errors)
end
if !ignore_skip_ci && skip_ci?
pipeline.skip if save_on_errors
return pipeline
end
unless pipeline.has_stage_seeds?
return error('No stages / jobs for this pipeline.')
end
end
def allowed_to_trigger_pipeline?
if current_user
allowed_to_create?
else # legacy triggers don't have a corresponding user
!project.protected_for?(ref)
end
def commit
@commit ||= project.commit(origin_sha || origin_ref)
end
def allowed_to_create?
return unless can?(current_user, :create_pipeline, project)
access = Gitlab::UserAccess.new(current_user, project: project)
if branch?
access.can_update_branch?(ref)
elsif tag?
access.can_create_tag?(ref)
else
true # Allow it for now and we'll reject when we check ref existence
end
def sha
commit.try(:id)
end
def update_merge_requests_head_pipeline
......@@ -115,11 +60,6 @@ module Ci
.update_all(head_pipeline_id: @pipeline.id)
end
def skip_ci?
return false unless pipeline.git_commit_message
pipeline.git_commit_message =~ /\[(ci[ _-]skip|skip[ _-]ci)\]/i
end
def cancel_pending_pipelines
Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines) do |cancelables|
cancelables.find_each do |cancelable|
......@@ -136,14 +76,6 @@ module Ci
.created_or_pending
end
def commit
@commit ||= project.commit(origin_sha || origin_ref)
end
def sha
commit.try(:id)
end
def before_sha
params[:checkout_sha] || params[:before] || Gitlab::Git::BLANK_SHA
end
......@@ -156,41 +88,17 @@ module Ci
params[:ref]
end
def branch?
return @is_branch if defined?(@is_branch)
@is_branch =
project.repository.ref_exists?(Gitlab::Git::BRANCH_REF_PREFIX + ref)
end
def tag?
return @is_tag if defined?(@is_tag)
@is_tag =
project.repository.ref_exists?(Gitlab::Git::TAG_REF_PREFIX + ref)
def tag_exists?
project.repository.tag_exists?(ref)
end
def ref
@ref ||= Gitlab::Git.ref_name(origin_ref)
end
def valid_sha?
origin_sha && origin_sha != Gitlab::Git::BLANK_SHA
end
def error(message, save: false)
pipeline.tap do
pipeline.errors.add(:base, message)
if save
pipeline.drop
update_merge_requests_head_pipeline
end
end
end
def pipeline_created_counter
@pipeline_created_counter ||= Gitlab::Metrics.counter(:pipelines_created_total, "Counter of pipelines created")
@pipeline_created_counter ||= Gitlab::Metrics
.counter(:pipelines_created_total, "Counter of pipelines created")
end
end
end
module Emails
class BaseService
def initialize(user, opts)
@user = user
def initialize(current_user, opts)
@current_user = current_user
@user = opts.delete(:user)
@email = opts[:email]
end
end
......
module Emails
class DestroyService < ::Emails::BaseService
def execute
Email.find_by_email!(@email).destroy && update_secondary_emails!
update_secondary_emails! if Email.find_by_email!(@email).destroy
end
private
def update_secondary_emails!
result = ::Users::UpdateService.new(@user).execute do |user|
result = ::Users::UpdateService.new(@current_user, user: @user).execute do |user|
user.update_secondary_emails!
end
......
......@@ -19,13 +19,13 @@ module MergeRequests
@merge_request = merge_request
unless @merge_request.mergeable?
return log_merge_error('Merge request is not mergeable', save_message_on_model: true)
return handle_merge_error(log_message: 'Merge request is not mergeable', save_message_on_model: true)
end
@source = find_merge_source
unless @source
return log_merge_error('No source for merge', save_message_on_model: true)
return handle_merge_error(log_message: 'No source for merge', save_message_on_model: true)
end
merge_request.in_locked_state do
......@@ -36,8 +36,7 @@ module MergeRequests
end
end
rescue MergeError => e
clean_merge_jid
log_merge_error(e.message, save_message_on_model: true)
handle_merge_error(log_message: e.message, save_message_on_model: true)
end
private
......@@ -79,10 +78,16 @@ module MergeRequests
@merge_request.force_remove_source_branch? ? @merge_request.author : current_user
end
def log_merge_error(message, save_message_on_model: false)
Rails.logger.error("MergeService ERROR: #{merge_request_info} - #{message}")
# Logs merge error message and cleans `MergeRequest#merge_jid`.
#
def handle_merge_error(log_message:, save_message_on_model: false)
Rails.logger.error("MergeService ERROR: #{merge_request_info} - #{log_message}")
@merge_request.update(merge_error: message) if save_message_on_model
if save_message_on_model
@merge_request.update(merge_error: log_message, merge_jid: nil)
else
clean_merge_jid
end
end
def merge_request_info
......
......@@ -14,6 +14,7 @@ module MergeRequests
notification_service.merge_mr(merge_request, current_user)
execute_hooks(merge_request, 'merge')
invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
end
private
......
module Projects
class HashedStorageMigrationService < BaseService
include Gitlab::ShellAdapter
attr_reader :old_disk_path, :new_disk_path
def initialize(project, logger = nil)
@project = project
@logger = logger || Rails.logger
end
def execute
return if project.hashed_storage?
@old_disk_path = project.disk_path
has_wiki = project.wiki.repository_exists?
project.storage_version = Storage::HashedProject::STORAGE_VERSION
project.ensure_storage_path_exists
@new_disk_path = project.disk_path
result = move_repository(@old_disk_path, @new_disk_path)
if has_wiki
result &&= move_repository("#{@old_disk_path}.wiki", "#{@new_disk_path}.wiki")
end
unless result
rollback_folder_move
return
end
project.repository_read_only = false
project.save!
block_given? ? yield : result
end
private
def move_repository(from_name, to_name)
from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")
# If we don't find the repository on either original or target we should log that as it could be an issue if the
# project was not originally empty.
if !from_exists && !to_exists
logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
return false
elsif !from_exists
# Repository has been moved already.
return true
end
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
end
def rollback_folder_move
move_repository(@new_disk_path, @old_disk_path)
move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki")
end
def logger
@logger
end
end
end
......@@ -2,22 +2,21 @@ module Users
class UpdateService < BaseService
include NewUserNotifier
def initialize(user, params = {})
@user = user
def initialize(current_user, params = {})
@current_user = current_user
@user = params.delete(:user)
@params = params.dup
end
def execute(validate: true, &block)
yield(@user) if block_given?
assign_attributes(&block)
user_exists = @user.persisted?
if @user.save(validate: validate)
notify_new_user(@user, nil) unless user_exists
assign_attributes(&block)
success
if @user.save(validate: validate)
notify_success(user_exists)
else
error(@user.errors.full_messages.uniq.join('. '))
end
......@@ -33,6 +32,12 @@ module Users
private
def notify_success(user_exists)
notify_new_user(@user, nil) unless user_exists
success
end
def assign_attributes(&block)
if @user.user_synced_attributes_metadata
params.except!(*@user.user_synced_attributes_metadata.read_only_attributes)
......
- confirmation_link = confirmation_url(@resource, confirmation_token: @token)
- if @resource.unconfirmed_email.present?
#content
= email_default_heading(@resource.unconfirmed_email)
%p Click the link below to confirm your email address.
#cta
= link_to 'Confirm your email address', confirmation_url(@resource, confirmation_token: @token)
= link_to confirmation_link, confirmation_link
- else
#content
- if Gitlab.com?
......@@ -12,4 +13,4 @@
= email_default_heading("Welcome, #{@resource.name}!")
%p To get started, click the link below to confirm your account.
#cta
= link_to 'Confirm your account', confirmation_url(@resource, confirmation_token: @token)
= link_to confirmation_link, confirmation_link
......@@ -3,7 +3,7 @@
= render "projects/jobs/header", show_controls: false
- add_to_breadcrumbs(_('Jobs'), project_jobs_path(@project))
- add_to_breadcrumbs(s_('CICD|Jobs'), project_jobs_path(@project))
- add_to_breadcrumbs("##{@build.id}", project_jobs_path(@project))
.tree-holder
......
......@@ -6,10 +6,10 @@
.svg-container
= custom_icon('icon_autodevops')
.user-callout-copy
%h4= _('Auto DevOps (Beta)')
%p= _('Auto DevOps can be activated for this project. It will automatically build, test, and deploy your application based on a predefined CI/CD configuration.')
%h4= s_('AutoDevOps|Auto DevOps (Beta)')
%p= s_('AutoDevOps|Auto DevOps can be activated for this project. It will automatically build, test, and deploy your application based on a predefined CI/CD configuration.')
%p
#{s_('AutoDevOps|Learn more in the')}
= link_to _('Auto DevOps documentation'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer'
- link = link_to(s_('AutoDevOps|Auto DevOps documentation'), help_page_path('topics/autodevops/index.md'), target: '_blank', rel: 'noopener noreferrer')
= s_('AutoDevOps|Learn more in the %{link_to_documentation}').html_safe % { link_to_documentation: link }
= link_to _('Enable in settings'), project_settings_ci_cd_path(@project, anchor: 'js-general-pipeline-settings'), class: 'btn btn-primary js-close-callout'
= link_to s_('AutoDevOps|Enable in settings'), project_settings_ci_cd_path(@project, anchor: 'js-general-pipeline-settings'), class: 'btn btn-primary js-close-callout'
......@@ -3,9 +3,9 @@
- button_method = issuable_close_reopen_button_method(issuable)
- if can_update && is_current_user
= link_to "Close #{display_issuable_type}", close_issuable_url(issuable), method: button_method,
= link_to "Close #{display_issuable_type}", close_issuable_path(issuable), method: button_method,
class: "hidden-xs hidden-sm btn btn-grouped btn-close js-btn-issue-action #{issuable_button_visibility(issuable, true)}", title: "Close #{display_issuable_type}"
= link_to "Reopen #{display_issuable_type}", reopen_issuable_url(issuable), method: button_method,
= link_to "Reopen #{display_issuable_type}", reopen_issuable_path(issuable), method: button_method,
class: "hidden-xs hidden-sm btn btn-grouped btn-reopen js-btn-issue-action #{issuable_button_visibility(issuable, false)}", title: "Reopen #{display_issuable_type}"
- elsif can_update && !is_current_user
= render 'shared/issuable/close_reopen_report_toggle', issuable: issuable
......
......@@ -7,7 +7,7 @@
- button_method = issuable_close_reopen_button_method(issuable)
.pull-left.btn-group.prepend-left-10.issuable-close-dropdown.droplab-dropdown.js-issuable-close-dropdown
= link_to "#{display_button_action} #{display_issuable_type}", close_reopen_issuable_url(issuable),
= link_to "#{display_button_action} #{display_issuable_type}", close_reopen_issuable_path(issuable),
method: button_method, class: "#{button_class} btn-#{button_action}", title: "#{display_button_action} #{display_issuable_type}"
= button_tag type: 'button', class: "#{toggle_class} btn-#{button_action}-color",
......@@ -16,7 +16,7 @@
%ul#issuable-close-menu.js-issuable-close-menu.dropdown-menu{ class: button_responsive_class, data: { dropdown: true } }
%li.close-item{ class: "#{issuable_button_visibility(issuable, true) || 'droplab-item-selected'}",
data: { text: "Close #{display_issuable_type}", url: close_issuable_url(issuable),
data: { text: "Close #{display_issuable_type}", url: close_issuable_path(issuable),
button_class: "#{button_class} btn-close", toggle_class: "#{toggle_class} btn-close-color", method: button_method } }
%button.btn.btn-transparent
= icon('check', class: 'icon')
......@@ -26,7 +26,7 @@
= display_issuable_type
%li.reopen-item{ class: "#{issuable_button_visibility(issuable, false) || 'droplab-item-selected'}",
data: { text: "Reopen #{display_issuable_type}", url: reopen_issuable_url(issuable),
data: { text: "Reopen #{display_issuable_type}", url: reopen_issuable_path(issuable),
button_class: "#{button_class} btn-reopen", toggle_class: "#{toggle_class} btn-reopen-color", method: button_method } }
%button.btn.btn-transparent
= icon('check', class: 'icon')
......
class ProjectMigrateHashedStorageWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
def perform(project_id)
project = Project.find_by(id: project_id)
return if project.nil? || project.pending_delete?
::Projects::HashedStorageMigrationService.new(project, logger).execute
end
end
class StorageMigratorWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
BATCH_SIZE = 100
def perform(start, finish)
projects = build_relation(start, finish)
projects.with_route.find_each(batch_size: BATCH_SIZE) do |project|
Rails.logger.info "Starting storage migration of #{project.full_path} (ID=#{project.id})..."
begin
project.migrate_to_hashed_storage!
rescue => err
Rails.logger.error("#{err.message} migrating storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
end
end
end
def build_relation(start, finish)
relation = Project
table = Project.arel_table
relation = relation.where(table[:id].gteq(start)) if start
relation = relation.where(table[:id].lteq(finish)) if finish
relation
end
end
---
title: Confirmation email shows link as text instead of human readable text
merge_request: 14243
author: bitsapien
type: changed
---
title: Improves i18n for Auto Devops callout
merge_request:
author:
type: other
---
title: find_user Users helper method no longer overrides find_user API helper method.
merge_request: 14418
author:
type: fixed
---
title: Notes will not show an empty bubble when the author isn't a member.
merge_request: 14450
author:
type: fixed
---
title: Some checks in `rake gitlab:check` were failing with 'undefined method `run_command`'
merge_request: 14469
author:
type: fixed
---
title: Fix 500 error on merged merge requests when GitLab is restored from a backup
merge_request:
author:
type: fixed
---
title: Adjust MRs being stuck on "process of being merged" for more than 2 hours
merge_request:
author:
type: fixed
---
title: breadcrumbs receives padding when double lined
merge_request:
author:
type: changed
---
title: Fix CSRF validation issue when closing/opening merge requests from the UI
merge_request: 14555
author:
type: fixed
---
title: Re-allow `name` attribute on user-provided anchor HTML
title: Fixed commenting on side-by-side commit diff
merge_request:
author:
type: fixed
---
title: Make sure API responds with 401 when invalid authentication info is provided
merge_request:
author:
type: fixed
---
title: Clarify artifact download via the API only accepts branch or tag name for ref
merge_request:
author:
type: other
---
title: Change recommended MySQL version to 5.6
merge_request:
author:
type: other
---
title: Support custom attributes on users
merge_request: 13038
author: Markus Koller
---
title: Make locked setting of Runner to not affect jobs scheduling
merge_request: 14483
author:
type: fixed
---
title: Fix merge request counter updates after merge
merge_request:
author:
type: fixed
---
title: Script to migrate project's repositories to new Hashed Storage
merge_request: 14067
author:
type: added
---
title: Fixed breadcrumbs container expanding in side-by-side diff view
merge_request:
author:
type: fixed
---
title: Fix profile image orientation based on EXIF data
merge_request: 14461
author: gvieira37
type: fixed
---
title: Gitaly RepositoryExists remains opt-in for all method calls
merge_request:
author:
type: fixed
......@@ -62,3 +62,5 @@
- [update_user_activity, 1]
- [propagate_service_template, 1]
- [background_migration, 1]
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
class AddRepositoryReadOnlyToProjects < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :projects, :repository_read_only, :boolean
end
end
class CreateUserCustomAttributes < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :user_custom_attributes do |t|
t.timestamps_with_timezone null: false
t.references :user, null: false, foreign_key: { on_delete: :cascade }
t.string :key, null: false
t.string :value, null: false
t.index [:user_id, :key], unique: true
t.index [:key, :value]
end
end
end
......@@ -1215,6 +1215,7 @@ ActiveRecord::Schema.define(version: 20170921115009) do
t.datetime "last_repository_updated_at"
t.integer "storage_version", limit: 2
t.boolean "resolve_outdated_diff_discussions"
t.boolean "repository_read_only"
t.boolean "merge_requests_ff_only_enabled", default: false
end
......@@ -1535,6 +1536,17 @@ ActiveRecord::Schema.define(version: 20170921115009) do
add_index "user_agent_details", ["subject_id", "subject_type"], name: "index_user_agent_details_on_subject_id_and_subject_type", using: :btree
create_table "user_custom_attributes", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "user_id", null: false
t.string "key", null: false
t.string "value", null: false
end
add_index "user_custom_attributes", ["key", "value"], name: "index_user_custom_attributes_on_key_and_value", using: :btree
add_index "user_custom_attributes", ["user_id", "key"], name: "index_user_custom_attributes_on_user_id_and_key", unique: true, using: :btree
create_table "user_synced_attributes_metadata", force: :cascade do |t|
t.boolean "name_synced", default: false
t.boolean "email_synced", default: false
......@@ -1761,6 +1773,7 @@ ActiveRecord::Schema.define(version: 20170921115009) do
add_foreign_key "todos", "projects", name: "fk_45054f9c45", on_delete: :cascade
add_foreign_key "trending_projects", "projects", on_delete: :cascade
add_foreign_key "u2f_registrations", "users"
add_foreign_key "user_custom_attributes", "users", on_delete: :cascade
add_foreign_key "user_synced_attributes_metadata", "users", on_delete: :cascade
add_foreign_key "users_star_projects", "projects", name: "fk_22cd27ddfc", on_delete: :cascade
add_foreign_key "web_hook_logs", "web_hooks", on_delete: :cascade
......
# Repository Storage Rake Tasks
This is a collection of rake tasks you can use to help you list and migrate
existing projects from Legacy storage to the new Hashed storage type.
You can read more about the storage types [here][storage-types].
## List projects on Legacy storage
To have a simple summary of projects using **Legacy** storage:
**Omnibus Installation**
```bash
gitlab-rake gitlab:storage:legacy_projects
```
**Source Installation**
```bash
rake gitlab:storage:legacy_projects
```
------
To list projects using **Legacy** storage:
**Omnibus Installation**
```bash
gitlab-rake gitlab:storage:list_legacy_projects
```
**Source Installation**
```bash
rake gitlab:storage:list_legacy_projects
```
## List projects on Hashed storage
To have a simple summary of projects using **Hashed** storage:
**Omnibus Installation**
```bash
gitlab-rake gitlab:storage:hashed_projects
```
**Source Installation**
```bash
rake gitlab:storage:hashed_projects
```
------
To list projects using **Hashed** storage:
**Omnibus Installation**
```bash
gitlab-rake gitlab:storage:list_hashed_projects
```
**Source Installation**
```bash
rake gitlab:storage:list_hashed_projects
```
## Migrate existing projects to Hashed storage
Before migrating your existing projects, you should
[enable hashed storage][storage-migration] for the new projects as well.
This task will schedule all your existing projects to be migrated to the
**Hashed** storage type:
**Omnibus Installation**
```bash
gitlab-rake gitlab:storage:migrate_to_hashed
```
**Source Installation**
```bash
rake gitlab:storage:migrate_to_hashed
```
You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen.
There is a specific queue you can watch to see how long it will take to finish: **project_migrate_hashed_storage**.
After it reaches zero, you can confirm every project has been migrated by running the commands above.
If you find it necessary, you can run this migration script again to schedule missing projects.
Any error or warning will be logged in the Sidekiq log file.
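As an alternative to watching the Admin screen, you can check the queue directly from a Rails console. This is a minimal sketch using Sidekiq's standard `Sidekiq::Queue` API (run it from `gitlab-rails console` on an Omnibus installation; the queue name is the one mentioned above):
```ruby
require 'sidekiq/api'

# Number of jobs still waiting in the hashed-storage migration queue;
# 0 means every scheduled migration job has been picked up by a worker.
puts Sidekiq::Queue.new('project_migrate_hashed_storage').size
```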
[storage-types]: ../repository_storage_types.md
[storage-migration]: ../repository_storage_types.md#how-to-migrate-to-hashed-storage
# Repository Storage Types
> [Introduced][ce-28283] in GitLab 10.0.
## Legacy Storage
Legacy Storage is the storage behavior prior to version 10.0. For historical reasons, GitLab replicated the same
mapping structure from the project URLs:
* Project's repository: `#{namespace}/#{project_name}.git`
* Project's wiki: `#{namespace}/#{project_name}.wiki.git`
This structure made it simple to migrate from existing solutions to GitLab, and easy for Administrators to find where a
repository is stored.
On the other hand, this approach has some drawbacks:
The storage location will concentrate a huge number of top-level namespaces. The impact can be reduced by the introduction of [multiple storage paths][storage-paths].
Because backups are a snapshot of the same URL mapping, if you try to recover a very old backup, you need to verify
whether any project has taken the place of an old removed project sharing the same URL. This means that `mygroup/myproject`
from your backup may not be the same project that lives at that URL today.
Any change in the URL needs to be reflected on disk (when groups, users, or projects are renamed). This can add a lot
of load in big installations, and can be even worse if any type of network-based filesystem is used.
Last, for GitLab Geo, this storage type means we have to synchronize the disk state and replicate renames in the correct
order, or we may temporarily end up with the wrong repository or missing data.
## Hashed Storage
Hashed Storage is the new storage behavior we are rolling out with 10.0. It's not enabled by default yet, but we
encourage everyone to try it and take the time to fix any scripts you may have that depend on the old behavior.
Instead of coupling the project URL to the folder structure where the repository is stored on disk, we are coupling
a hash based on the project's ID.
This makes the folder structure immutable, and therefore eliminates any requirement to synchronize state from URLs to
disk structure. This means that renaming a group, user, or project costs only the database transaction, and takes
effect immediately.
The hash also helps to spread the repositories more evenly on disk, so the top-level directory will contain fewer
folders than the total number of top-level namespaces.
The hash format is based on the hexadecimal representation of SHA256: `SHA256(project.id)`.
The top-level folder uses the first 2 characters, followed by another folder with the next 2 characters. Both are stored in
a special `@hashed` folder, to co-exist with existing Legacy projects:
```ruby
# Project's repository:
"@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}.git"
# Wiki's repository:
"@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}.wiki.git"
```
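For illustration, here is a minimal Ruby sketch of how such a path can be derived; the project ID is a made-up example and the helper GitLab uses internally may differ slightly:
```ruby
require 'digest'

# Illustrative only: derive hashed storage paths for a hypothetical project ID.
project_id = 93
hash = Digest::SHA256.hexdigest(project_id.to_s)

repository_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}.git"
wiki_path       = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}.wiki.git"

puts repository_path
puts wiki_path
```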
This new format also makes it possible to restore backups with confidence: when restoring a repository from a backup,
you will never mistakenly restore it into the wrong project (provided the backup was made after the migration).
### How to migrate to Hashed Storage
In GitLab, go to **Admin > Settings**, find the **Repository Storage** section and select
"_Create new projects using hashed storage paths_".
To migrate your existing projects to the new storage type, check the specific [rake tasks].
[ce-28283]: https://gitlab.com/gitlab-org/gitlab-ce/issues/28283
[rake tasks]: raketasks/storage.md#migrate-existing-projects-to-hashed-storage
[storage-paths]: repository_storage_types.md
......@@ -14,6 +14,7 @@ following locations:
- [Project-level Variables](project_level_variables.md)
- [Group-level Variables](group_level_variables.md)
- [Commits](commits.md)
- [Custom Attributes](custom_attributes.md)
- [Deployments](deployments.md)
- [Deploy Keys](deploy_keys.md)
- [Environments](environments.md)
......
# Custom Attributes API
Every API call to custom attributes must be authenticated as an administrator.
## List custom attributes
Get all custom attributes on a user.
```
GET /users/:id/custom_attributes
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer | yes | The ID of a user |
```bash
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/users/42/custom_attributes
```
Example response:
```json
[
{
"key": "location",
"value": "Antarctica"
},
{
"key": "role",
"value": "Developer"
}
]
```
## Single custom attribute
Get a single custom attribute on a user.
```
GET /users/:id/custom_attributes/:key
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer | yes | The ID of a user |
| `key` | string | yes | The key of the custom attribute |
```bash
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/users/42/custom_attributes/location
```
Example response:
```json
{
"key": "location",
"value": "Antarctica"
}
```
## Set custom attribute
Set a custom attribute on a user. The attribute will be updated if it already exists,
or created otherwise.
```
PUT /users/:id/custom_attributes/:key
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer | yes | The ID of a user |
| `key` | string | yes | The key of the custom attribute |
| `value` | string | yes | The value of the custom attribute |
```bash
curl --request PUT --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" --data "value=Greenland" https://gitlab.example.com/api/v4/users/42/custom_attributes/location
```
Example response:
```json
{
"key": "location",
"value": "Greenland"
}
```
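If you prefer Ruby over curl, here is a minimal sketch using only the standard library; the token, host, and values are the same placeholders as in the curl example above:
```ruby
require 'net/http'
require 'uri'
require 'json'

# Set the `location` custom attribute on user 42 (placeholder token and host).
uri = URI('https://gitlab.example.com/api/v4/users/42/custom_attributes/location')

request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = '9koXpg98eAheJpvBs5tK'
request.set_form_data('value' => 'Greenland')

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts JSON.parse(response.body) # => {"key"=>"location", "value"=>"Greenland"}
```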
## Delete custom attribute
Delete a custom attribute on a user.
```
DELETE /users/:id/custom_attributes/:key
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer | yes | The ID of a user |
| `key` | string | yes | The key of the custom attribute |
```bash
curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/users/42/custom_attributes/location
```
......@@ -336,7 +336,7 @@ Parameters
| Attribute | Type | Required | Description |
|-------------|---------|----------|-------------------------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `ref_name` | string | yes | The ref from a repository |
| `ref_name` | string | yes | The ref from a repository (can only be a branch or tag name, not HEAD or a SHA) |
| `job` | string | yes | The name of the job |
Example request:
......
......@@ -154,6 +154,12 @@ You can search users by creation date time range with:
GET /users?created_before=2001-01-02T00:00:00.060Z&created_after=1999-01-02T00:00:00.060
```
You can filter by [custom attributes](custom_attributes.md) with:
```
GET /users?custom_attributes[key]=value&custom_attributes[other_key]=other_value
```
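For example, here is a minimal Ruby sketch of the same filter; the attribute keys and values are illustrative, reused from the custom attributes examples:
```ruby
require 'net/http'
require 'uri'

# List users matching two custom attributes (placeholder token and host).
uri = URI('https://gitlab.example.com/api/v4/users')
uri.query = URI.encode_www_form(
  'custom_attributes[location]' => 'Antarctica',
  'custom_attributes[role]'     => 'Developer'
)

request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = '9koXpg98eAheJpvBs5tK'

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.body # JSON array of matching users
```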
## Single user
Get a single user.
......
......@@ -250,6 +250,8 @@ By default, when using `docker:dind`, Docker uses the `vfs` storage driver which
copies the filesystem on every run. This is a very disk-intensive operation
which can be avoided if a different driver is used, for example `overlay2`.
### Requirements
1. Make sure a recent kernel is used, preferably `>= 4.2`.
1. Check whether the `overlay` module is loaded:
......@@ -271,14 +273,27 @@ which can be avoided if a different driver is used, for example `overlay2`.
overlay
```
1. Use the driver by defining a variable at the top of your `.gitlab-ci.yml`:
### Use driver per project
```
variables:
DOCKER_DRIVER: overlay2
```
> **Note:**
You can enable the driver for each project individually by editing the project's `.gitlab-ci.yml`:
```
variables:
DOCKER_DRIVER: overlay2
```
### Use driver for every project
To enable the driver for every project, you can set the environment variable for every build by adding `environment` in the `[[runners]]` section of `config.toml`:
```toml
environment = ["DOCKER_DRIVER=overlay2"]
```
If you're running multiple Runners, you will have to modify all configuration files.
> **Notes:**
- More information about the Runner configuration is available in the [Runner documentation](https://docs.gitlab.com/runner/configuration/).
- For more information about using OverlayFS with Docker, you can read
[Use the OverlayFS storage driver](https://docs.docker.com/engine/userguide/storagedriver/overlayfs-driver/).
......
......@@ -1570,6 +1570,11 @@ Read more on [GitLab Pages user documentation](../../user/project/pages/index.md
Each instance of GitLab CI has an embedded debug tool called Lint.
You can find the link under `/ci/lint` of your GitLab instance.
## Using reserved keywords
If you get a validation error when using specific values (e.g., `true` or `false`),
try quoting them (e.g., `"true"`), or change them to a different form (e.g., `/bin/true`).
## Skipping jobs
If your commit message contains `[ci skip]` or `[skip ci]`, using any
......
......@@ -44,6 +44,7 @@
- [Building a package for testing purposes](build_test_package.md)
- [Manage feature flags](feature_flags.md)
- [View sent emails or preview mailers](emails.md)
- [Working with Gitaly](gitaly.md)
## Databases
......
......@@ -29,34 +29,6 @@ For our currently-supported browsers, see our [requirements][requirements].
## Development Process
When you are assigned an issue, please follow these steps:
### Divide a big feature into small Merge Requests
1. Big Merge Requests are painful to review. To make this process easier, we
must break a big feature into smaller ones and create a Merge Request for each step.
1. The first step is to create a branch from `master`, let's call it `new-feature`. This branch
will be the recipient of all the smaller Merge Requests. Only this one will be merged to `master`.
1. Don't do any work on this branch; just keep it synced with `master`.
1. Create a new branch from `new-feature`, let's call it `new-feature-step-1`. We advise you
to clearly identify which step the branch represents.
1. Do the first part of the modifications in this branch. The target branch of this Merge Request
should be `new-feature`.
1. Once `new-feature-step-1` gets merged into `new-feature`, we can continue our work. Create a new
branch from `new-feature`, let's call it `new-feature-step-2`, and repeat the process.
```shell
master
└─ new-feature
├─ new-feature-step-1
├─ new-feature-step-2
└─ new-feature-step-3
```
**Tips**
- Make sure the `new-feature` branch is always synced with `master`: merge `master` frequently.
- Do the same for the feature branch you have opened. This can be accomplished by merging `master` into `new-feature` and `new-feature` into `new-feature-step-*`.
- Avoid rewriting history.
### Share your work early
1. Before writing code, make sure your vision of the architecture is aligned with
GitLab's architecture.
......