Commit b6ea6f31 authored by Simon Knox

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ee into ee-psimyn-issue-note-refac

parents bf4322ef 3d20a428
@@ -604,7 +604,7 @@ codequality:
   script:
     - cp .rubocop.yml .rubocop.yml.bak
     - grep -v "rubocop-gitlab-security" .rubocop.yml.bak > .rubocop.yml
-    - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > raw_codeclimate.json
+    - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate analyze -f json > raw_codeclimate.json
     - cat raw_codeclimate.json | docker run -i stedolan/jq -c 'map({check_name,fingerprint,location})' > codeclimate.json
     - mv .rubocop.yml.bak .rubocop.yml
   artifacts:
...
@@ -114,7 +114,7 @@ gem 'google-api-client', '~> 0.13.6'
gem 'unf', '~> 0.1.4'

# Seed data
-gem 'seed-fu', '~> 2.3.5'
+gem 'seed-fu', '~> 2.3.7'

# Search
gem 'elasticsearch-model', '~> 0.1.9'
@@ -295,7 +295,7 @@ group :metrics do
  gem 'influxdb', '~> 0.2', require: false

  # Prometheus
-  gem 'prometheus-client-mmap', '~> 0.7.0.beta37'
+  gem 'prometheus-client-mmap', '~> 0.7.0.beta39'
  gem 'raindrops', '~> 0.18'
end
...
@@ -654,7 +654,7 @@ GEM
      parser
      unparser
    procto (0.0.3)
-    prometheus-client-mmap (0.7.0.beta37)
+    prometheus-client-mmap (0.7.0.beta39)
      mmap2 (~> 2.2, >= 2.2.9)
    pry (0.10.4)
      coderay (~> 1.1.0)
@@ -844,7 +844,7 @@ GEM
      rake (>= 0.9, < 13)
      sass (~> 3.4.20)
    securecompare (1.0.0)
-    seed-fu (2.3.6)
+    seed-fu (2.3.7)
      activerecord (>= 3.1)
      activesupport (>= 3.1)
    select2-rails (3.5.9.3)
@@ -1149,7 +1149,7 @@ DEPENDENCIES
  peek-sidekiq (~> 1.0.3)
  pg (~> 0.18.2)
  premailer-rails (~> 1.9.7)
-  prometheus-client-mmap (~> 0.7.0.beta37)
+  prometheus-client-mmap (~> 0.7.0.beta39)
  pry-byebug (~> 3.4.1)
  pry-rails (~> 0.3.4)
  rack-attack (~> 4.4.1)
...
@@ -16,6 +16,10 @@ export default {
      required: true,
      type: String,
    },
+    updateEndpoint: {
+      required: true,
+      type: String,
+    },
    canUpdate: {
      required: true,
      type: Boolean,
@@ -262,6 +266,8 @@ export default {
      :description-text="state.descriptionText"
      :updated-at="state.updatedAt"
      :task-status="state.taskStatus"
+      :issuable-type="issuableType"
+      :update-url="updateEndpoint"
    />
    <edited-component
      v-if="hasUpdated"
...
@@ -22,6 +22,16 @@
      required: false,
      default: '',
    },
+    issuableType: {
+      type: String,
+      required: false,
+      default: 'issue',
+    },
+    updateUrl: {
+      type: String,
+      required: false,
+      default: null,
+    },
  },
  data() {
    return {
@@ -48,7 +58,7 @@
    if (this.canUpdate) {
      // eslint-disable-next-line no-new
      new TaskList({
-        dataType: 'issue',
+        dataType: this.issuableType,
        fieldName: 'description',
        selector: '.detail-page-description',
      });
@@ -95,7 +105,9 @@
    <textarea
      class="hidden js-task-list-field"
      v-if="descriptionText"
-      v-model="descriptionText">
+      v-model="descriptionText"
+      :data-update-url="updateUrl"
+    >
    </textarea>
  </div>
</template>
@@ -56,9 +56,11 @@ export const slugify = str => str.trim().toLowerCase();
export const truncate = (string, maxLength) => `${string.substr(0, (maxLength - 3))}...`;

/**
- * Capitalizes first character.
+ * Capitalizes first character
 *
 * @param {String} text
- * @returns {String}
+ * @return {String}
 */
-export const capitalizeFirstCharacter = text => `${text[0].toUpperCase()}${text.slice(1)}`;
+export function capitalizeFirstCharacter(text) {
+  return `${text[0].toUpperCase()}${text.slice(1)}`;
+}
@@ -12,6 +12,9 @@
  />
*/

+// only allow classes in images.scss e.g. s12
+const validSizes = [8, 12, 16, 18, 24, 32, 48, 72];
+
export default {
  props: {
    name: {
@@ -23,6 +26,9 @@
      type: Number,
      required: false,
      default: 16,
+      validator(value) {
+        return validSizes.includes(value);
+      },
    },
    cssClasses: {
@@ -42,6 +48,7 @@
  },
};
</script>
+
<template>
  <svg
    :class="[iconSizeClass, cssClasses]">
...
@@ -292,6 +292,8 @@
.gutter-toggle {
  margin-top: 7px;
  border-left: 1px solid $border-gray-normal;
+  padding-left: 0;
+  text-align: center;
}

.title .gutter-toggle {
...
@@ -54,7 +54,7 @@ module IssuableActions
  end

  def destroy
-    issuable.destroy
+    Issuable::DestroyService.new(issuable.project, current_user).execute(issuable)
    TodoService.new.destroy_issuable(issuable, current_user)

    name = issuable.human_class_name
...
@@ -45,8 +45,7 @@ class Projects::CommitsController < Projects::ApplicationController
  private

  def set_commits
-    render_404 unless request.format == :atom || @repository.blob_at(@commit.id, @path) || @repository.tree(@commit.id, @path).entries.present?
+    render_404 unless @path.empty? || request.format == :atom || @repository.blob_at(@commit.id, @path) || @repository.tree(@commit.id, @path).entries.present?
    @limit, @offset = (params[:limit] || 40).to_i, (params[:offset] || 0).to_i

    search = params[:search]
...
class RunnerJobsFinder
  attr_reader :runner, :params

  def initialize(runner, params = {})
    @runner = runner
    @params = params
  end

  def execute
    items = @runner.builds
    items = by_status(items)
    items
  end

  private

  def by_status(items)
    return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status])

    items.where(status: params[:status])
  end
end
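
A usage sketch for the new finder (hypothetical console session; the runner id and statuses are illustrative):

```ruby
runner = Ci::Runner.find(6)

RunnerJobsFinder.new(runner, status: 'failed').execute # => only failed builds
RunnerJobsFinder.new(runner, status: 'bogus').execute  # => all builds; unknown statuses skip the filter
RunnerJobsFinder.new(runner).execute                   # => all builds
```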
@@ -178,6 +178,9 @@ module ApplicationSettingsHelper
  :ed25519_key_restriction,
  :email_author_in_body,
  :enabled_git_access_protocol,
+  :gitaly_timeout_default,
+  :gitaly_timeout_medium,
+  :gitaly_timeout_fast,
  :gravatar_enabled,
  :hashed_storage_enabled,
  :help_page_hide_commercial_content,
...
@@ -213,6 +213,7 @@ module IssuablesHelper
  def issuable_initial_data(issuable)
    data = {
      endpoint: issuable_path(issuable),
+      updateEndpoint: "#{issuable_path(issuable)}.json",
      canUpdate: can?(current_user, :"update_#{issuable.to_ability_name}", issuable),
      canDestroy: can?(current_user, :"destroy_#{issuable.to_ability_name}", issuable),
      canAdmin: can?(current_user, :"admin_#{issuable.to_ability_name}", issuable),
...
@@ -185,6 +185,27 @@ class ApplicationSetting < ActiveRecord::Base
    end
  end

+  validates :gitaly_timeout_default,
+            presence: true,
+            numericality: { only_integer: true, greater_than_or_equal_to: 0 }
+
+  validates :gitaly_timeout_medium,
+            presence: true,
+            numericality: { only_integer: true, greater_than_or_equal_to: 0 }
+  validates :gitaly_timeout_medium,
+            numericality: { less_than_or_equal_to: :gitaly_timeout_default },
+            if: :gitaly_timeout_default
+  validates :gitaly_timeout_medium,
+            numericality: { greater_than_or_equal_to: :gitaly_timeout_fast },
+            if: :gitaly_timeout_fast
+
+  validates :gitaly_timeout_fast,
+            presence: true,
+            numericality: { only_integer: true, greater_than_or_equal_to: 0 }
+  validates :gitaly_timeout_fast,
+            numericality: { less_than_or_equal_to: :gitaly_timeout_default },
+            if: :gitaly_timeout_default
+
  SUPPORTED_KEY_TYPES.each do |type|
    validates :"#{type}_key_restriction", presence: true, key_restriction: { type: type }
  end
@@ -325,7 +346,10 @@ class ApplicationSetting < ActiveRecord::Base
      slack_app_enabled: false,
      slack_app_id: nil,
      slack_app_secret: nil,
-      slack_app_verification_token: nil
+      slack_app_verification_token: nil,
+      gitaly_timeout_fast: 10,
+      gitaly_timeout_medium: 30,
+      gitaly_timeout_default: 55
    }
  end
...
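
The new validations encode the ordering fast <= medium <= default. A minimal sketch of how the conditional checks fire (other `ApplicationSetting` validations are ignored here; the error messages are the standard Rails numericality ones):

```ruby
setting = ApplicationSetting.new(
  gitaly_timeout_fast: 10,
  gitaly_timeout_medium: 30,
  gitaly_timeout_default: 55
)

# A medium timeout above the default violates less_than_or_equal_to...
setting.gitaly_timeout_medium = 60
setting.validate
setting.errors[:gitaly_timeout_medium] # => ["must be less than or equal to 55"]

# ...and one below the fast timeout violates greater_than_or_equal_to.
setting.gitaly_timeout_medium = 5
setting.validate
setting.errors[:gitaly_timeout_medium] # => ["must be greater than or equal to 10"]
```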
@@ -108,6 +108,7 @@ module Ci
  end

  before_transition any => [:failed] do |build|
+    next unless build.project
    next if build.retries_max.zero?

    if build.retries_count < build.retries_max
...
@@ -4,7 +4,6 @@ module Ci
  include HasVariable
  include Presentable
  prepend EE::Ci::Variable
-  include Presentable

  belongs_to :project
...
@@ -17,6 +17,7 @@ class CommitStatus < ActiveRecord::Base
  validates :name, presence: true, unless: :importing?

  alias_attribute :author, :user
+  alias_attribute :pipeline_id, :commit_id

  scope :failed_but_allowed, -> do
    where(allow_failure: true, status: [:failed, :canceled])
@@ -103,26 +104,29 @@ class CommitStatus < ActiveRecord::Base
    end

    after_transition do |commit_status, transition|
+      next unless commit_status.project
      next if transition.loopback?

      commit_status.run_after_commit do
-        if pipeline
+        if pipeline_id
          if complete? || manual?
-            PipelineProcessWorker.perform_async(pipeline.id)
+            PipelineProcessWorker.perform_async(pipeline_id)
          else
-            PipelineUpdateWorker.perform_async(pipeline.id)
+            PipelineUpdateWorker.perform_async(pipeline_id)
          end
        end

-        StageUpdateWorker.perform_async(commit_status.stage_id)
-        ExpireJobCacheWorker.perform_async(commit_status.id)
+        StageUpdateWorker.perform_async(stage_id)
+        ExpireJobCacheWorker.perform_async(id)
      end
    end

    after_transition any => :failed do |commit_status|
+      next unless commit_status.project
+
      commit_status.run_after_commit do
        MergeRequests::AddTodoWhenBuildFailsService
-          .new(pipeline.project, nil).execute(self)
+          .new(project, nil).execute(self)
      end
    end
  end
...
@@ -16,6 +16,10 @@ module HasVariable
    key: Gitlab::Application.secrets.db_key_base,
    algorithm: 'aes-256-cbc'

+  def key=(new_key)
+    super(new_key.to_s.strip)
+  end
+
  def to_runner_variable
    { key: key, value: value, public: false }
  end
...
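
The effect of the new setter, sketched against `Ci::Variable` (one of the models that includes `HasVariable`):

```ruby
variable = Ci::Variable.new
variable.key = "  DATABASE_URL \n"
variable.key # => "DATABASE_URL"

# to_s guards against nil before stripping.
variable.key = nil
variable.key # => ""
```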
@@ -265,8 +265,10 @@ module Issuable
    participants(user).include?(user)
  end

-  def to_hook_data(user, old_labels: [], old_assignees: [], old_total_time_spent: nil)
+  def to_hook_data(user, old_associations: {})
    changes = previous_changes
+    old_labels = old_associations.fetch(:labels, [])
+    old_assignees = old_associations.fetch(:assignees, [])

    if old_labels != labels
      changes[:labels] = [old_labels.map(&:hook_attrs), labels.map(&:hook_attrs)]
@@ -280,8 +282,12 @@ module Issuable
      end
    end

-    if self.respond_to?(:total_time_spent) && old_total_time_spent != total_time_spent
-      changes[:total_time_spent] = [old_total_time_spent, total_time_spent]
+    if self.respond_to?(:total_time_spent)
+      old_total_time_spent = old_associations.fetch(:total_time_spent, nil)
+
+      if old_total_time_spent != total_time_spent
+        changes[:total_time_spent] = [old_total_time_spent, total_time_spent]
+      end
    end

    Gitlab::HookData::IssuableBuilder.new(self).build(user: user, changes: changes)
...
@@ -30,6 +30,10 @@ module Geo
    class_name: 'Geo::LfsObjectDeletedEvent',
    foreign_key: :lfs_object_deleted_event_id

+  belongs_to :hashed_storage_attachments_event,
+    class_name: 'Geo::HashedStorageAttachmentsEvent',
+    foreign_key: :hashed_storage_attachments_event_id
+
  def self.latest_event
    order(id: :desc).first
  end
@@ -41,7 +45,8 @@
    repository_renamed_event ||
    repositories_changed_event ||
    hashed_storage_migrated_event ||
-    lfs_object_deleted_event
+    lfs_object_deleted_event ||
+    hashed_storage_attachments_event
  end

  def project_id
...
module Geo
  module Fdw
    class Project < ::Geo::BaseFdw
      self.table_name = Gitlab::Geo.fdw_table('projects')
    end
  end
end
module Geo
  class HashedStorageAttachmentsEvent < ActiveRecord::Base
    include Geo::Model

    belongs_to :project

    validates :project, :old_attachments_path, :new_attachments_path, presence: true
  end
end
@@ -150,27 +150,6 @@ class GeoNode < ActiveRecord::Base
    end
  end

-  # These are projects that meet the project restriction but haven't yet been
-  # synced (i.e., do not yet have a project registry entry).
-  #
-  # This query requires data from two different databases, and unavoidably
-  # plucks a list of project IDs from one into the other. This will not scale
-  # well with the number of synchronized projects - the query will increase
-  # linearly in size - so this should be replaced with postgres_fdw ASAP.
-  def unsynced_projects
-    registry_project_ids = project_registries.pluck(:project_id)
-    return projects if registry_project_ids.empty?
-
-    joined_relation = projects.joins(<<~SQL)
-      LEFT OUTER JOIN
-      (VALUES #{registry_project_ids.map { |id| "(#{id}, 't')" }.join(',')})
-      project_registry(project_id, registry_present)
-      ON projects.id = project_registry.project_id
-    SQL
-
-    joined_relation.where(project_registry: { registry_present: [nil, false] })
-  end
-
  def uploads
    if restricted_project_ids
      uploads_table = Upload.arel_table
...
@@ -64,7 +64,6 @@ class Issue < ActiveRecord::Base
  scope :public_only, -> { where(confidential: false) }

  after_save :expire_etag_cache
-  after_commit :update_project_counter_caches, on: :destroy

  attr_spammable :title, spam_title: true
  attr_spammable :description, spam_description: true
...
@@ -56,7 +56,6 @@ class MergeRequest < ActiveRecord::Base
  after_create :ensure_merge_request_diff, unless: :importing?
  after_update :reload_diff_if_branch_changed
-  after_commit :update_project_counter_caches, on: :destroy

  # When this attribute is true some MR validation is ignored
  # It allows us to close or modify broken merge requests
...
@@ -277,8 +277,9 @@ class Project < ActiveRecord::Base
  scope :pending_delete, -> { where(pending_delete: true) }
  scope :without_deleted, -> { where(pending_delete: false) }

-  scope :with_hashed_storage, -> { where('storage_version >= 1') }
-  scope :with_legacy_storage, -> { where(storage_version: [nil, 0]) }
+  scope :with_storage_feature, ->(feature) { where('storage_version >= :version', version: HASHED_STORAGE_FEATURES[feature]) }
+  scope :without_storage_feature, ->(feature) { where('storage_version < :version OR storage_version IS NULL', version: HASHED_STORAGE_FEATURES[feature]) }
+  scope :with_unmigrated_storage, -> { where('storage_version < :version OR storage_version IS NULL', version: LATEST_STORAGE_VERSION) }

  scope :sorted_by_activity, -> { reorder(last_activity_at: :desc) }
  scope :sorted_by_stars, -> { reorder('projects.star_count DESC') }
...
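
Illustrative queries against the feature-aware scopes, assuming `HASHED_STORAGE_FEATURES` maps each feature to the storage version that introduced it (e.g. `{ repository: 1, attachments: 2 }`):

```ruby
Project.with_storage_feature(:attachments)    # storage_version >= 2
Project.without_storage_feature(:attachments) # storage_version < 2 or NULL (still legacy)
Project.with_unmigrated_storage               # anything below LATEST_STORAGE_VERSION
```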
@@ -4,7 +4,6 @@ module Storage
    delegate :gitlab_shell, :repository_storage_path, to: :project

    ROOT_PATH_PREFIX = '@hashed'.freeze
-    STORAGE_VERSION = 1

    def initialize(project)
      @project = project
...
@@ -45,6 +45,8 @@ class GroupPolicy < BasePolicy
  rule { admin } .enable :read_group
  rule { has_projects } .enable :read_group

+  rule { has_access }.enable :read_namespace
+
  rule { developer }.enable :admin_milestones
  rule { reporter }.enable :admin_label
...
@@ -8,6 +8,7 @@ class NamespacePolicy < BasePolicy
  rule { owner | admin }.policy do
    enable :create_projects
    enable :admin_namespace
+    enable :read_namespace
  end

  rule { personal_project & ~can_create_personal_project }.prevent :create_projects
...
module Geo
  class HashedStorageAttachmentsEventStore < EventStore
    self.event_type = :hashed_storage_attachments_event

    private

    def build_event
      Geo::HashedStorageAttachmentsEvent.new(
        project: project,
        old_attachments_path: old_attachments_path,
        new_attachments_path: new_attachments_path
      )
    end

    def old_attachments_path
      params.fetch(:old_attachments_path)
    end

    def new_attachments_path
      params.fetch(:new_attachments_path)
    end
  end
end
module Geo
  AttachmentMigrationError = Class.new(StandardError)

  class HashedStorageAttachmentsMigrationService
    include ::Gitlab::Geo::LogHelpers

    attr_reader :project_id, :old_attachments_path, :new_attachments_path

    def initialize(project_id, old_attachments_path:, new_attachments_path:)
      @project_id = project_id
      @old_attachments_path = old_attachments_path
      @new_attachments_path = new_attachments_path
    end

    def async_execute
      Geo::HashedStorageAttachmentsMigrationWorker.perform_async(
        project_id,
        old_attachments_path,
        new_attachments_path
      )
    end

    def execute
      origin = File.join(CarrierWave.root, FileUploader.base_dir, old_attachments_path)
      target = File.join(CarrierWave.root, FileUploader.base_dir, new_attachments_path)
      move_folder!(origin, target)
    end

    private

    def project
      @project ||= Project.find(project_id)
    end

    def move_folder!(old_path, new_path)
      unless File.directory?(old_path)
        log_info("Skipped attachments migration to Hashed Storage, source path doesn't exist or is not a directory", project_id: project.id, source: old_path, target: new_path)
        return
      end

      if File.exist?(new_path)
        log_error("Cannot migrate attachments to Hashed Storage, target path already exists", project_id: project.id, source: old_path, target: new_path)
        raise AttachmentMigrationError, "Target path '#{new_path}' already exists"
      end

      # Create the hashed storage base path folder
      FileUtils.mkdir_p(File.dirname(new_path))
      FileUtils.mv(old_path, new_path)
      log_info("Migrated project attachments to Hashed Storage", project_id: project.id, source: old_path, target: new_path)

      true
    end
  end
end
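
A sketch of how a Geo secondary might schedule this migration while replaying the event log (paths are illustrative; `async_execute` simply enqueues the Sidekiq worker shown further down):

```ruby
event = Geo::HashedStorageAttachmentsEvent.last

Geo::HashedStorageAttachmentsMigrationService.new(
  event.project_id,
  old_attachments_path: event.old_attachments_path, # e.g. "group/project"
  new_attachments_path: event.new_attachments_path  # e.g. "@hashed/6b/86/6b86b273..."
).async_execute
```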
module Geo
  class HashedStorageMigrationService
+    include ::Gitlab::Geo::LogHelpers
+
    attr_reader :project_id, :old_disk_path, :new_disk_path, :old_storage_version

    def initialize(project_id, old_disk_path:, new_disk_path:, old_storage_version:)
@@ -22,9 +24,12 @@ module Geo
      project.expire_caches_before_rename(old_disk_path)

      if migrating_from_legacy_storage? && !move_repository
+        log_error("Repository could not be migrated to Hashed Storage", project_id: project.id, source: old_disk_path, target: new_disk_path)
        raise RepositoryCannotBeRenamed, "Repository #{old_disk_path} could not be renamed to #{new_disk_path}"
      end

+      log_info("Repository migrated to Hashed Storage", project_id: project.id, source: old_disk_path, target: new_disk_path)
+
      true
    end
...
@@ -37,7 +37,7 @@ module Geo
  rescue Gitlab::Git::Repository::NoRepository => e
    log_error('Invalid wiki', e)
    registry.update(force_to_redownload_wiki: true,
-                    repository_retry_count: retry_count + 1)
+                    wiki_retry_count: retry_count + 1)
  ensure
    clean_up_temporary_repository if redownload
  end
...
module Issuable
  class DestroyService < IssuableBaseService
    def execute(issuable)
      if issuable.destroy
        issuable.update_project_counter_caches
      end
    end
  end
end
@@ -165,16 +165,13 @@ class IssuableBaseService < BaseService
    # To be overridden by subclasses
  end

-  def update(issuable) # rubocop:disable Metrics/AbcSize
+  def update(issuable)
    change_state(issuable)
    change_subscription(issuable)
    change_todo(issuable)
    toggle_award(issuable)
    filter_params(issuable)
-    old_labels = issuable.labels.to_a
-    old_mentioned_users = issuable.mentioned_users.to_a
-    old_assignees = issuable.assignees.to_a
-    old_total_time_spent = issuable.total_time_spent if issuable.respond_to?(:total_time_spent)
+    old_associations = associations_before_update(issuable)

    label_ids = process_label_ids(params, existing_label_ids: issuable.label_ids)
    params[:label_ids] = label_ids if labels_changing?(issuable.label_ids, label_ids)
@@ -195,18 +192,13 @@ class IssuableBaseService < BaseService
    if issuable.with_transaction_returning_status { issuable.save }
      # We do not touch as it will affect a update on updated_at field
      ActiveRecord::Base.no_touching do
-        Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, old_labels)
+        Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, old_associations[:labels])
      end

-      handle_changes(
-        issuable,
-        old_labels: old_labels,
-        old_mentioned_users: old_mentioned_users,
-        old_assignees: old_assignees
-      )
+      handle_changes(issuable, old_associations: old_associations)

      new_assignees = issuable.assignees.to_a
-      affected_assignees = (old_assignees + new_assignees) - (old_assignees & new_assignees)
+      affected_assignees = (old_associations[:assignees] + new_assignees) - (old_associations[:assignees] & new_assignees)

      invalidate_cache_counts(issuable, users: affected_assignees.compact)
      after_update(issuable)
@@ -214,9 +206,8 @@ class IssuableBaseService < BaseService
      execute_hooks(
        issuable,
        'update',
-        old_labels: old_labels,
-        old_assignees: old_assignees,
-        old_total_time_spent: old_total_time_spent)
+        old_associations: old_associations
+      )

      issuable.update_project_counter_caches if update_project_counters
    end
@@ -269,6 +260,18 @@ class IssuableBaseService < BaseService
    end
  end

+  def associations_before_update(issuable)
+    associations =
+      {
+        labels: issuable.labels.to_a,
+        mentioned_users: issuable.mentioned_users.to_a,
+        assignees: issuable.assignees.to_a
+      }
+    associations[:total_time_spent] = issuable.total_time_spent if issuable.respond_to?(:total_time_spent)
+
+    associations
+  end
+
  def has_changes?(issuable, old_labels: [], old_assignees: [])
    valid_attrs = [:title, :description, :assignee_id, :milestone_id, :target_branch]
module Issues
  class BaseService < ::IssuableBaseService
-    def hook_data(issue, action, old_labels: [], old_assignees: [], old_total_time_spent: nil)
-      hook_data = issue.to_hook_data(current_user, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent)
+    def hook_data(issue, action, old_associations: {})
+      hook_data = issue.to_hook_data(current_user, old_associations: old_associations)
      hook_data[:object_attributes][:action] = action

      hook_data
@@ -22,8 +22,8 @@ module Issues
        issue, issue.project, current_user, old_assignees)
    end

-    def execute_hooks(issue, action = 'open', old_labels: [], old_assignees: [], old_total_time_spent: nil)
-      issue_data = hook_data(issue, action, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent)
+    def execute_hooks(issue, action = 'open', old_associations: {})
+      issue_data = hook_data(issue, action, old_associations: old_associations)
      hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks
      issue.project.execute_hooks(issue_data, hooks_scope)
      issue.project.execute_services(issue_data, hooks_scope)
...
@@ -14,9 +14,10 @@ module Issues
  end

  def handle_changes(issue, options)
-    old_labels = options[:old_labels] || []
-    old_mentioned_users = options[:old_mentioned_users] || []
-    old_assignees = options[:old_assignees] || []
+    old_associations = options.fetch(:old_associations, {})
+    old_labels = old_associations.fetch(:labels, [])
+    old_mentioned_users = old_associations.fetch(:mentioned_users, [])
+    old_assignees = old_associations.fetch(:assignees, [])

    if has_changes?(issue, old_labels: old_labels, old_assignees: old_assignees)
      todo_service.mark_pending_todos_as_done(issue, current_user)
...
@@ -6,8 +6,8 @@ module MergeRequests
    SystemNoteService.change_status(merge_request, merge_request.target_project, current_user, state, nil)
  end

-  def hook_data(merge_request, action, old_rev: nil, old_labels: [], old_assignees: [], old_total_time_spent: nil)
-    hook_data = merge_request.to_hook_data(current_user, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent)
+  def hook_data(merge_request, action, old_rev: nil, old_associations: {})
+    hook_data = merge_request.to_hook_data(current_user, old_associations: old_associations)
    hook_data[:object_attributes][:action] = action

    if old_rev && !Gitlab::Git.blank_ref?(old_rev)
      hook_data[:object_attributes][:oldrev] = old_rev
@@ -16,9 +16,9 @@ module MergeRequests
    hook_data
  end

-  def execute_hooks(merge_request, action = 'open', old_rev: nil, old_labels: [], old_assignees: [], old_total_time_spent: nil)
+  def execute_hooks(merge_request, action = 'open', old_rev: nil, old_associations: {})
    if merge_request.project
-      merge_data = hook_data(merge_request, action, old_rev: old_rev, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent)
+      merge_data = hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations)
      merge_request.project.execute_hooks(merge_data, :merge_request_hooks)
      merge_request.project.execute_services(merge_data, :merge_request_hooks)
    end
...
@@ -33,8 +33,9 @@ module MergeRequests
  end

  def handle_changes(merge_request, options)
-    old_labels = options[:old_labels] || []
-    old_mentioned_users = options[:old_mentioned_users] || []
+    old_associations = options.fetch(:old_associations, {})
+    old_labels = old_associations.fetch(:labels, [])
+    old_mentioned_users = old_associations.fetch(:mentioned_users, [])

    if has_changes?(merge_request, old_labels: old_labels)
      todo_service.mark_pending_todos_as_done(merge_request, current_user)
...
module Projects
  module HashedStorage
    AttachmentMigrationError = Class.new(StandardError)

    class MigrateAttachmentsService < BaseService
      attr_reader :logger, :old_path, :new_path

      prepend ::EE::Projects::HashedStorage::MigrateAttachmentsService

      def initialize(project, logger = nil)
        @project = project
        @logger = logger || Rails.logger
      end

      def execute
        @old_path = project.full_path
        @new_path = project.disk_path

        origin = FileUploader.dynamic_path_segment(project)
        project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
        target = FileUploader.dynamic_path_segment(project)

        result = move_folder!(origin, target)
        project.save!

        if result && block_given?
          yield
        end

        result
      end

      private

      def move_folder!(old_path, new_path)
        unless File.directory?(old_path)
          logger.info("Skipped attachments migration from '#{old_path}' to '#{new_path}', source path doesn't exist or is not a directory (PROJECT_ID=#{project.id})")
          return
        end

        if File.exist?(new_path)
          logger.error("Cannot migrate attachments from '#{old_path}' to '#{new_path}', target path already exists (PROJECT_ID=#{project.id})")
          raise AttachmentMigrationError, "Target path '#{new_path}' already exists"
        end

        # Create the hashed storage base path folder
        FileUtils.mkdir_p(File.dirname(new_path))
        FileUtils.mv(old_path, new_path)
        logger.info("Migrated project attachments from '#{old_path}' to '#{new_path}' (PROJECT_ID=#{project.id})")

        true
      end
    end
  end
end
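
Invocation sketch; the optional block runs only when the folder move succeeded (the EE prepend relies on this to log a Geo event, and the block body here is hypothetical):

```ruby
service = Projects::HashedStorage::MigrateAttachmentsService.new(project, Rails.logger)

service.execute do
  # Runs only on success, after project.save!
  Rails.logger.info("Attachments for #{project.full_path} now live under hashed storage")
end
```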
module Projects
  module HashedStorage
    class MigrateRepositoryService < BaseService
      include Gitlab::ShellAdapter

      prepend ::EE::Projects::HashedStorage::MigrateRepositoryService

      attr_reader :old_disk_path, :new_disk_path, :old_wiki_disk_path, :old_storage_version, :logger

      def initialize(project, logger = nil)
        @project = project
        @logger = logger || Rails.logger
      end

      def execute
        @old_disk_path = project.disk_path
        has_wiki = project.wiki.repository_exists?

        @old_storage_version = project.storage_version
        project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
        project.ensure_storage_path_exists

        @new_disk_path = project.disk_path

        result = move_repository(@old_disk_path, @new_disk_path)

        if has_wiki
          @old_wiki_disk_path = "#{@old_disk_path}.wiki"
          result &&= move_repository(@old_wiki_disk_path, "#{@new_disk_path}.wiki")
        end

        unless result
          rollback_folder_move
          project.storage_version = nil
        end

        project.repository_read_only = false
        project.save!

        if result && block_given?
          yield
        end

        result
      end

      private

      def move_repository(from_name, to_name)
        from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
        to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")

        # If the repository is found on neither the source nor the target path, log it:
        # that could be an issue if the project was not originally empty.
        if !from_exists && !to_exists
          logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
          return false
        elsif !from_exists
          # The repository has already been moved.
          return true
        end

        gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
      end

      def rollback_folder_move
        move_repository(@new_disk_path, @old_disk_path)
        move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki")
      end
    end
  end
end
module Projects
  class HashedStorageMigrationService < BaseService
-    include Gitlab::ShellAdapter
-    prepend ::EE::Projects::HashedStorageMigrationService
-
-    attr_reader :old_disk_path, :new_disk_path, :old_wiki_disk_path, :old_storage_version
+    attr_reader :logger

    def initialize(project, logger = nil)
      @project = project
-      @logger ||= Rails.logger
+      @logger = logger || Rails.logger
    end

    def execute
-      return if project.hashed_storage?(:repository)
-
-      @old_disk_path = project.disk_path
-      has_wiki = project.wiki.repository_exists?
-      @old_storage_version = project.storage_version
-      project.storage_version = Storage::HashedProject::STORAGE_VERSION
-      project.ensure_storage_path_exists
-      @new_disk_path = project.disk_path
-
-      result = move_repository(@old_disk_path, @new_disk_path)
-
-      if has_wiki
-        @old_wiki_disk_path = "#{@old_disk_path}.wiki"
-        result &&= move_repository(@old_wiki_disk_path, "#{@new_disk_path}.wiki")
+      # Migrate repository from Legacy to Hashed Storage
+      unless project.hashed_storage?(:repository)
+        return unless HashedStorage::MigrateRepositoryService.new(project, logger).execute
      end

-      unless result
-        rollback_folder_move
-        return
+      # Migrate attachments from Legacy to Hashed Storage
+      unless project.hashed_storage?(:attachments)
+        HashedStorage::MigrateAttachmentsService.new(project, logger).execute
      end
-
-      project.repository_read_only = false
-      project.save!
-
-      block_given? ? yield : result
    end
-
-    private
-
-    def move_repository(from_name, to_name)
-      from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
-      to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")
-
-      # If we don't find the repository on either original or target we should log that as it could be an issue if the
-      # project was not originally empty.
-      if !from_exists && !to_exists
-        logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
-        return false
-      elsif !from_exists
-        # Repository have been moved already.
-        return true
-      end
-
-      gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
-    end
-
-    def rollback_folder_move
-      move_repository(@new_disk_path, @old_disk_path)
-      move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki")
-    end
-
-    def logger
-      @logger
-    end
  end
end
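
After the refactor the top-level service is a thin orchestrator; a call sketch:

```ruby
# Runs the repository step first and stops if it fails; the attachments step
# only runs for projects whose attachments are still on legacy storage.
Projects::HashedStorageMigrationService.new(project, Rails.logger).execute
```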
@@ -31,12 +31,19 @@ class FileUploader < GitlabUploader
  # Returns a String without a trailing slash
  def self.dynamic_path_segment(project)
    if project.hashed_storage?(:attachments)
-      File.join(CarrierWave.root, base_dir, project.disk_path)
+      dynamic_path_builder(project.disk_path)
    else
-      File.join(CarrierWave.root, base_dir, project.full_path)
+      dynamic_path_builder(project.full_path)
    end
  end

+  # Auxiliary method to build dynamic path segment when not using a project model
+  #
+  # Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic
+  def self.dynamic_path_builder(path)
+    File.join(CarrierWave.root, base_dir, path)
+  end
+
  attr_accessor :model
  attr_reader :secret
...
@@ -771,6 +771,30 @@
        .help-block
          Number of Git pushes after which 'git gc' is run.

+  %fieldset
+    %legend Gitaly Timeouts
+    .form-group
+      = f.label :gitaly_timeout_default, 'Default Timeout Period', class: 'control-label col-sm-2'
+      .col-sm-10
+        = f.number_field :gitaly_timeout_default, class: 'form-control'
+        .help-block
+          Timeout for Gitaly calls from the GitLab application (in seconds). This timeout is not enforced
+          for git fetch/push operations or Sidekiq jobs.
+    .form-group
+      = f.label :gitaly_timeout_fast, 'Fast Timeout Period', class: 'control-label col-sm-2'
+      .col-sm-10
+        = f.number_field :gitaly_timeout_fast, class: 'form-control'
+        .help-block
+          Fast operation timeout (in seconds). Some Gitaly operations are expected to be fast.
+          If they exceed this threshold, there may be a problem with a storage shard and 'failing fast'
+          can help maintain the stability of the GitLab instance.
+    .form-group
+      = f.label :gitaly_timeout_medium, 'Medium Timeout Period', class: 'control-label col-sm-2'
+      .col-sm-10
+        = f.number_field :gitaly_timeout_medium, class: 'form-control'
+        .help-block
+          Medium operation timeout (in seconds). This should be a value between the Fast and the Default timeout.
+
  %fieldset
    %legend Web terminal
    .form-group
...
@@ -13,7 +13,7 @@ module Geo
  end

  def finder
-    @finder ||= RegistryFinder.new(current_node: current_node)
+    @finder ||= FileRegistryFinder.new(current_node: current_node)
  end

  # Pools for new resources to be transferred
...
module Geo
  class HashedStorageAttachmentsMigrationWorker
    include Sidekiq::Worker
    include GeoQueue

    def perform(project_id, old_attachments_path, new_attachments_path)
      Geo::HashedStorageAttachmentsMigrationService.new(
        project_id,
        old_attachments_path: old_attachments_path,
        new_attachments_path: new_attachments_path
      ).execute
    end
  end
end
@@ -12,6 +12,10 @@ module Geo
    { id: project_id, job_id: job_id } if job_id
  end

+  def finder
+    @finder ||= ProjectRegistryFinder.new(current_node: current_node)
+  end
+
  def load_pending_resources
    resources = find_project_ids_not_synced(batch_size: db_retrieve_batch_size)
    remaining_capacity = db_retrieve_batch_size - resources.size
@@ -24,19 +28,15 @@ module Geo
  end

  def find_project_ids_not_synced(batch_size:)
-    healthy_shards_restriction(current_node.unsynced_projects)
+    healthy_shards_restriction(finder.find_unsynced_projects(batch_size: batch_size))
      .reorder(last_repository_updated_at: :desc)
-      .limit(batch_size)
      .pluck(:id)
  end

  def find_project_ids_updated_recently(batch_size:)
-    current_node.project_registries
-      .dirty
-      .retry_due
-      .order(Gitlab::Database.nulls_first_order(:last_repository_synced_at, :desc))
-      .limit(batch_size)
-      .pluck(:project_id)
+    healthy_shards_restriction(finder.find_projects_updated_recently(batch_size: batch_size))
+      .order(Gitlab::Database.nulls_first_order(:last_repository_updated_at, :desc))
+      .pluck(:id)
  end

  def healthy_shards_restriction(relation)
...
@@ -2,10 +2,34 @@ class ProjectMigrateHashedStorageWorker
  include Sidekiq::Worker
  include DedicatedSidekiqQueue

+  LEASE_TIMEOUT = 30.seconds.to_i
+
  def perform(project_id)
    project = Project.find_by(id: project_id)
    return if project.nil? || project.pending_delete?

-    ::Projects::HashedStorageMigrationService.new(project, logger).execute
+    uuid = lease_for(project_id).try_obtain
+
+    if uuid
+      ::Projects::HashedStorageMigrationService.new(project, logger).execute
+    else
+      false
+    end
+  rescue => ex
+    cancel_lease_for(project_id, uuid) if uuid
+    raise ex
+  end
+
+  def lease_for(project_id)
+    Gitlab::ExclusiveLease.new(lease_key(project_id), timeout: LEASE_TIMEOUT)
+  end
+
+  private
+
+  def lease_key(project_id)
+    "project_migrate_hashed_storage_worker:#{project_id}"
+  end
+
+  def cancel_lease_for(project_id, uuid)
+    Gitlab::ExclusiveLease.cancel(lease_key(project_id), uuid)
  end
end
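
A generic sketch of the exclusive-lease pattern used above. Note that the worker does not cancel the lease on success; it lets it expire so rapid re-enqueues of the same project stay no-ops, and cancels only when an error is raised:

```ruby
lease = Gitlab::ExclusiveLease.new("project_migrate_hashed_storage_worker:42",
                                   timeout: 30.seconds.to_i)

if (uuid = lease.try_obtain)
  begin
    migrate!(42) # hypothetical critical section
  rescue => ex
    Gitlab::ExclusiveLease.cancel("project_migrate_hashed_storage_worker:42", uuid)
    raise ex
  end
else
  # Another holder has the lease; skip this run.
end
```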
@@ -45,9 +45,17 @@ class StuckCiJobsWorker
  end

  def search(status, timeout)
-    builds = Ci::Build.where(status: status).where('ci_builds.updated_at < ?', timeout.ago)
-    builds.joins(:project).merge(Project.without_deleted).includes(:tags, :runner, project: :namespace).find_each(batch_size: 50).each do |build|
-      yield(build)
+    loop do
+      jobs = Ci::Build.where(status: status)
+        .where('ci_builds.updated_at < ?', timeout.ago)
+        .includes(:tags, :runner, project: :namespace)
+        .limit(100)
+        .to_a
+
+      break if jobs.empty?
+
+      jobs.each do |job|
+        yield(job)
+      end
    end
  end
...
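
The loop only terminates because the caller is expected to move each yielded job out of the searched status; a sketch of that contract (`drop_job` is hypothetical):

```ruby
search('running', 1.day) do |job|
  drop_job(job) # must change job.status, shrinking the next batch
end
```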
---
title: Geo - Does not sync repositories on unhealthy shards in non-backfill conditions
merge_request:
author:
type: fixed
---
title: Strip leading & trailing whitespace in CI/CD secret variable's environment scope
merge_request: 3563
author:
type: fixed
---
title: 'Geo: replicate Attachments migration to Hashed Storage in secondary node'
merge_request: 3544
author:
type: added
---
title: Fix tasklist for epics
merge_request:
author:
type: fixed
---
title: Fix Geo wiki sync error not increasing retry count
merge_request:
author:
type: fixed
---
title: Create issuable destroy service
merge_request: 15604
author: George Andrinopoulos
type: other
---
title: Strip leading & trailing whitespace in CI/CD secret variable keys
merge_request: 15615
author:
type: fixed
---
title: Upgrade seed-fu to 2.3.7
merge_request: 15607
author: Takuya Noguchi
type: other
---
title: Add timeouts for Gitaly calls
merge_request: 15047
author:
type: performance
---
title: Hashed Storage migration script now supports migrating project attachments
merge_request: 15352
author:
type: added
---
title: Optimise StuckCiJobsWorker using cheap SQL query outside, and expensive inside
merge_request:
author:
type: performance
---
title: New API endpoint - list jobs for a specified runner
merge_request: 15432
author:
type: added
---
title: Add new API endpoint - get a namespace by ID
merge_request: 15442
author:
type: added
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddGitalyTimeoutPropertiesToApplicationSettings < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_column_with_default :application_settings,
                            :gitaly_timeout_default,
                            :integer,
                            default: 55

    add_column_with_default :application_settings,
                            :gitaly_timeout_medium,
                            :integer,
                            default: 30

    add_column_with_default :application_settings,
                            :gitaly_timeout_fast,
                            :integer,
                            default: 10
  end

  def down
    remove_column :application_settings, :gitaly_timeout_default
    remove_column :application_settings, :gitaly_timeout_medium
    remove_column :application_settings, :gitaly_timeout_fast
  end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddAttachmentsMigrationToGeoMigrationEvents < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def change
    create_table :geo_hashed_storage_attachments_events, id: :bigserial do |t|
      t.references :project, index: true, foreign_key: { on_delete: :cascade }, null: false
      t.text :old_attachments_path, null: false
      t.text :new_attachments_path, null: false
    end

    add_column :geo_event_log, :hashed_storage_attachments_event_id, :integer, limit: 8
  end
end
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20171121144800) do
+ActiveRecord::Schema.define(version: 20171124070437) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
@@ -174,6 +174,9 @@ ActiveRecord::Schema.define(version: 20171121144800) do
    t.integer "throttle_authenticated_web_period_in_seconds", default: 3600, null: false
    t.boolean "password_authentication_enabled_for_web"
    t.boolean "password_authentication_enabled_for_git", default: true
+    t.integer "gitaly_timeout_default", default: 55, null: false
+    t.integer "gitaly_timeout_medium", default: 30, null: false
+    t.integer "gitaly_timeout_fast", default: 10, null: false
  end

  create_table "approvals", force: :cascade do |t|
@@ -882,6 +885,7 @@ ActiveRecord::Schema.define(version: 20171121144800) do
    t.integer "repository_created_event_id", limit: 8
    t.integer "hashed_storage_migrated_event_id", limit: 8
    t.integer "lfs_object_deleted_event_id", limit: 8
+    t.integer "hashed_storage_attachments_event_id", limit: 8
  end

  add_index "geo_event_log", ["repositories_changed_event_id"], name: "index_geo_event_log_on_repositories_changed_event_id", using: :btree
@@ -890,6 +894,14 @@ ActiveRecord::Schema.define(version: 20171121144800) do
  add_index "geo_event_log", ["repository_renamed_event_id"], name: "index_geo_event_log_on_repository_renamed_event_id", using: :btree
  add_index "geo_event_log", ["repository_updated_event_id"], name: "index_geo_event_log_on_repository_updated_event_id", using: :btree

+  create_table "geo_hashed_storage_attachments_events", id: :bigserial, force: :cascade do |t|
+    t.integer "project_id", null: false
+    t.text "old_attachments_path", null: false
+    t.text "new_attachments_path", null: false
+  end
+
+  add_index "geo_hashed_storage_attachments_events", ["project_id"], name: "index_geo_hashed_storage_attachments_events_on_project_id", using: :btree
+
  create_table "geo_hashed_storage_migrated_events", id: :bigserial, force: :cascade do |t|
    t.integer "project_id", null: false
    t.text "repository_storage_name", null: false
@@ -2450,6 +2462,7 @@ ActiveRecord::Schema.define(version: 20171121144800) do
  add_foreign_key "geo_event_log", "geo_repository_deleted_events", column: "repository_deleted_event_id", name: "fk_c4b1c1f66e", on_delete: :cascade
  add_foreign_key "geo_event_log", "geo_repository_renamed_events", column: "repository_renamed_event_id", name: "fk_86c84214ec", on_delete: :cascade
  add_foreign_key "geo_event_log", "geo_repository_updated_events", column: "repository_updated_event_id", on_delete: :cascade
+  add_foreign_key "geo_hashed_storage_attachments_events", "projects", on_delete: :cascade
  add_foreign_key "geo_hashed_storage_migrated_events", "projects", on_delete: :cascade
  add_foreign_key "geo_node_namespace_links", "geo_nodes", on_delete: :cascade
  add_foreign_key "geo_node_namespace_links", "namespaces", on_delete: :cascade
...
@@ -70,6 +70,16 @@ Database nodes run two services besides PostgreSQL

Alongside pgbouncer, there is a consul agent that watches the status of the PostgreSQL service. If that status changes, consul runs a script which updates the configuration and reloads pgbouncer

+#### Connection flow
+
+Each service in the package comes with a set of [default ports](https://docs.gitlab.com/omnibus/package-information/defaults.html#ports). You may need to make specific firewall rules for the connections listed below:
+
+- Application servers connect to the [PgBouncer default port](https://docs.gitlab.com/omnibus/package-information/defaults.html#pgbouncer)
+- PgBouncer connects to the primary database server's [PostgreSQL default port](https://docs.gitlab.com/omnibus/package-information/defaults.html#postgresql)
+- Repmgr connects to the database servers' [PostgreSQL default port](https://docs.gitlab.com/omnibus/package-information/defaults.html#postgresql)
+- Postgres secondaries connect to the primary database server's [PostgreSQL default port](https://docs.gitlab.com/omnibus/package-information/defaults.html#postgresql)
+- Consul servers and agents connect to each other's [Consul default ports](https://docs.gitlab.com/omnibus/package-information/defaults.html#consul)
+
### Required information

Before proceeding with configuration, you will need to collect all the necessary
...
# Repository Storage Rake Tasks # Repository Storage Rake Tasks
This is a collection of rake tasks you can use to help you list and migrate This is a collection of rake tasks you can use to help you list and migrate
existing projects from Legacy storage to the new Hashed storage type. existing projects and attachments associated with it from Legacy storage to
the new Hashed storage type.
You can read more about the storage types [here][storage-types]. You can read more about the storage types [here][storage-types].
## Migrate existing projects to Hashed storage
Before migrating your existing projects, you should
[enable hashed storage][storage-migration] for the new projects as well.
This task will schedule all your existing projects and attachments associated with it to be migrated to the
**Hashed** storage type:
**Omnibus Installation**
```bash
gitlab-rake gitlab:storage:migrate_to_hashed
```
**Source Installation**
```bash
rake gitlab:storage:migrate_to_hashed
```
You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen.
There is a specific Queue you can watch to see how long it will take to finish: **project_migrate_hashed_storage**
After it reaches zero, you can confirm every project has been migrated by running the commands bellow.
If you find it necessary, you can run this migration script again to schedule missing projects.
Any error or warning will be logged in the sidekiq's log file.
You only need the `gitlab:storage:migrate_to_hashed` rake task to migrate your repositories, but we have additional
commands below that helps you inspect projects and attachments in both legacy and hashed storage.
## List projects on Legacy storage ## List projects on Legacy storage
To have a simple summary of projects using **Legacy** storage: To have a simple summary of projects using **Legacy** storage:
...@@ -73,35 +106,73 @@ rake gitlab:storage:list_hashed_projects ...@@ -73,35 +106,73 @@ rake gitlab:storage:list_hashed_projects
``` ```
## List attachments on Legacy storage

To have a simple summary of project attachments using **Legacy** storage:

**Omnibus Installation**

```bash
gitlab-rake gitlab:storage:legacy_attachments
```

**Source Installation**

```bash
rake gitlab:storage:legacy_attachments
```

------

To list project attachments using **Legacy** storage:

**Omnibus Installation**

```bash
gitlab-rake gitlab:storage:list_legacy_attachments
```

**Source Installation**

```bash
rake gitlab:storage:list_legacy_attachments
```

## List attachments on Hashed storage

To have a simple summary of project attachments using **Hashed** storage:

**Omnibus Installation**

```bash
gitlab-rake gitlab:storage:hashed_attachments
```

**Source Installation**

```bash
rake gitlab:storage:hashed_attachments
```

------

To list project attachments using **Hashed** storage:

**Omnibus Installation**

```bash
gitlab-rake gitlab:storage:list_hashed_attachments
```

**Source Installation**

```bash
rake gitlab:storage:list_hashed_attachments
```
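The listing tasks print the first 500 entries by default. Judging from the rake task source further down in this diff (it reads `ENV.fetch('LIMIT', 500)`), you can adjust this with a `LIMIT` environment variable, for example:

```bash
gitlab-rake gitlab:storage:list_legacy_attachments LIMIT=100
```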
[storage-types]: ../repository_storage_types.md
[storage-migration]: ../repository_storage_types.md#how-to-migrate-to-hashed-storage [storage-migration]: ../repository_storage_types.md#how-to-migrate-to-hashed-storage
...@@ -90,3 +90,55 @@ Example response:
}
]
```
## Get namespace by ID
Get a namespace by ID.
```
GET /namespaces/:id
```
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | ID or path of the namespace |
Example request:
```bash
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/namespaces/2
```
Example response:
```json
{
"id": 2,
"name": "group1",
"path": "group1",
"kind": "group",
"full_path": "group1",
"parent_id": "null",
"members_count_with_descendants": 2
}
```
Example request:
```bash
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/namespaces/group1
```
Example response:
```json
{
"id": 2,
"name": "group1",
"path": "group1",
"kind": "group",
"full_path": "group1",
"parent_id": "null",
"members_count_with_descendants": 2
}
```
...@@ -215,6 +215,91 @@ DELETE /runners/:id
curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/6"
```
## List runner's jobs
List jobs that are being processed or were processed by the specified Runner.
```
GET /runners/:id/jobs
```
| Attribute | Type | Required | Description |
|-----------|---------|----------|---------------------|
| `id` | integer | yes | The ID of a runner |
| `status` | string | no | Status of the job; one of: `running`, `success`, `failed`, `canceled` |
```
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/1/jobs?status=running"
```
Example response:
```json
[
{
"id": 2,
"status": "running",
"stage": "test",
"name": "test",
"ref": "master",
"tag": false,
"coverage": null,
"created_at": "2017-11-16T08:50:29.000Z",
"started_at": "2017-11-16T08:51:29.000Z",
"finished_at": "2017-11-16T08:53:29.000Z",
"duration": 120,
"user": {
"id": 1,
"name": "John Doe2",
"username": "user2",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/c922747a93b40d1ea88262bf1aebee62?s=80&d=identicon",
"web_url": "http://localhost/user2",
"created_at": "2017-11-16T18:38:46.000Z",
"bio": null,
"location": null,
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": null
},
"commit": {
"id": "97de212e80737a608d939f648d959671fb0a0142",
"short_id": "97de212e",
"title": "Update configuration\r",
"created_at": "2017-11-16T08:50:28.000Z",
"parent_ids": [
"1b12f15a11fc6e62177bef08f47bc7b5ce50b141",
"498214de67004b1da3d820901307bed2a68a8ef6"
],
"message": "See merge request !123",
"author_name": "John Doe2",
"author_email": "user2@example.org",
"authored_date": "2017-11-16T08:50:27.000Z",
"committer_name": "John Doe2",
"committer_email": "user2@example.org",
"committed_date": "2017-11-16T08:50:27.000Z"
},
"pipeline": {
"id": 2,
"sha": "97de212e80737a608d939f648d959671fb0a0142",
"ref": "master",
"status": "running"
},
"project": {
"id": 1,
"description": null,
"name": "project1",
"name_with_namespace": "John Doe2 / project1",
"path": "project1",
"path_with_namespace": "namespace1/project1",
"created_at": "2017-11-16T18:38:46.620Z"
}
}
]
```
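The endpoint is paginated (its declaration further down in this diff includes `use :pagination`), so the standard `page` and `per_page` parameters should also apply, for example:

```bash
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/1/jobs?status=success&per_page=20&page=2"
```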
## List project's runners

List all runners (specific and shared) available in the project. Shared runners
......
...@@ -82,7 +82,7 @@ added directly to your configured cluster. Those applications are needed for

| Application | GitLab version | Description |
| ----------- | :------------: | ----------- |
| [Helm Tiller](https://docs.helm.sh/) | 10.2+ | Helm is a package manager for Kubernetes and is required to install all the other applications. It will be automatically installed as a dependency when you try to install a different app. It is installed in its own pod inside the cluster which can run the `helm` CLI in a safe environment. |
| [Ingress](https://kubernetes.io/docs/concepts/services-networking/ingress/) | 10.2+ | Ingress can provide load balancing, SSL termination, and name-based virtual hosting. It acts as a web proxy for your applications and is useful if you want to use [Auto DevOps](../../../topics/autodevops/index.md) or deploy your own web apps. |

## Enabling or disabling the Cluster integration
......
...@@ -12,6 +12,10 @@
type: String,
required: true,
},
updateEndpoint: {
type: String,
required: true,
},
canUpdate: {
required: true,
type: Boolean,
...@@ -111,7 +115,9 @@
:can-update="canUpdate"
:can-destroy="canDestroy"
:endpoint="endpoint"
:update-endpoint="updateEndpoint"
:issuable-ref="issuableRef"
issuable-type="epic"
:initial-title-html="initialTitleHtml"
:initial-title-text="initialTitleText"
:initial-description-html="initialDescriptionHtml"
......
module Geo
class FileRegistryFinder < RegistryFinder
def find_failed_objects(batch_size:)
Geo::FileRegistry
.failed
...@@ -28,7 +22,7 @@ module Geo
# Selective project replication adds a wrinkle to FDW queries, so
# we fallback to the legacy version for now.
relation =
if fdw?
fdw_find_nonreplicated_lfs_objects
else
legacy_find_nonreplicated_lfs_objects(except_registry_ids: except_registry_ids)
...@@ -54,7 +48,7 @@ module Geo
# Selective project replication adds a wrinkle to FDW queries, so
# we fallback to the legacy version for now.
relation =
if fdw?
fdw_find_nonreplicated_uploads
else
legacy_find_nonreplicated_uploads(except_registry_ids: except_registry_ids)
...@@ -68,10 +62,6 @@ module Geo
protected
#
# FDW accessors
#
......
module Geo
class ProjectRegistryFinder < RegistryFinder
def find_unsynced_projects(batch_size:)
relation =
if fdw?
fdw_find_unsynced_projects
else
legacy_find_unsynced_projects
end
relation.limit(batch_size)
end
def find_projects_updated_recently(batch_size:)
relation =
if fdw?
fdw_find_projects_updated_recently
else
legacy_find_projects_updated_recently
end
relation.limit(batch_size)
end
protected
def fdw_table
Geo::Fdw::Project.table_name
end
#
# FDW accessors
#
# @return [ActiveRecord::Relation<Geo::Fdw::Project>]
def fdw_find_unsynced_projects
Geo::Fdw::Project.joins("LEFT OUTER JOIN project_registry ON project_registry.project_id = #{fdw_table}.id")
.where('project_registry.project_id IS NULL')
end
# @return [ActiveRecord::Relation<Geo::Fdw::Project>]
def fdw_find_projects_updated_recently
Geo::Fdw::Project.joins("INNER JOIN project_registry ON project_registry.project_id = #{fdw_table}.id")
.merge(Geo::ProjectRegistry.dirty)
.merge(Geo::ProjectRegistry.retry_due)
end
#
# Legacy accessors (non FDW)
#
# @return [ActiveRecord::Relation<Project>] list of unsynced projects
def legacy_find_unsynced_projects
registry_project_ids = current_node.project_registries.pluck(:project_id)
return current_node.projects if registry_project_ids.empty?
joined_relation = current_node.projects.joins(<<~SQL)
LEFT OUTER JOIN
(VALUES #{registry_project_ids.map { |id| "(#{id}, 't')" }.join(',')})
project_registry(project_id, registry_present)
ON projects.id = project_registry.project_id
SQL
joined_relation.where(project_registry: { registry_present: [nil, false] })
end
# @return [ActiveRecord::Relation<Project>] list of projects updated recently
def legacy_find_projects_updated_recently
registry_project_ids = current_node.project_registries.dirty.retry_due.pluck(:project_id)
return Project.none if registry_project_ids.empty?
joined_relation = current_node.projects.joins(<<~SQL)
INNER JOIN
(VALUES #{registry_project_ids.map { |id| "(#{id})" }.join(',')})
project_registry(project_id)
ON projects.id = project_registry.project_id
SQL
joined_relation
end
end
end
module Geo
class RegistryFinder
attr_reader :current_node
def initialize(current_node: nil)
@current_node = current_node
end
protected
def fdw?
# Selective project replication adds a wrinkle to FDW
# queries, so we fallback to the legacy version for now.
Gitlab::Geo.fdw? && !selective_sync
end
def selective_sync
current_node.restricted_project_ids
end
end
end
...@@ -11,6 +11,10 @@ module EE
message: ::Gitlab::Regex.environment_scope_regex_message }
)
end
def environment_scope=(new_environment_scope)
super(new_environment_scope.to_s.strip)
end
end
end
end
module EE
module Projects
module HashedStorage
module MigrateAttachmentsService
def execute
raise NotImplementedError.new unless defined?(super)
super do
::Geo::HashedStorageAttachmentsEventStore.new(
project,
old_attachments_path: old_path,
new_attachments_path: new_path
).create
end
end
end
end
end
end
module EE
module Projects
module HashedStorage
module MigrateRepositoryService
def execute
raise NotImplementedError.new unless defined?(super)
super do
::Geo::HashedStorageMigratedEventStore.new(
project,
old_storage_version: old_storage_version,
old_disk_path: old_disk_path,
old_wiki_disk_path: old_wiki_disk_path
).create
end
end
end
end
end
end
module EE
module Projects
module HashedStorageMigrationService
def execute
raise NotImplementedError.new unless defined?(super)
super do
::Geo::HashedStorageMigratedEventStore.new(
project,
old_storage_version: old_storage_version,
old_disk_path: old_disk_path,
old_wiki_disk_path: old_wiki_disk_path
).create
end
end
end
end
end
...@@ -35,7 +35,7 @@ module Projects
repository.relative_path,
repository.gl_repository)
new_repository.fetch_repository_as_mirror(repository)
end
def mark_old_paths_for_archive
......
...@@ -90,16 +90,21 @@ module API
expose :group_access, as: :group_access_level
end
class ProjectIdentity < Grape::Entity
expose :id, :description
expose :name, :name_with_namespace
expose :path, :path_with_namespace
expose :created_at
end
class BasicProjectDetails < ProjectIdentity
expose :default_branch, :tag_list
expose :ssh_url_to_repo, :http_url_to_repo, :web_url
expose :avatar_url do |project, options|
project.avatar_url(only_path: false)
end
expose :star_count, :forks_count
expose :last_activity_at
end
class Project < BasicProjectDetails
...@@ -938,17 +943,24 @@ module API
expose :id, :sha, :ref, :status
end
class JobBasic < Grape::Entity
expose :id, :status, :stage, :name, :ref, :tag, :coverage
expose :created_at, :started_at, :finished_at
expose :duration
expose :user, with: User
expose :commit, with: Commit
expose :pipeline, with: PipelineBasic
end
class Job < JobBasic
expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
expose :runner, with: Runner
end
class JobBasicWithProject < JobBasic
expose :project, with: ProjectIdentity
end
class Trigger < Grape::Entity
expose :id
expose :token, :description
......
...@@ -52,6 +52,10 @@ module API
initial_current_user != current_user
end
def user_namespace
@user_namespace ||= find_namespace!(params[:id])
end
def user_group
@group ||= find_group!(params[:id])
end
...@@ -109,14 +113,6 @@ module API
end
end
def find_group!(id)
# CI job token authentication:
# currently we do not allow any group access for CI job token
...@@ -131,6 +127,24 @@ module API
end
end
def find_namespace(id)
if id.to_s =~ /^\d+$/
Namespace.find_by(id: id)
else
Namespace.find_by_full_path(id)
end
end
def find_namespace!(id)
namespace = find_namespace(id)
if can?(current_user, :read_namespace, namespace)
namespace
else
not_found!('Namespace')
end
end
def find_project_label(id)
label = available_labels.find_by_id(id) || available_labels.find_by_title(id)
label || not_found!('Label')
......
...@@ -261,7 +261,9 @@ module API
authorize!(:destroy_issue, issue)
destroy_conditionally!(issue) do |issue|
Issuable::DestroyService.new(user_project, current_user).execute(issue)
end
end
desc 'List merge requests closing issue' do
......
...@@ -179,7 +179,9 @@ module API
authorize!(:destroy_merge_request, merge_request)
destroy_conditionally!(merge_request) do |merge_request|
Issuable::DestroyService.new(user_project, current_user).execute(merge_request)
end
end
params do
......
...@@ -40,6 +40,16 @@ module API
render_validation_error!(namespace)
end
end
desc 'Get a namespace by ID' do
success Entities::Namespace
end
params do
requires :id, type: String, desc: "Namespace's ID or path"
end
get ':id' do
present user_namespace, with: Entities::Namespace, current_user: current_user
end
end
end
end
...@@ -84,6 +84,23 @@ module API
destroy_conditionally!(runner)
end
desc 'List jobs running on a runner' do
success Entities::JobBasicWithProject
end
params do
requires :id, type: Integer, desc: 'The ID of the runner'
optional :status, type: String, desc: 'Status of the job', values: Ci::Build::AVAILABLE_STATUSES
use :pagination
end
get ':id/jobs' do
runner = get_runner(params[:id])
authenticate_list_runners_jobs!(runner)
jobs = RunnerJobsFinder.new(runner, params).execute
present paginate(jobs), with: Entities::JobBasicWithProject
end
end
params do
...@@ -192,6 +209,12 @@ module API
forbidden!("No access granted") unless user_can_access_runner?(runner)
end
def authenticate_list_runners_jobs!(runner)
return if current_user.admin?
forbidden!("No access granted") unless user_can_access_runner?(runner)
end
def user_can_access_runner?(runner)
current_user.ci_authorized_runners.exists?(runner.id)
end
......
...@@ -123,6 +123,9 @@ module API
end
optional :terminal_max_session_time, type: Integer, desc: 'Maximum time for web terminal websocket connection (in seconds). Set to 0 for unlimited time.'
optional :polling_interval_multiplier, type: BigDecimal, desc: 'Interval multiplier used by endpoints that perform polling. Set to 0 to disable polling.'
optional :gitaly_timeout_default, type: Integer, desc: 'Default Gitaly timeout, in seconds. Set to 0 to disable timeouts.'
optional :gitaly_timeout_medium, type: Integer, desc: 'Medium Gitaly timeout, in seconds. Set to 0 to disable timeouts.'
optional :gitaly_timeout_fast, type: Integer, desc: 'Gitaly fast operation timeout, in seconds. Set to 0 to disable timeouts.'
ApplicationSetting::SUPPORTED_KEY_TYPES.each do |type|
optional :"#{type}_key_restriction",
......
...@@ -183,6 +183,23 @@ module Gitlab
job_id: job_id)
end
def handle_hashed_storage_attachments_event(event, created_at)
job_id = ::Geo::HashedStorageAttachmentsMigrationService.new(
event.project_id,
old_attachments_path: event.old_attachments_path,
new_attachments_path: event.new_attachments_path
).async_execute
logger.event_info(
created_at,
message: 'Migrating attachments to hashed storage',
project_id: event.project_id,
old_attachments_path: event.old_attachments_path,
new_attachments_path: event.new_attachments_path,
job_id: job_id
)
end
def handle_lfs_object_deleted_event(event, created_at)
file_path = File.join(LfsObjectUploader.local_store_path, event.file_path)
......
...@@ -18,6 +18,7 @@ module Gitlab
GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
].freeze
SEARCH_CONTEXT_LINES = 3
GITALY_INTERNAL_URL = 'ssh://gitaly/internal.git'.freeze
NoRepository = Class.new(StandardError)
InvalidBlobName = Class.new(StandardError)
...@@ -1150,12 +1151,25 @@ module Gitlab
@has_visible_content = has_local_branches?
end
def fetch_repository_as_mirror(repository)
remote_name = "tmp-#{SecureRandom.hex}"
# Notice that this feature flag is not for `fetch_repository_as_mirror`
# as a whole but for the fetching mechanism (file path or gitaly-ssh).
url, env = gitaly_migrate(:fetch_internal) do |is_enabled|
if is_enabled
repository = RemoteRepository.new(repository) unless repository.is_a?(RemoteRepository)
[GITALY_INTERNAL_URL, repository.fetch_env]
else
[repository.path, nil]
end
end
add_remote(remote_name, url)
set_remote_as_mirror(remote_name)
fetch_remote(remote_name, env: env)
ensure
remove_remote(remote_name)
end
def blob_at(sha, path)
...@@ -1661,7 +1675,7 @@ end
end
def gitaly_fetch_ref(source_repository, source_ref:, target_ref:)
args = %W(fetch --no-tags -f #{GITALY_INTERNAL_URL} #{source_ref}:#{target_ref})
run_git(args, env: source_repository.fetch_env)
end
...@@ -1681,6 +1695,10 @@ module Gitlab
rescue Rugged::ReferenceError
raise ArgumentError, 'Invalid merge source'
end
def fetch_remote(remote_name = 'origin', env: nil)
run_git(['fetch', remote_name], env: env).last.zero?
end
end
end
end
...@@ -31,19 +31,6 @@ module Gitlab
end
end
def remote_tags(remote)
# Each line has this format: "dc872e9fa6963f8f03da6c8f6f264d0845d6b092\trefs/tags/v1.10.0\n"
# We want to convert it to: [{ 'v1.10.0' => 'dc872e9fa6963f8f03da6c8f6f264d0845d6b092' }, ...]
......
...@@ -93,11 +93,11 @@ module Gitlab
# kwargs.merge(deadline: Time.now + 10)
# end
#
def self.call(storage, service, rpc, request, remote_storage: nil, timeout: nil)
start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
enforce_gitaly_request_limits(:call)
kwargs = request_kwargs(storage, timeout, remote_storage: remote_storage)
kwargs = yield(kwargs) if block_given?
stub(service, storage).__send__(rpc, request, kwargs) # rubocop:disable GitlabSecurity/PublicSend
...@@ -105,7 +105,7 @@ module Gitlab
self.query_time += Process.clock_gettime(Process::CLOCK_MONOTONIC) - start
end
def self.request_kwargs(storage, timeout, remote_storage: nil)
encoded_token = Base64.strict_encode64(token(storage).to_s)
metadata = {
'authorization' => "Bearer #{encoded_token}",
...@@ -117,7 +117,22 @@ module Gitlab
metadata['call_site'] = feature.to_s if feature
metadata['gitaly-servers'] = address_metadata(remote_storage) if remote_storage
result = { metadata: metadata }
# nil timeout indicates that we should use the default
timeout = default_timeout if timeout.nil?
return result unless timeout > 0
# Do not use `Time.now` for deadline calculation, since it
# will be affected by Timecop in some tests, but grpc's c-core
# uses system time instead of timecop's time, so tests will fail
# `Time.at(Process.clock_gettime(Process::CLOCK_REALTIME))` will
# circumvent timecop
deadline = Time.at(Process.clock_gettime(Process::CLOCK_REALTIME)) + timeout
result[:deadline] = deadline
result
end
def self.token(storage)
...@@ -290,6 +305,26 @@ module Gitlab
Google::Protobuf::RepeatedField.new(:bytes, a.map { |s| self.encode(s) } )
end
# The default timeout on all Gitaly calls
def self.default_timeout
return 0 if Sidekiq.server?
timeout(:gitaly_timeout_default)
end
def self.fast_timeout
timeout(:gitaly_timeout_fast)
end
def self.medium_timeout
timeout(:gitaly_timeout_medium)
end
def self.timeout(timeout_name)
Gitlab::CurrentSettings.current_application_settings[timeout_name]
end
private_class_method :timeout
# Count a stack. Used for n+1 detection
def self.count_stack
return unless RequestStore.active?
......
...@@ -16,7 +16,7 @@ module Gitlab
revision: GitalyClient.encode(revision)
)
response = GitalyClient.call(@repository.storage, :commit_service, :list_files, request, timeout: GitalyClient.medium_timeout)
response.flat_map do |msg|
msg.paths.map { |d| EncodingHelper.encode!(d.dup) }
end
...@@ -29,7 +29,7 @@ module Gitlab
child_id: child_id
)
GitalyClient.call(@repository.storage, :commit_service, :commit_is_ancestor, request, timeout: GitalyClient.fast_timeout).value
end
def diff(from, to, options = {})
...@@ -77,7 +77,7 @@ module Gitlab
limit: limit.to_i
)
response = GitalyClient.call(@repository.storage, :commit_service, :tree_entry, request, timeout: GitalyClient.medium_timeout)
entry = nil
data = ''
...@@ -102,7 +102,7 @@ module Gitlab
path: path.present? ? GitalyClient.encode(path) : '.'
)
response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request, timeout: GitalyClient.medium_timeout)
response.flat_map do |message|
message.entries.map do |gitaly_tree_entry|
...@@ -129,7 +129,7 @@ module Gitlab
request.before = Google::Protobuf::Timestamp.new(seconds: options[:before].to_i) if options[:before].present?
request.path = options[:path] if options[:path].present?
GitalyClient.call(@repository.storage, :commit_service, :count_commits, request, timeout: GitalyClient.medium_timeout).count
end
def last_commit_for_path(revision, path)
...@@ -139,7 +139,7 @@ module Gitlab
path: GitalyClient.encode(path.to_s)
)
gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request, timeout: GitalyClient.fast_timeout).commit
return unless gitaly_commit
Gitlab::Git::Commit.new(@repository, gitaly_commit)
...@@ -152,7 +152,7 @@ module Gitlab
to: to
)
response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
...@@ -165,7 +165,7 @@ module Gitlab
)
request.order = opts[:order].upcase if opts[:order].present?
response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
...@@ -179,7 +179,7 @@ module Gitlab
offset: offset.to_i
)
response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
...@@ -197,7 +197,7 @@ module Gitlab
path: GitalyClient.encode(path)
)
response = GitalyClient.call(@repository.storage, :commit_service, :raw_blame, request, timeout: GitalyClient.medium_timeout)
response.reduce("") { |memo, msg| memo << msg.data }
end
...@@ -207,7 +207,7 @@ module Gitlab
revision: GitalyClient.encode(revision)
)
response = GitalyClient.call(@repository.storage, :commit_service, :find_commit, request, timeout: GitalyClient.medium_timeout)
response.commit
end
...@@ -217,7 +217,7 @@ module Gitlab
repository: @gitaly_repo,
revision: GitalyClient.encode(revision)
)
response = GitalyClient.call(@repository.storage, :diff_service, :commit_patch, request, timeout: GitalyClient.medium_timeout)
response.sum(&:data)
end
...@@ -227,7 +227,7 @@ module Gitlab
repository: @gitaly_repo,
revision: GitalyClient.encode(revision)
)
GitalyClient.call(@repository.storage, :commit_service, :commit_stats, request, timeout: GitalyClient.medium_timeout)
end
def find_commits(options)
...@@ -245,7 +245,7 @@ module Gitlab
request.paths = GitalyClient.encode_repeated(Array(options[:path])) if options[:path].present?
response = GitalyClient.call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
...@@ -259,7 +259,7 @@ module Gitlab
request_params.merge!(Gitlab::Git::DiffCollection.collection_limits(options).to_h)
request = Gitaly::CommitDiffRequest.new(request_params)
response = GitalyClient.call(@repository.storage, :diff_service, :commit_diff, request, timeout: GitalyClient.medium_timeout)
GitalyClient::DiffStitcher.new(response)
end
......
...@@ -46,7 +46,8 @@ module Gitlab
commit_id: commit_id,
prefix: ref_prefix
)
response = GitalyClient.call(@storage, :ref_service, :find_ref_name, request, timeout: GitalyClient.medium_timeout)
encode!(response.name.dup)
end
def count_tag_names
......
...@@ -10,7 +10,9 @@ module Gitlab
def exists?
request = Gitaly::RepositoryExistsRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :repository_service, :repository_exists, request, timeout: GitalyClient.fast_timeout)
response.exists
end
def garbage_collect(create_bitmap)
...@@ -30,7 +32,8 @@ module Gitlab
def repository_size
request = Gitaly::RepositorySizeRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :repository_service, :repository_size, request)
response.size
end
def apply_gitattributes(revision)
...@@ -61,7 +64,7 @@ module Gitlab
def has_local_branches?
request = Gitaly::HasLocalBranchesRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :repository_service, :has_local_branches, request, timeout: GitalyClient.fast_timeout)
response.value
end
......
...@@ -2,10 +2,10 @@ namespace :gitlab do
namespace :storage do
desc 'GitLab | Storage | Migrate existing projects to Hashed Storage'
task migrate_to_hashed: :environment do
legacy_projects_count = Project.with_unmigrated_storage.count
if legacy_projects_count == 0
puts 'There are no projects requiring storage migration. Nothing to do!'
next
end
...@@ -23,22 +23,42 @@ namespace :gitlab do
desc 'Gitlab | Storage | Summary of existing projects using Legacy Storage'
task legacy_projects: :environment do
relation_summary('projects', Project.without_storage_feature(:repository))
end
desc 'Gitlab | Storage | List existing projects using Legacy Storage'
task list_legacy_projects: :environment do
projects_list('projects using Legacy Storage', Project.without_storage_feature(:repository))
end
desc 'Gitlab | Storage | Summary of existing projects using Hashed Storage'
task hashed_projects: :environment do
relation_summary('projects using Hashed Storage', Project.with_storage_feature(:repository))
end
desc 'Gitlab | Storage | List existing projects using Hashed Storage'
task list_hashed_projects: :environment do
projects_list('projects using Hashed Storage', Project.with_storage_feature(:repository))
end
desc 'Gitlab | Storage | Summary of project attachments using Legacy Storage'
task legacy_attachments: :environment do
relation_summary('attachments using Legacy Storage', legacy_attachments_relation)
end
desc 'Gitlab | Storage | List existing project attachments using Legacy Storage'
task list_legacy_attachments: :environment do
attachments_list('attachments using Legacy Storage', legacy_attachments_relation)
end
desc 'Gitlab | Storage | Summary of project attachments using Hashed Storage'
task hashed_attachments: :environment do
relation_summary('attachments using Hashed Storage', hashed_attachments_relation)
end
desc 'Gitlab | Storage | List existing project attachments using Hashed Storage'
task list_hashed_attachments: :environment do
attachments_list('attachments using Hashed Storage', hashed_attachments_relation)
end end
def batch_size
...@@ -46,29 +66,43 @@ namespace :gitlab do
end
def project_id_batches(&block)
Project.with_unmigrated_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
ids = relation.pluck(:id)
yield ids.min, ids.max
end
end
def legacy_attachments_relation
Upload.joins(<<~SQL).where('projects.storage_version < :version OR projects.storage_version IS NULL', version: Project::HASHED_STORAGE_FEATURES[:attachments])
JOIN projects
ON (uploads.model_type='Project' AND uploads.model_id=projects.id)
SQL
end
def hashed_attachments_relation
Upload.joins(<<~SQL).where('projects.storage_version >= :version', version: Project::HASHED_STORAGE_FEATURES[:attachments])
JOIN projects
ON (uploads.model_type='Project' AND uploads.model_id=projects.id)
SQL
end
def relation_summary(relation_name, relation)
relation_count = relation.count
puts "* Found #{relation_count} #{relation_name}".color(:green)
relation_count
end
def projects_list(relation_name, relation)
relation_count = relation_summary(relation_name, relation)
projects = relation.with_route
limit = ENV.fetch('LIMIT', 500).to_i
return unless relation_count > 0
puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit
counter = 0
projects.find_in_batches(batch_size: batch_size) do |batch|
...@@ -81,5 +115,26 @@ namespace :gitlab do
end
end
end
def attachments_list(relation_name, relation)
relation_count = relation_summary(relation_name, relation)
limit = ENV.fetch('LIMIT', 500).to_i
return unless relation_count > 0
puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit
counter = 0
relation.find_in_batches(batch_size: batch_size) do |batch|
batch.each do |upload|
counter += 1
puts " - #{upload.path} (id: #{upload.id})".color(:red)
return if counter >= limit # rubocop:disable Lint/NonLocalExitFromIterator
end
end
end
end
end
...@@ -3,7 +3,16 @@ require 'spec_helper'
feature 'Update Epic', :js do
let(:user) { create(:user) }
let(:group) { create(:group, :public) }
let(:markdown) do
<<-MARKDOWN.strip_heredoc
This is a task list:
- [ ] Incomplete entry 1
MARKDOWN
end
let(:epic) { create(:epic, group: group, description: markdown) }
before do
stub_licensed_features(epics: true)
...@@ -51,6 +60,16 @@ feature 'Update Epic', :js do
expect(page).not_to have_selector('.uploading-container .button-attach-file')
end
it 'updates the tasklist' do
expect(page).to have_selector('ul.task-list', count: 1)
expect(page).to have_selector('li.task-list-item', count: 1)
expect(page).to have_selector('ul input[checked]', count: 0)
find('.task-list .task-list-item', text: 'Incomplete entry 1').find('input').click
expect(page).to have_selector('ul input[checked]', count: 1)
end
# Autocomplete is disabled for epics until #4084 is resolved
describe 'autocomplete disabled' do
it 'does not open atwho container' do
......
require 'spec_helper'
# Disable transactions via :truncate method because a foreign table
# can't see changes inside a transaction of a different connection.
describe Geo::ProjectRegistryFinder, :geo, :truncate do
include ::EE::GeoHelpers
let(:secondary) { create(:geo_node) }
let(:synced_group) { create(:group) }
let!(:project_not_synced) { create(:project) }
let(:project_repository_dirty) { create(:project) }
let(:project_wiki_dirty) { create(:project) }
subject { described_class.new(current_node: secondary) }
before do
stub_current_geo_node(secondary)
end
context 'FDW' do
before do
skip('FDW is not configured') if Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
end
describe '#find_unsynced_projects' do
it 'delegates to #fdw_find_unsynced_projects' do
expect(subject).to receive(:fdw_find_unsynced_projects).and_call_original
subject.find_unsynced_projects(batch_size: 10)
end
it 'delegates to #legacy_find_unsynced_projects when node has selective sync' do
secondary.update_attribute(:namespaces, [synced_group])
expect(subject).to receive(:legacy_find_unsynced_projects).and_call_original
subject.find_unsynced_projects(batch_size: 10)
end
it 'returns projects without an entry on the tracking database' do
create(:geo_project_registry, :synced, :repository_dirty, project: project_repository_dirty)
projects = subject.find_unsynced_projects(batch_size: 10)
expect(projects.count).to eq(1)
expect(projects.first.id).to eq(project_not_synced.id)
end
end
describe '#find_projects_updated_recently' do
it 'delegates to #fdw_find_projects_updated_recently' do
expect(subject).to receive(:fdw_find_projects_updated_recently).and_call_original
subject.find_projects_updated_recently(batch_size: 10)
end
it 'delegates to #legacy_find_projects_updated_recently when node has selective sync' do
secondary.update_attribute(:namespaces, [synced_group])
expect(subject).to receive(:legacy_find_projects_updated_recently).and_call_original
subject.find_projects_updated_recently(batch_size: 10)
end
it 'returns projects with a dirty entry on the tracking database' do
project_repository_dirty = create(:project)
project_wiki_dirty = create(:project)
create(:geo_project_registry, :synced, :repository_dirty, project: project_repository_dirty)
create(:geo_project_registry, :synced, :wiki_dirty, project: project_wiki_dirty)
projects = subject.find_projects_updated_recently(batch_size: 10)
expect(projects.pluck(:id)).to match_array([project_repository_dirty.id, project_wiki_dirty.id])
end
end
end
context 'Legacy' do
before do
allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
end
describe '#find_unsynced_projects' do
it 'delegates to #legacy_find_unsynced_projects' do
expect(subject).to receive(:legacy_find_unsynced_projects).and_call_original
subject.find_unsynced_projects(batch_size: 10)
end
it 'returns projects without an entry on the tracking database' do
create(:geo_project_registry, :synced, :repository_dirty, project: project_repository_dirty)
projects = subject.find_unsynced_projects(batch_size: 10)
expect(projects).to match_array([project_not_synced])
end
end
describe '#find_projects_updated_recently' do
it 'delegates to #legacy_find_projects_updated_recently' do
expect(subject).to receive(:legacy_find_projects_updated_recently).and_call_original
subject.find_projects_updated_recently(batch_size: 10)
end
it 'returns projects with a dirty entry on the tracking database' do
create(:geo_project_registry, :synced, :repository_dirty, project: project_repository_dirty)
create(:geo_project_registry, :synced, :wiki_dirty, project: project_wiki_dirty)
projects = subject.find_projects_updated_recently(batch_size: 10)
expect(projects.pluck(:id)).to match_array([project_repository_dirty.id, project_wiki_dirty.id])
end
end
end
end
...@@ -11,4 +11,22 @@ describe Ci::Variable do
is_expected.to validate_uniqueness_of(:key)
.scoped_to(:project_id, :environment_scope)
end
describe '#environment_scope=' do
context 'when the new environment_scope is nil' do
it 'strips leading and trailing whitespaces' do
subject.environment_scope = nil
expect(subject.environment_scope).to eq('')
end
end
context 'when the new environment_scope has leading and trailing whitespaces' do
it 'strips leading and trailing whitespaces' do
subject.environment_scope = ' * '
expect(subject.environment_scope).to eq('*')
end
end
end
end
require 'spec_helper'
describe Projects::HashedStorage::MigrateAttachmentsService do
let(:project) { create(:project, storage_version: 1) }
let(:service) { described_class.new(project) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
let(:old_attachments_path) { legacy_storage.disk_path }
let(:new_attachments_path) { hashed_storage.disk_path }
describe '#execute' do
set(:primary) { create(:geo_node, :primary) }
set(:secondary) { create(:geo_node) }
context 'on success' do
before do
FileUtils.mkdir_p(FileUploader.dynamic_path_builder(old_attachments_path))
end
it 'returns true' do
expect(service.execute).to be_truthy
end
it 'creates a Geo::HashedStorageAttachmentsEvent' do
expect { service.execute }.to change(Geo::EventLog, :count).by(1)
event = Geo::EventLog.first.event
expect(event).to be_a(Geo::HashedStorageAttachmentsEvent)
expect(event).to have_attributes(
old_attachments_path: old_attachments_path,
new_attachments_path: new_attachments_path
)
end
end
context 'on failure' do
it 'does not create a Geo event when skipped' do
expect { service.execute }.not_to change { Geo::EventLog.count }
end
it 'does not create a Geo event on failure' do
expect(service).to receive(:move_folder!).and_raise(::Projects::HashedStorage::AttachmentMigrationError)
expect { service.execute }.to raise_error(::Projects::HashedStorage::AttachmentMigrationError)
expect(Geo::EventLog.count).to eq(0)
end
end
end
end