Commit 3977421e authored by Douglas Barbosa Alexandre

Merge branch '53966-make-hashed-storage-migration-safer-and-more-inviting' into 'master'

Hashed Storage rollback mechanism

See merge request gitlab-org/gitlab-ce!23955
parents 25c91fa4 3524a618
@@ -1970,9 +1970,19 @@ class Project < ActiveRecord::Base
    return unless storage_upgradable?

    if git_transfer_in_progress?
-      ProjectMigrateHashedStorageWorker.perform_in(Gitlab::ReferenceCounter::REFERENCE_EXPIRE_TIME, id)
+      HashedStorage::ProjectMigrateWorker.perform_in(Gitlab::ReferenceCounter::REFERENCE_EXPIRE_TIME, id)
    else
-      ProjectMigrateHashedStorageWorker.perform_async(id)
+      HashedStorage::ProjectMigrateWorker.perform_async(id)
    end
  end

+  def rollback_to_legacy_storage!
+    return if legacy_storage?
+
+    if git_transfer_in_progress?
+      HashedStorage::ProjectRollbackWorker.perform_in(Gitlab::ReferenceCounter::REFERENCE_EXPIRE_TIME, id)
+    else
+      HashedStorage::ProjectRollbackWorker.perform_async(id)
+    end
+  end
+
...
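For context, the new entry point can be exercised from a Rails console. This is an illustrative sketch only, not part of the diff; the project id is hypothetical:

# Illustrative only: schedule a rollback for one project from a Rails console.
project = Project.find(42)          # hypothetical project id
project.rollback_to_legacy_storage! # no-op if the project is already on Legacy Storage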
# frozen_string_literal: true

module Projects
  module HashedStorage
    AttachmentMigrationError = Class.new(StandardError)

    AttachmentCannotMoveError = Class.new(StandardError)

    class BaseAttachmentService < BaseService
      # Returns the disk_path value before the execution
      attr_reader :old_disk_path

      # Returns the disk_path value after the execution
      attr_reader :new_disk_path

      # Returns the logger currently in use
      attr_reader :logger

      # Returns whether this operation was skipped or not
      #
      # @return [Boolean] true if skipped, false otherwise
      def skipped?
        @skipped
      end

      protected

      def move_folder!(old_path, new_path)
        unless File.directory?(old_path)
          logger.info("Skipped attachments move from '#{old_path}' to '#{new_path}', source path doesn't exist or is not a directory (PROJECT_ID=#{project.id})")
          @skipped = true

          return true
        end

        if File.exist?(new_path)
          logger.error("Cannot move attachments from '#{old_path}' to '#{new_path}', target path already exists (PROJECT_ID=#{project.id})")
          raise AttachmentCannotMoveError, "Target path '#{new_path}' already exists"
        end

        # Create base path folder on the new storage layout
        FileUtils.mkdir_p(File.dirname(new_path))

        FileUtils.mv(old_path, new_path)
        logger.info("Project attachments moved from '#{old_path}' to '#{new_path}' (PROJECT_ID=#{project.id})")

        true
      end
    end
  end
end
@@ -2,11 +2,8 @@
module Projects
  module HashedStorage
-    # Returned when there is an error with the Hashed Storage migration
-    RepositoryMigrationError = Class.new(StandardError)
-
-    # Returned when there is an error with the Hashed Storage rollback
-    RepositoryRollbackError = Class.new(StandardError)
+    # Returned when repository can't be made read-only because there is already a git transfer in progress
+    RepositoryInUseError = Class.new(StandardError)

    class BaseRepositoryService < BaseService
      include Gitlab::ShellAdapter
@@ -38,7 +35,10 @@ module Projects
        # project was not originally empty.
        if !from_exists && !to_exists
          logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
-          return false
+
+          # We return true so we still reflect the change in the database.
+          # Next time the repository is (re)created it will be under the new storage layout
+          return true
        elsif !from_exists
          # Repository has been moved already.
          return true
@@ -52,6 +52,16 @@ module Projects
        move_repository(new_disk_path, old_disk_path)
        move_repository("#{new_disk_path}.wiki", old_wiki_disk_path)
      end

+      def try_to_set_repository_read_only!
+        # Prevent any push operation from starting during the migration
+        unless project.set_repository_read_only!
+          migration_error = "Target repository '#{old_disk_path}' cannot be made read-only as there is a git transfer in progress"
+          logger.error migration_error
+
+          raise RepositoryInUseError, migration_error
+        end
+      end
    end
  end
end
@@ -2,63 +2,38 @@
module Projects
  module HashedStorage
-    AttachmentMigrationError = Class.new(StandardError)
-
-    class MigrateAttachmentsService < BaseService
-      attr_reader :logger, :old_disk_path, :new_disk_path
-
+    class MigrateAttachmentsService < BaseAttachmentService
      def initialize(project, old_disk_path, logger: nil)
        @project = project
        @logger = logger || Rails.logger
        @old_disk_path = old_disk_path
-        @new_disk_path = project.disk_path
        @skipped = false
      end

      def execute
        origin = FileUploader.absolute_base_dir(project)
-        # It's possible that old_disk_path does not match project.disk_path. For example, that happens when we rename a project
+        # It's possible that old_disk_path does not match project.disk_path.
+        # For example, that happens when we rename a project
        origin.sub!(/#{Regexp.escape(project.full_path)}\z/, old_disk_path)

        project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
        target = FileUploader.absolute_base_dir(project)
+        @new_disk_path = project.disk_path

        result = move_folder!(origin, target)
-        project.save!

-        if result && block_given?
-          yield
+        if result
+          project.save!
+
+          yield if block_given?
+        else
+          # Rollback changes
+          project.rollback!
        end

        result
      end
-
-      def skipped?
-        @skipped
-      end
-
-      private
-
-      def move_folder!(old_path, new_path)
-        unless File.directory?(old_path)
-          logger.info("Skipped attachments migration from '#{old_path}' to '#{new_path}', source path doesn't exist or is not a directory (PROJECT_ID=#{project.id})")
-          @skipped = true
-          return true
-        end
-
-        if File.exist?(new_path)
-          logger.error("Cannot migrate attachments from '#{old_path}' to '#{new_path}', target path already exist (PROJECT_ID=#{project.id})")
-          raise AttachmentMigrationError, "Target path '#{new_path}' already exist"
-        end
-
-        # Create hashed storage base path folder
-        FileUtils.mkdir_p(File.dirname(new_path))
-
-        FileUtils.mv(old_path, new_path)
-        logger.info("Migrated project attachments from '#{old_path}' to '#{new_path}' (PROJECT_ID=#{project.id})")
-
-        true
-      end
    end
  end
end
@@ -15,7 +15,7 @@ module Projects
      result = move_repository(old_disk_path, new_disk_path)

      if move_wiki
-        result &&= move_repository("#{old_wiki_disk_path}", "#{new_disk_path}.wiki")
+        result &&= move_repository(old_wiki_disk_path, "#{new_disk_path}.wiki")
      end

      if result
@@ -35,18 +35,6 @@ module Projects
        result
      end

-      private
-
-      def try_to_set_repository_read_only!
-        # Mitigate any push operation to start during migration
-        unless project.set_repository_read_only!
-          migration_error = "Target repository '#{old_disk_path}' cannot be made read-only as there is a git transfer in progress"
-          logger.error migration_error
-
-          raise RepositoryMigrationError, migration_error
-        end
-      end
    end
  end
end
# frozen_string_literal: true

module Projects
  module HashedStorage
    class RollbackAttachmentsService < BaseAttachmentService
      def initialize(project, logger: nil)
        @project = project
        @logger = logger || Rails.logger
        @old_disk_path = project.disk_path
      end

      def execute
        origin = FileUploader.absolute_base_dir(project)
        project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
        target = FileUploader.absolute_base_dir(project)

        @new_disk_path = FileUploader.base_dir(project)

        result = move_folder!(origin, target)

        if result
          project.save!

          yield if block_given?
        else
          # Rollback changes
          project.rollback!
        end

        result
      end
    end
  end
end
# frozen_string_literal: true

module Projects
  module HashedStorage
    class RollbackRepositoryService < BaseRepositoryService
      def execute
        try_to_set_repository_read_only!

        @old_storage_version = project.storage_version
        project.storage_version = nil
        project.ensure_storage_path_exists

        @new_disk_path = project.disk_path

        result = move_repository(old_disk_path, new_disk_path)

        if move_wiki
          result &&= move_repository(old_wiki_disk_path, "#{new_disk_path}.wiki")
        end

        if result
          project.write_repository_config
          project.track_project_repository
        else
          rollback_folder_move
          project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
        end

        project.repository_read_only = false
        project.save!

        if result && block_given?
          yield
        end

        result
      end
    end
  end
end
# frozen_string_literal: true

module Projects
  module HashedStorage
    class RollbackService < BaseService
      attr_reader :logger, :old_disk_path

      def initialize(project, old_disk_path, logger: nil)
        @project = project
        @old_disk_path = old_disk_path
        @logger = logger || Rails.logger
      end

      def execute
        # Rollback attachments from Hashed Storage to Legacy
        if project.hashed_storage?(:attachments)
          return false unless rollback_attachments
        end

        # Rollback repository from Hashed Storage to Legacy
        if project.hashed_storage?(:repository)
          rollback_repository
        end
      end

      private

      def rollback_attachments
        HashedStorage::RollbackAttachmentsService.new(project, logger: logger).execute
      end

      def rollback_repository
        HashedStorage::RollbackRepositoryService.new(project, old_disk_path, logger: logger).execute
      end
    end
  end
end
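A minimal sketch (illustrative, not part of the diff) of how the per-project rollback chain above fits together; the project id and logger choice are hypothetical:

# Illustrative only: attachments are rolled back first, then the repository.
project = Project.find(42) # hypothetical id
Projects::HashedStorage::RollbackService
  .new(project, project.disk_path, logger: Rails.logger)
  .execute # => false if the attachments rollback fails, otherwise delegates to RollbackRepositoryService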
@@ -47,6 +47,9 @@
 - github_importer:github_import_stage_import_repository
 - hashed_storage:hashed_storage_migrator
+- hashed_storage:hashed_storage_rollbacker
+- hashed_storage:hashed_storage_project_migrate
+- hashed_storage:hashed_storage_project_rollback
 - mail_scheduler:mail_scheduler_issue_due
 - mail_scheduler:mail_scheduler_notification_service
@@ -126,7 +129,6 @@
 - project_cache
 - project_destroy
 - project_export
-- project_migrate_hashed_storage
 - project_service
 - propagate_service_template
 - reactive_caching
...
# frozen_string_literal: true

module HashedStorage
  class BaseWorker
    include ExclusiveLeaseGuard

    LEASE_TIMEOUT = 30.seconds.to_i
    LEASE_KEY_SEGMENT = 'project_migrate_hashed_storage_worker'.freeze

    protected

    def lease_key
      # we share the same lease key for both migration and rollback so they don't run simultaneously
      "#{LEASE_KEY_SEGMENT}:#{project_id}"
    end

    def lease_timeout
      LEASE_TIMEOUT
    end
  end
end
# frozen_string_literal: true

module HashedStorage
  class ProjectMigrateWorker < BaseWorker
    include ApplicationWorker

    queue_namespace :hashed_storage

    attr_reader :project_id

    # rubocop: disable CodeReuse/ActiveRecord
    def perform(project_id, old_disk_path = nil)
      @project_id = project_id # we need to set this in order to create the lease_key

      try_obtain_lease do
        project = Project.without_deleted.find_by(id: project_id)
        break unless project

        old_disk_path ||= project.disk_path

        ::Projects::HashedStorage::MigrationService.new(project, old_disk_path, logger: logger).execute
      end
    end
    # rubocop: enable CodeReuse/ActiveRecord
  end
end
# frozen_string_literal: true

module HashedStorage
  class ProjectRollbackWorker < BaseWorker
    include ApplicationWorker

    queue_namespace :hashed_storage

    attr_reader :project_id

    # rubocop: disable CodeReuse/ActiveRecord
    def perform(project_id, old_disk_path = nil)
      @project_id = project_id # we need to set this in order to create the lease_key

      try_obtain_lease do
        project = Project.without_deleted.find_by(id: project_id)
        break unless project

        old_disk_path ||= project.disk_path

        ::Projects::HashedStorage::RollbackService.new(project, old_disk_path, logger: logger).execute
      end
    end
    # rubocop: enable CodeReuse/ActiveRecord
  end
end
# frozen_string_literal: true

module HashedStorage
  class RollbackerWorker
    include ApplicationWorker

    queue_namespace :hashed_storage

    # @param [Integer] start initial ID of the batch
    # @param [Integer] finish last ID of the batch
    def perform(start, finish)
      migrator = Gitlab::HashedStorage::Migrator.new
      migrator.bulk_rollback(start: start, finish: finish)
    end
  end
end
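Taken together with ProjectRollbackWorker above, the bulk worker fans a range of project ids out into one job per project. A rough sketch of that flow (the ids are hypothetical, shown only for illustration):

# Illustrative only
HashedStorage::RollbackerWorker.perform_async(1, 300)
# => runs Gitlab::HashedStorage::Migrator#bulk_rollback(start: 1, finish: 300),
#    which calls #rollback(project) for each project in the range and thereby
#    enqueues HashedStorage::ProjectRollbackWorker.perform_async(project.id)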
# frozen_string_literal: true

class ProjectMigrateHashedStorageWorker
  include ApplicationWorker

  LEASE_TIMEOUT = 30.seconds.to_i
  LEASE_KEY_SEGMENT = 'project_migrate_hashed_storage_worker'.freeze

  # rubocop: disable CodeReuse/ActiveRecord
  def perform(project_id, old_disk_path = nil)
    uuid = lease_for(project_id).try_obtain

    if uuid
      project = Project.find_by(id: project_id)
      return if project.nil? || project.pending_delete?

      old_disk_path ||= project.disk_path

      ::Projects::HashedStorage::MigrationService.new(project, old_disk_path, logger: logger).execute
    else
      return false
    end
  ensure
    cancel_lease_for(project_id, uuid) if uuid
  end
  # rubocop: enable CodeReuse/ActiveRecord

  def lease_for(project_id)
    Gitlab::ExclusiveLease.new(lease_key(project_id), timeout: LEASE_TIMEOUT)
  end

  private

  def lease_key(project_id)
    # we share the same lease key for both migration and rollback so they don't run simultaneously
    "#{LEASE_KEY_SEGMENT}:#{project_id}"
  end

  def cancel_lease_for(project_id, uuid)
    Gitlab::ExclusiveLease.cancel(lease_key(project_id), uuid)
  end
end
---
title: Hashed Storage rollback mechanism
merge_request: 23955
author:
type: added
@@ -68,6 +68,7 @@
 - [background_migration, 1]
 - [gcp_cluster, 1]
 - [project_migrate_hashed_storage, 1]
+- [project_rollback_hashed_storage, 1]
 - [hashed_storage, 1]
 - [pages_domain_verification, 1]
 - [object_storage_upload, 1]
...
# frozen_string_literal: true

class MigrateProjectMigrateSidekiqQueue < ActiveRecord::Migration[5.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    sidekiq_queue_migrate 'project_migrate_hashed_storage', to: 'hashed_storage:hashed_storage_project_migrate'
  end

  def down
    sidekiq_queue_migrate 'hashed_storage:hashed_storage_project_migrate', to: 'project_migrate_hashed_storage'
  end
end
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 20190228092516) do
+ActiveRecord::Schema.define(version: 20190301081611) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
...
@@ -13,10 +13,18 @@ module Gitlab
      #
      # @param [Integer] start first project id for the range
      # @param [Integer] finish last project id for the range
-      def bulk_schedule(start:, finish:)
+      def bulk_schedule_migration(start:, finish:)
        ::HashedStorage::MigratorWorker.perform_async(start, finish)
      end

+      # Schedule a range of projects to be bulk rolled back with #bulk_rollback asynchronously
+      #
+      # @param [Integer] start first project id for the range
+      # @param [Integer] finish last project id for the range
+      def bulk_schedule_rollback(start:, finish:)
+        ::HashedStorage::RollbackerWorker.perform_async(start, finish)
+      end
+
      # Start migration of projects from specified range
      #
      # Flagging a project to be migrated is a synchronous action
@@ -34,6 +42,23 @@ module Gitlab
      end
      # rubocop: enable CodeReuse/ActiveRecord

+      # Start rollback of projects from specified range
+      #
+      # Flagging a project to be rolled back is a synchronous action
+      # but the rollback runs through async jobs
+      #
+      # @param [Integer] start first project id for the range
+      # @param [Integer] finish last project id for the range
+      # rubocop: disable CodeReuse/ActiveRecord
+      def bulk_rollback(start:, finish:)
+        projects = build_relation(start, finish)
+
+        projects.with_route.find_each(batch_size: BATCH_SIZE) do |project|
+          rollback(project)
+        end
+      end
+      # rubocop: enable CodeReuse/ActiveRecord
+
      # Flag a project to be migrated to Hashed Storage
      #
      # @param [Project] project that will be migrated
@@ -45,8 +70,15 @@ module Gitlab
        Rails.logger.error("#{err.message} migrating storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
      end

+      # Flag a project to be rolled back to Legacy Storage
+      #
+      # @param [Project] project that will be rolled back
      def rollback(project)
-        # TODO: implement rollback strategy
+        Rails.logger.info "Starting storage rollback of #{project.full_path} (ID=#{project.id})..."
+
+        project.rollback_to_legacy_storage!
+      rescue => err
+        Rails.logger.error("#{err.message} rolling back storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
      end

      private
...
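As a usage sketch (illustrative, not part of the diff), the migrator can now be driven in both directions; the id range and project variable are hypothetical:

# Illustrative only; `project` is any Project record
migrator = Gitlab::HashedStorage::Migrator.new
migrator.bulk_schedule_migration(start: 1, finish: 100) # enqueues HashedStorage::MigratorWorker
migrator.bulk_schedule_rollback(start: 1, finish: 100)  # enqueues HashedStorage::RollbackerWorker
migrator.rollback(project)                              # flags a single project to be rolled back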
@@ -24,7 +24,7 @@ module Gitlab
      end

      # rubocop: disable CodeReuse/ActiveRecord
-      def self.project_id_batches(&block)
+      def self.project_id_batches_migration(&block)
        Project.with_unmigrated_storage.in_batches(of: batch_size, start: range_from, finish: range_to) do |relation| # rubocop: disable Cop/InBatches
          ids = relation.pluck(:id)
@@ -33,6 +33,16 @@ module Gitlab
      end
      # rubocop: enable CodeReuse/ActiveRecord

+      # rubocop: disable CodeReuse/ActiveRecord
+      def self.project_id_batches_rollback(&block)
+        Project.with_storage_feature(:repository).in_batches(of: batch_size, start: range_from, finish: range_to) do |relation| # rubocop: disable Cop/InBatches
+          ids = relation.pluck(:id)
+
+          yield ids.min, ids.max
+        end
+      end
+      # rubocop: enable CodeReuse/ActiveRecord
+
      # rubocop: disable CodeReuse/ActiveRecord
      def self.legacy_attachments_relation
        Upload.joins(<<~SQL).where('projects.storage_version < :version OR projects.storage_version IS NULL', version: Project::HASHED_STORAGE_FEATURES[:attachments])
...
@@ -36,8 +36,54 @@ namespace :gitlab do
    print "Enqueuing migration of #{legacy_projects_count} projects in batches of #{helper.batch_size}"

-    helper.project_id_batches do |start, finish|
-      storage_migrator.bulk_schedule(start: start, finish: finish)
+    helper.project_id_batches_migration do |start, finish|
+      storage_migrator.bulk_schedule_migration(start: start, finish: finish)

      print '.'
    end

    puts ' Done!'
  end

+  desc 'GitLab | Storage | Rollback existing projects to Legacy Storage'
+  task rollback_to_legacy: :environment do
+    if Gitlab::Database.read_only?
+      warn 'This task requires database write access. Exiting.'
+
+      next
+    end
+
+    storage_migrator = Gitlab::HashedStorage::Migrator.new
+    helper = Gitlab::HashedStorage::RakeHelper
+
+    if helper.range_single_item?
+      project = Project.with_storage_feature(:repository).find_by(id: helper.range_from)
+
+      unless project
+        warn "There are no projects that can be rolled back with ID=#{helper.range_from}"
+
+        next
+      end
+
+      puts "Enqueueing storage rollback of #{project.full_path} (ID=#{project.id})..."
+      storage_migrator.rollback(project)
+
+      next
+    end
+
+    hashed_projects_count = Project.with_storage_feature(:repository).count
+
+    if hashed_projects_count == 0
+      warn 'There are no projects that can have storage rolled back. Nothing to do!'
+
+      next
+    end
+
+    print "Enqueuing rollback of #{hashed_projects_count} projects in batches of #{helper.batch_size}"
+
+    helper.project_id_batches_rollback do |start, finish|
+      puts "Start: #{start} FINISH: #{finish}"
+      storage_migrator.bulk_schedule_rollback(start: start, finish: finish)

      print '.'
    end
...
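For reference, the new task would typically be invoked like the existing migration task. The ID_FROM/ID_TO variables below are an assumption based on Gitlab::HashedStorage::RakeHelper's range handling (range_single_item?, range_from), not something introduced by this diff:

# Illustrative only
bundle exec rake gitlab:storage:rollback_to_legacy                      # roll back every project on hashed storage
bundle exec rake gitlab:storage:rollback_to_legacy ID_FROM=42 ID_TO=42  # assumed single-project form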
require 'spec_helper'

describe Gitlab::HashedStorage::Migrator do
-  describe '#bulk_schedule' do
-    it 'schedules job to StorageMigratorWorker' do
+  describe '#bulk_schedule_migration' do
+    it 'schedules job to HashedStorage::MigratorWorker' do
      Sidekiq::Testing.fake! do
-        expect { subject.bulk_schedule(start: 1, finish: 5) }.to change(HashedStorage::MigratorWorker.jobs, :size).by(1)
+        expect { subject.bulk_schedule_migration(start: 1, finish: 5) }.to change(HashedStorage::MigratorWorker.jobs, :size).by(1)
      end
    end
  end

+  describe '#bulk_schedule_rollback' do
+    it 'schedules job to HashedStorage::RollbackerWorker' do
+      Sidekiq::Testing.fake! do
+        expect { subject.bulk_schedule_rollback(start: 1, finish: 5) }.to change(HashedStorage::RollbackerWorker.jobs, :size).by(1)
+      end
+    end
+  end
+
  describe '#bulk_migrate' do
-    let(:projects) { create_list(:project, 2, :legacy_storage) }
+    let(:projects) { create_list(:project, 2, :legacy_storage, :empty_repo) }
    let(:ids) { projects.map(&:id) }

-    it 'enqueue jobs to ProjectMigrateHashedStorageWorker' do
+    it 'enqueue jobs to HashedStorage::ProjectMigrateWorker' do
      Sidekiq::Testing.fake! do
-        expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(2)
+        expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(2)
      end
    end
@@ -32,13 +40,53 @@ describe Gitlab::HashedStorage::Migrator do
      subject.bulk_migrate(start: ids.min, finish: ids.max)
    end

-    it 'has migrated projects set as writable' do
+    it 'has all projects migrated and set as writable' do
      perform_enqueued_jobs do
        subject.bulk_migrate(start: ids.min, finish: ids.max)
      end

      projects.each do |project|
-        expect(project.reload.repository_read_only?).to be_falsey
+        project.reload
+
+        expect(project.hashed_storage?(:repository)).to be_truthy
+        expect(project.repository_read_only?).to be_falsey
      end
    end
  end

+  describe '#bulk_rollback' do
+    let(:projects) { create_list(:project, 2, :empty_repo) }
+    let(:ids) { projects.map(&:id) }
+
+    it 'enqueue jobs to HashedStorage::ProjectRollbackWorker' do
+      Sidekiq::Testing.fake! do
+        expect { subject.bulk_rollback(start: ids.min, finish: ids.max) }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(2)
+      end
+    end
+
+    it 'rescues and log exceptions' do
+      allow_any_instance_of(Project).to receive(:rollback_to_legacy_storage!).and_raise(StandardError)
+
+      expect { subject.bulk_rollback(start: ids.min, finish: ids.max) }.not_to raise_error
+    end
+
+    it 'delegates each project in specified range to #rollback' do
+      projects.each do |project|
+        expect(subject).to receive(:rollback).with(project)
+      end
+
+      subject.bulk_rollback(start: ids.min, finish: ids.max)
+    end
+
+    it 'has all projects rolled back and set as writable' do
+      perform_enqueued_jobs do
+        subject.bulk_rollback(start: ids.min, finish: ids.max)
+      end
+
+      projects.each do |project|
+        project.reload
+
+        expect(project.legacy_storage?).to be_truthy
+        expect(project.repository_read_only?).to be_falsey
+      end
+    end
+  end
@@ -48,7 +96,7 @@ describe Gitlab::HashedStorage::Migrator do
    it 'enqueues project migration job' do
      Sidekiq::Testing.fake! do
-        expect { subject.migrate(project) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(1)
+        expect { subject.migrate(project) }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(1)
      end
    end
@@ -79,7 +127,7 @@ describe Gitlab::HashedStorage::Migrator do
    it 'doesnt enqueue any migration job' do
      Sidekiq::Testing.fake! do
-        expect { subject.migrate(project) }.not_to change(ProjectMigrateHashedStorageWorker.jobs, :size)
+        expect { subject.migrate(project) }.not_to change(HashedStorage::ProjectMigrateWorker.jobs, :size)
      end
    end
@@ -88,4 +136,50 @@ describe Gitlab::HashedStorage::Migrator do
      end
    end
  end

+  describe '#rollback' do
+    let(:project) { create(:project, :empty_repo) }
+
+    it 'enqueues project rollback job' do
+      Sidekiq::Testing.fake! do
+        expect { subject.rollback(project) }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(1)
+      end
+    end
+
+    it 'rescues and log exceptions' do
+      allow(project).to receive(:rollback_to_legacy_storage!).and_raise(StandardError)
+
+      expect { subject.rollback(project) }.not_to raise_error
+    end
+
+    it 'rolls back project storage' do
+      perform_enqueued_jobs do
+        subject.rollback(project)
+      end
+
+      expect(project.reload.legacy_storage?).to be_truthy
+    end
+
+    it 'has rolled-back project set as writable' do
+      perform_enqueued_jobs do
+        subject.rollback(project)
+      end
+
+      expect(project.reload.repository_read_only?).to be_falsey
+    end
+
+    context 'when project is already on legacy storage' do
+      let(:project) { create(:project, :legacy_storage, :empty_repo) }
+
+      it 'doesnt enqueue any rollback job' do
+        Sidekiq::Testing.fake! do
+          expect { subject.rollback(project) }.not_to change(HashedStorage::ProjectRollbackWorker.jobs, :size)
+        end
+      end
+
+      it 'returns false' do
+        expect(subject.rollback(project)).to be_falsey
+      end
+    end
+  end
end
@@ -3430,28 +3430,42 @@ describe Project do
      project.migrate_to_hashed_storage!
    end

-    it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the project repo is in use' do
+    it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the project repo is in use' do
      Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: false)).increase

-      expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
+      expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)

      project.migrate_to_hashed_storage!
    end

-    it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the wiki repo is in use' do
+    it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the wiki repo is in use' do
      Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: true)).increase

-      expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
+      expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)

      project.migrate_to_hashed_storage!
    end

-    it 'schedules ProjectMigrateHashedStorageWorker' do
-      expect(ProjectMigrateHashedStorageWorker).to receive(:perform_async).with(project.id)
+    it 'schedules HashedStorage::ProjectMigrateWorker' do
+      expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_async).with(project.id)

      project.migrate_to_hashed_storage!
    end
  end

+  describe '#rollback_to_legacy_storage!' do
+    let(:project) { create(:project, :empty_repo, :legacy_storage) }
+
+    it 'returns nil' do
+      expect(project.rollback_to_legacy_storage!).to be_nil
+    end
+
+    it 'does not run validations' do
+      expect(project).not_to receive(:valid?)
+
+      project.rollback_to_legacy_storage!
+    end
+  end
  end

  context 'hashed storage' do
@@ -3527,8 +3541,32 @@ describe Project do
        project = create(:project, storage_version: 1, skip_disk_validation: true)

        Sidekiq::Testing.fake! do
-          expect { project.migrate_to_hashed_storage! }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(1)
+          expect { project.migrate_to_hashed_storage! }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(1)
        end
      end
    end
  end

+  describe '#rollback_to_legacy_storage!' do
+    let(:project) { create(:project, :repository, skip_disk_validation: true) }
+
+    it 'returns true' do
+      expect(project.rollback_to_legacy_storage!).to be_truthy
+    end
+
+    it 'does not run validations' do
+      expect(project).not_to receive(:valid?)
+
+      project.rollback_to_legacy_storage!
+    end
+
+    it 'does not flag as read-only' do
+      expect { project.rollback_to_legacy_storage! }.not_to change { project.repository_read_only }
+    end
+
+    it 'enqueues a job' do
+      Sidekiq::Testing.fake! do
+        expect { project.rollback_to_legacy_storage! }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(1)
+      end
+    end
+  end
...
@@ -70,10 +70,10 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
        FileUtils.mkdir_p(base_path(hashed_storage))
      end

-      it 'raises AttachmentMigrationError' do
+      it 'raises AttachmentCannotMoveError' do
        expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))

-        expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentMigrationError)
+        expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
      end
    end
  end
@@ -86,6 +86,8 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
  context '#new_disk_path' do
    it 'returns new disk_path for project' do
+      service.execute
+
      expect(service.new_disk_path).to eq(project.disk_path)
    end
  end
...
@@ -28,7 +28,17 @@ describe Projects::HashedStorage::MigrateRepositoryService do
    it 'fails when a git operation is in progress' do
      allow(project).to receive(:repo_reference_count) { 1 }

-      expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryMigrationError)
+      expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
    end
  end

+  context 'when repository doesnt exist on disk' do
+    let(:project) { create(:project, :legacy_storage) }
+
+    it 'skips the disk change but increase the version' do
+      service.execute
+
+      expect(project.hashed_storage?(:repository)).to be_truthy
+    end
+  end
...
# frozen_string_literal: true

require 'spec_helper'

describe Projects::HashedStorage::RollbackAttachmentsService do
  subject(:service) { described_class.new(project, logger: nil) }

  let(:project) { create(:project, :repository, skip_disk_validation: true) }
  let(:legacy_storage) { Storage::LegacyProject.new(project) }
  let(:hashed_storage) { Storage::HashedProject.new(project) }

  let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
  let(:file_uploader) { build(:file_uploader, project: project) }
  let(:old_disk_path) { File.join(base_path(hashed_storage), upload.path) }
  let(:new_disk_path) { File.join(base_path(legacy_storage), upload.path) }

  context '#execute' do
    context 'when succeeds' do
      it 'moves attachments to legacy storage layout' do
        expect(File.file?(old_disk_path)).to be_truthy
        expect(File.file?(new_disk_path)).to be_falsey
        expect(File.exist?(base_path(hashed_storage))).to be_truthy
        expect(File.exist?(base_path(legacy_storage))).to be_falsey
        expect(FileUtils).to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage)).and_call_original

        service.execute

        expect(File.exist?(base_path(legacy_storage))).to be_truthy
        expect(File.exist?(base_path(hashed_storage))).to be_falsey
        expect(File.file?(old_disk_path)).to be_falsey
        expect(File.file?(new_disk_path)).to be_truthy
      end

      it 'returns true' do
        expect(service.execute).to be_truthy
      end

      it 'sets skipped to false' do
        service.execute

        expect(service.skipped?).to be_falsey
      end
    end

    context 'when original folder does not exist anymore' do
      before do
        FileUtils.rm_rf(base_path(hashed_storage))
      end

      it 'skips moving folders and go to next' do
        expect(FileUtils).not_to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage))

        service.execute

        expect(File.exist?(base_path(legacy_storage))).to be_falsey
        expect(File.file?(new_disk_path)).to be_falsey
      end

      it 'returns true' do
        expect(service.execute).to be_truthy
      end

      it 'sets skipped to true' do
        service.execute

        expect(service.skipped?).to be_truthy
      end
    end

    context 'when target folder already exists' do
      before do
        FileUtils.mkdir_p(base_path(legacy_storage))
      end

      it 'raises AttachmentCannotMoveError' do
        expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))

        expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
      end
    end
  end

  context '#old_disk_path' do
    it 'returns old disk_path for project' do
      expect(service.old_disk_path).to eq(project.disk_path)
    end
  end

  context '#new_disk_path' do
    it 'returns new disk_path for project' do
      service.execute

      expect(service.new_disk_path).to eq(project.full_path)
    end
  end

  def base_path(storage)
    File.join(FileUploader.root, storage.disk_path)
  end
end
# frozen_string_literal: true

require 'spec_helper'

describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state do
  include GitHelpers

  let(:gitlab_shell) { Gitlab::Shell.new }
  let(:project) { create(:project, :repository, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
  let(:legacy_storage) { Storage::LegacyProject.new(project) }
  let(:hashed_storage) { Storage::HashedProject.new(project) }

  subject(:service) { described_class.new(project, project.disk_path) }

  describe '#execute' do
    let(:old_disk_path) { hashed_storage.disk_path }
    let(:new_disk_path) { legacy_storage.disk_path }

    before do
      allow(service).to receive(:gitlab_shell) { gitlab_shell }
    end

    context 'repository lock' do
      it 'tries to lock the repository' do
        expect(service).to receive(:try_to_set_repository_read_only!)

        service.execute
      end

      it 'fails when a git operation is in progress' do
        allow(project).to receive(:repo_reference_count) { 1 }

        expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
      end
    end

    context 'when repository doesnt exist on disk' do
      let(:project) { create(:project) }

      it 'skips the disk change but decrease the version' do
        service.execute

        expect(project.legacy_storage?).to be_truthy
      end
    end

    context 'when succeeds' do
      it 'renames project and wiki repositories' do
        service.execute

        expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
        expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
      end

      it 'updates project to be legacy and not read-only' do
        service.execute

        expect(project.legacy_storage?).to be_truthy
        expect(project.repository_read_only).to be_falsey
      end

      it 'move operation is called for both repositories' do
        expect_move_repository(old_disk_path, new_disk_path)
        expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")

        service.execute
      end

      it 'writes project full path to .git/config' do
        service.execute

        rugged_config = rugged_repo(project.repository).config['gitlab.fullpath']

        expect(rugged_config).to eq project.full_path
      end
    end

    context 'when one move fails' do
      it 'rolls repositories back to original name' do
        allow(service).to receive(:move_repository).and_call_original
        allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only

        expect(service).to receive(:rollback_folder_move).and_call_original

        service.execute

        expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
        expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
        expect(project.repository_read_only?).to be_falsey
      end

      context 'when rollback fails' do
        before do
          legacy_storage.ensure_storage_path_exists
          gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
        end

        it 'does not try to move nil repository over existing' do
          expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
          expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")

          service.execute
        end
      end
    end

    def expect_move_repository(from_name, to_name)
      expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

describe Projects::HashedStorage::RollbackService do
  let(:project) { create(:project, :empty_repo, :wiki_repo) }
  let(:logger) { double }

  subject(:service) { described_class.new(project, project.full_path, logger: logger) }

  describe '#execute' do
    context 'attachments rollback' do
      let(:attachments_service_class) { Projects::HashedStorage::RollbackAttachmentsService }
      let(:attachments_service) { attachments_service_class.new(project, logger: logger) }

      it 'delegates rollback to Projects::HashedStorage::RollbackAttachmentsService' do
        expect(attachments_service_class).to receive(:new)
          .with(project, logger: logger)
          .and_return(attachments_service)
        expect(attachments_service).to receive(:execute)

        service.execute
      end

      it 'does not delegate rollback if repository is in legacy storage already' do
        project.storage_version = nil

        expect(attachments_service_class).not_to receive(:new)

        service.execute
      end
    end

    context 'repository rollback' do
      let(:repository_service_class) { Projects::HashedStorage::RollbackRepositoryService }
      let(:repository_service) { repository_service_class.new(project, project.full_path, logger: logger) }

      it 'delegates rollback to RollbackRepositoryService' do
        project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]

        expect(repository_service_class).to receive(:new)
          .with(project, project.full_path, logger: logger)
          .and_return(repository_service)
        expect(repository_service).to receive(:execute)

        service.execute
      end

      it 'does not delegate rollback if repository is in legacy storage already' do
        project.storage_version = nil

        expect(repository_service_class).not_to receive(:new)

        service.execute
      end
    end
  end
end
require 'spec_helper'

-describe ProjectMigrateHashedStorageWorker, :clean_gitlab_redis_shared_state do
+describe HashedStorage::ProjectMigrateWorker, :clean_gitlab_redis_shared_state do
  include ExclusiveLeaseHelpers

  describe '#perform' do
...
# frozen_string_literal: true

require 'spec_helper'

describe HashedStorage::ProjectRollbackWorker, :clean_gitlab_redis_shared_state do
  include ExclusiveLeaseHelpers

  describe '#perform' do
    let(:project) { create(:project, :empty_repo) }
    let(:lease_key) { "project_migrate_hashed_storage_worker:#{project.id}" }
    let(:lease_timeout) { described_class::LEASE_TIMEOUT }
    let(:rollback_service) { ::Projects::HashedStorage::RollbackService }

    it 'skips when project no longer exists' do
      expect(rollback_service).not_to receive(:new)

      subject.perform(-1)
    end

    it 'skips when project is pending delete' do
      pending_delete_project = create(:project, :empty_repo, pending_delete: true)

      expect(rollback_service).not_to receive(:new)

      subject.perform(pending_delete_project.id)
    end

    it 'delegates rollback to service class when have exclusive lease' do
      stub_exclusive_lease(lease_key, 'uuid', timeout: lease_timeout)

      service_spy = spy

      allow(rollback_service)
        .to receive(:new).with(project, project.disk_path, logger: subject.logger)
        .and_return(service_spy)

      subject.perform(project.id)

      expect(service_spy).to have_received(:execute)
    end

    it 'skips when it cant acquire the exclusive lease' do
      stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)

      expect(rollback_service).not_to receive(:new)

      subject.perform(project.id)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

describe HashedStorage::RollbackerWorker do
  subject(:worker) { described_class.new }

  let(:projects) { create_list(:project, 2, :empty_repo) }
  let(:ids) { projects.map(&:id) }

  describe '#perform' do
    it 'delegates to MigratorService' do
      expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_rollback).with(start: 5, finish: 10)

      worker.perform(5, 10)
    end

    it 'rollsback projects in the specified range' do
      perform_enqueued_jobs do
        worker.perform(ids.min, ids.max)
      end

      projects.each do |project|
        expect(project.reload.legacy_storage?).to be_truthy
      end
    end
  end
end