Commit 59aa8e51 authored by Douglas Barbosa Alexandre

Merge branch 'ab-remove-transaction-from-artifact-deletion' into 'master'

Remove project destroy transaction behind flag

See merge request gitlab-org/gitlab!39367
parents 7b550280 013ca207
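In short, this MR stops wrapping the entire project-destroy cleanup in a single database transaction. The cleanup body moves into a new destroy_project_related_records method, and the transaction is kept only while the new project_transactionless_destroy feature flag is disabled. A minimal sketch of the gating pattern, using only names that appear in the diff below (elided steps marked in comments):

def attempt_destroy(project)
  # ... registry tag removal and leave_pool_repository elided ...
  if Gitlab::Ci::Features.project_transactionless_destroy?(project)
    destroy_project_related_records(project)  # new path: no wrapping transaction
  else
    Project.transaction { destroy_project_related_records(project) }  # previous behaviour
  end
end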
@@ -28,7 +28,7 @@ module Projects
Projects::UnlinkForkService.new(project, current_user).execute
attempt_destroy_transaction(project)
attempt_destroy(project)
system_hook_service.execute_hooks_for(project, :destroy)
log_info("Project \"#{project.full_path}\" was deleted")
@@ -98,29 +98,35 @@ module Projects
log_error("Deletion failed on #{project.full_path} with the following message: #{message}")
end
def attempt_destroy_transaction(project)
def attempt_destroy(project)
unless remove_registry_tags
raise_error(s_('DeleteProject|Failed to remove some tags in project container registry. Please try again or contact administrator.'))
end
project.leave_pool_repository
Project.transaction do
log_destroy_event
trash_relation_repositories!
trash_project_repositories!
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
#
# Exclude container repositories because its before_destroy would be
# called multiple times, and it doesn't destroy any database records.
project.destroy_dependent_associations_in_batches(exclude: [:container_repositories, :snippets])
project.destroy!
if Gitlab::Ci::Features.project_transactionless_destroy?(project)
destroy_project_related_records(project)
else
Project.transaction { destroy_project_related_records(project) }
end
end
def destroy_project_related_records(project)
log_destroy_event
trash_relation_repositories!
trash_project_repositories!
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
#
# Exclude container repositories because its before_destroy would be
# called multiple times, and it doesn't destroy any database records.
project.destroy_dependent_associations_in_batches(exclude: [:container_repositories, :snippets])
project.destroy!
end
def log_destroy_event
log_info("Attempting to destroy #{project.full_path} (#{project.id})")
end
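The comment above points at rails/rails#22510: destroying a parent with plain dependent associations loads every child record into memory first, which is why the service calls the destroy_dependent_associations_in_batches helper instead. That helper's implementation is not part of this diff; as a generic, hypothetical illustration of batched destruction in ActiveRecord (association name invented for the example):

# Yields children in batches of 100 and destroys them one at a time,
# instead of materialising the whole association in memory at once.
project.some_dependent_records.find_each(batch_size: 100, &:destroy)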
@@ -17,10 +17,30 @@ module EE
end
end
override :log_destroy_event
def log_destroy_event
super
# Removes physical repository in a Geo replicated secondary node
# There is no need to do any database operation as it will be
# replicated by itself.
def geo_replicate
return unless ::Gitlab::Geo.secondary?
# Flush the cache for both repositories. This has to be done _before_
# removing the physical repositories as some expiration code depends on
# Git data (e.g. a list of branch names).
flush_caches(project)
trash_project_repositories!
log_info("Project \"#{project.name}\" was removed")
end
private
override :destroy_project_related_records
def destroy_project_related_records(project)
super && log_destroy_events
end
def log_destroy_events
log_geo_event(project)
log_audit_event(project)
end
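Reading the new EE override: destroy_project_related_records now chains super && log_destroy_events, so the Geo deleted-repository event and the audit event are only recorded once the CE deletion path (which ends in project.destroy!) has completed successfully, rather than being triggered from log_destroy_event at the start of the destroy as before.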
@@ -39,24 +59,6 @@ module EE
).create!
end
# Removes physical repository in a Geo replicated secondary node
# There is no need to do any database operation as it will be
# replicated by itself.
def geo_replicate
return unless ::Gitlab::Geo.secondary?
# Flush the cache for both repositories. This has to be done _before_
# removing the physical repositories as some expiration code depends on
# Git data (e.g. a list of branch names).
flush_caches(project)
trash_project_repositories!
log_info("Project \"#{project.name}\" was removed")
end
private
def log_audit_event(project)
::AuditEventService.new(
current_user,
@@ -20,79 +20,93 @@ RSpec.describe Projects::DestroyService do
stub_container_registry_tags(repository: :any, tags: [])
end
context 'when project is a mirror' do
it 'decrements capacity if mirror was scheduled' do
max_capacity = Gitlab::CurrentSettings.mirror_max_capacity
project_mirror = create(:project, :mirror, :repository, :import_scheduled)
Gitlab::Mirror.increment_capacity(project_mirror.id)
shared_examples 'project destroy ee' do
context 'when project is a mirror' do
let(:max_capacity) { Gitlab::CurrentSettings.mirror_max_capacity }
let_it_be(:project_mirror) { create(:project, :mirror, :repository, :import_scheduled) }
let(:result) { described_class.new(project_mirror, project_mirror.owner, {}).execute }
before do
Gitlab::Mirror.increment_capacity(project_mirror.id)
end
expect do
described_class.new(project_mirror, project_mirror.owner, {}).execute
end.to change { Gitlab::Mirror.available_capacity }.from(max_capacity - 1).to(max_capacity)
it 'decrements capacity if mirror was scheduled' do
expect {result}.to change { Gitlab::Mirror.available_capacity }.from(max_capacity - 1).to(max_capacity)
end
end
end
context 'when running on a primary node' do
let_it_be(:primary) { create(:geo_node, :primary) }
let_it_be(:secondary) { create(:geo_node) }
context 'when running on a primary node' do
let_it_be(:primary) { create(:geo_node, :primary) }
let_it_be(:secondary) { create(:geo_node) }
before do
stub_current_geo_node(primary)
end
before do
stub_current_geo_node(primary)
end
it 'logs an event to the Geo event log' do
# Run Sidekiq immediately to check that renamed repository will be removed
Sidekiq::Testing.inline! do
expect(subject).to receive(:log_destroy_events).and_call_original
expect { subject.execute }.to change(Geo::RepositoryDeletedEvent, :count).by(1)
end
end
it 'logs an event to the Geo event log' do
# Run Sidekiq immediately to check that renamed repository will be removed
Sidekiq::Testing.inline! do
it 'does not log event to the Geo log if project deletion fails' do
expect(subject).to receive(:log_destroy_event).and_call_original
expect { subject.execute }.to change(Geo::RepositoryDeletedEvent, :count).by(1)
expect(project).to receive(:destroy!).and_raise(StandardError.new('Other error message'))
Sidekiq::Testing.inline! do
expect { subject.execute }.not_to change(Geo::RepositoryDeletedEvent, :count)
end
end
end
it 'does not log event to the Geo log if project deletion fails' do
expect(subject).to receive(:log_destroy_event).and_call_original
expect_any_instance_of(Project)
.to receive(:destroy!).and_raise(StandardError.new('Other error message'))
Sidekiq::Testing.inline! do
expect { subject.execute }.not_to change(Geo::RepositoryDeletedEvent, :count)
context 'audit events' do
include_examples 'audit event logging' do
let(:operation) { subject.execute }
let(:fail_condition!) do
expect(project).to receive(:destroy!).and_raise(StandardError.new('Other error message'))
end
let(:attributes) do
{
author_id: user.id,
entity_id: project.id,
entity_type: 'Project',
details: {
remove: 'project',
author_name: user.name,
target_id: project.full_path,
target_type: 'Project',
target_details: project.full_path
}
}
end
end
end
end
context 'audit events' do
include_examples 'audit event logging' do
let(:operation) { subject.execute }
let(:fail_condition!) do
expect_any_instance_of(Project)
.to receive(:destroy!).and_raise(StandardError.new('Other error message'))
context 'system hooks exception' do
before do
allow_any_instance_of(SystemHooksService).to receive(:execute_hooks_for).and_raise('something went wrong')
end
let(:attributes) do
{
author_id: user.id,
entity_id: project.id,
entity_type: 'Project',
details: {
remove: 'project',
author_name: user.name,
target_id: project.full_path,
target_type: 'Project',
target_details: project.full_path
}
}
it 'logs an audit event' do
expect(subject).to receive(:log_destroy_event).and_call_original
expect { subject.execute }.to change(AuditEvent, :count)
end
end
end
context 'system hooks exception' do
context 'when project_transactionless_destroy enabled' do
it_behaves_like 'project destroy ee'
end
context 'when project_transactionless_destroy disabled', :sidekiq_inline do
before do
allow_any_instance_of(SystemHooksService).to receive(:execute_hooks_for).and_raise('something went wrong')
stub_feature_flags(project_transactionless_destroy: false)
end
it 'logs an audit event' do
expect(subject).to receive(:log_destroy_event).and_call_original
expect { subject.execute }.to change(AuditEvent, :count)
end
it_behaves_like 'project destroy ee'
end
end
@@ -79,6 +79,10 @@ module Gitlab
def self.expand_names_for_cross_pipeline_artifacts?(project)
::Feature.enabled?(:ci_expand_names_for_cross_pipeline_artifacts, project)
end
def self.project_transactionless_destroy?(project)
Feature.enabled?(:project_transactionless_destroy, project, default_enabled: false)
end
end
end
end
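project_transactionless_destroy? is an actor-gated feature flag check with default_enabled: false, so the transactionless path stays off until the flag is explicitly enabled. Assuming GitLab's usual Feature API (the flag name comes from this diff; the project lookup is purely illustrative), it would typically be toggled from a Rails console roughly like this:

# Enable for a single project (actor gate), enable globally, or roll back.
Feature.enable(:project_transactionless_destroy, Project.find(42))
Feature.enable(:project_transactionless_destroy)
Feature.disable(:project_transactionless_destroy)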
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Projects::DestroyService do
RSpec.describe Projects::DestroyService, :aggregate_failures do
include ProjectForksHelper
let_it_be(:user) { create(:user) }
@@ -60,317 +60,353 @@ RSpec.describe Projects::DestroyService do
end
end
it_behaves_like 'deleting the project'
it 'invalidates personal_project_count cache' do
expect(user).to receive(:invalidate_personal_projects_count)
destroy_project(project, user, {})
end
context 'when project has remote mirrors' do
let!(:project) do
create(:project, :repository, namespace: user.namespace).tap do |project|
project.remote_mirrors.create(url: 'http://test.com')
end
end
shared_examples 'project destroy' do
it_behaves_like 'deleting the project'
it 'destroys them' do
expect(RemoteMirror.count).to eq(1)
it 'invalidates personal_project_count cache' do
expect(user).to receive(:invalidate_personal_projects_count)
destroy_project(project, user, {})
expect(RemoteMirror.count).to eq(0)
end
end
context 'when project has exports' do
let!(:project_with_export) do
create(:project, :repository, namespace: user.namespace).tap do |project|
create(:import_export_upload,
project: project,
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
context 'when project has remote mirrors' do
let!(:project) do
create(:project, :repository, namespace: user.namespace).tap do |project|
project.remote_mirrors.create(url: 'http://test.com')
end
end
end
it 'destroys project and export' do
expect do
destroy_project(project_with_export, user, {})
end.to change(ImportExportUpload, :count).by(-1)
it 'destroys them' do
expect(RemoteMirror.count).to eq(1)
expect(Project.all).not_to include(project_with_export)
end
end
destroy_project(project, user, {})
context 'Sidekiq fake' do
before do
# Dont run sidekiq to check if renamed repository exists
Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
expect(RemoteMirror.count).to eq(0)
end
end
it { expect(Project.all).not_to include(project) }
it do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
context 'when project has exports' do
let!(:project_with_export) do
create(:project, :repository, namespace: user.namespace).tap do |project|
create(:import_export_upload,
project: project,
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
end
end
it do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
end
end
it 'destroys project and export' do
expect do
destroy_project(project_with_export, user, {})
end.to change(ImportExportUpload, :count).by(-1)
context 'when flushing caches fail due to Git errors' do
before do
allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
allow(Gitlab::GitLogger).to receive(:warn).with(
class: Repositories::DestroyService.name,
container_id: project.id,
disk_path: project.disk_path,
message: 'Gitlab::Git::CommandError').and_call_original
expect(Project.all).not_to include(project_with_export)
end
end
it_behaves_like 'deleting the project'
end
context 'Sidekiq fake' do
before do
# Dont run sidekiq to check if renamed repository exists
Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
end
context 'when flushing caches fail due to Redis' do
before do
new_user = create(:user)
project.team.add_user(new_user, Gitlab::Access::DEVELOPER)
allow_any_instance_of(described_class).to receive(:flush_caches).and_raise(::Redis::CannotConnectError)
end
it { expect(Project.all).not_to include(project) }
it 'keeps project team intact upon an error' do
perform_enqueued_jobs do
destroy_project(project, user, {})
rescue ::Redis::CannotConnectError
it do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
expect(project.team.members.count).to eq 2
it do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
end
end
end
context 'with async_execute', :sidekiq_inline do
let(:async) { true }
context 'async delete of project with private issue visibility' do
context 'when flushing caches fail due to Git errors' do
before do
project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
allow(Gitlab::GitLogger).to receive(:warn).with(
class: Repositories::DestroyService.name,
container_id: project.id,
disk_path: project.disk_path,
message: 'Gitlab::Git::CommandError').and_call_original
end
it_behaves_like 'deleting the project'
end
it_behaves_like 'deleting the project with pipeline and build'
context 'when flushing caches fail due to Redis' do
before do
new_user = create(:user)
project.team.add_user(new_user, Gitlab::Access::DEVELOPER)
allow_any_instance_of(described_class).to receive(:flush_caches).and_raise(::Redis::CannotConnectError)
end
context 'errors' do
context 'when `remove_legacy_registry_tags` fails' do
before do
expect_any_instance_of(described_class)
.to receive(:remove_legacy_registry_tags).and_return(false)
it 'keeps project team intact upon an error' do
perform_enqueued_jobs do
destroy_project(project, user, {})
rescue ::Redis::CannotConnectError
end
it_behaves_like 'handles errors thrown during async destroy', "Failed to remove some tags"
expect(project.team.members.count).to eq 2
end
end
context 'with async_execute', :sidekiq_inline do
let(:async) { true }
context 'when `remove_repository` fails' do
context 'async delete of project with private issue visibility' do
before do
expect_any_instance_of(described_class)
.to receive(:remove_repository).and_return(false)
project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
end
it_behaves_like 'handles errors thrown during async destroy', "Failed to remove project repository"
it_behaves_like 'deleting the project'
end
context 'when `execute` raises expected error' do
before do
expect_any_instance_of(Project)
.to receive(:destroy!).and_raise(StandardError.new("Other error message"))
it_behaves_like 'deleting the project with pipeline and build'
context 'errors' do
context 'when `remove_legacy_registry_tags` fails' do
before do
expect_any_instance_of(described_class)
.to receive(:remove_legacy_registry_tags).and_return(false)
end
it_behaves_like 'handles errors thrown during async destroy', "Failed to remove some tags"
end
it_behaves_like 'handles errors thrown during async destroy', "Other error message"
end
context 'when `remove_repository` fails' do
before do
expect_any_instance_of(described_class)
.to receive(:remove_repository).and_return(false)
end
context 'when `execute` raises unexpected error' do
before do
expect_any_instance_of(Project)
.to receive(:destroy!).and_raise(Exception.new('Other error message'))
it_behaves_like 'handles errors thrown during async destroy', "Failed to remove project repository"
end
it 'allows error to bubble up and rolls back project deletion' do
expect do
destroy_project(project, user, {})
end.to raise_error(Exception, 'Other error message')
context 'when `execute` raises expected error' do
before do
expect_any_instance_of(Project)
.to receive(:destroy!).and_raise(StandardError.new("Other error message"))
end
expect(project.reload.pending_delete).to be(false)
expect(project.delete_error).to include("Other error message")
it_behaves_like 'handles errors thrown during async destroy', "Other error message"
end
end
end
end
describe 'container registry' do
context 'when there are regular container repositories' do
let(:container_repository) { create(:container_repository) }
context 'when `execute` raises unexpected error' do
before do
expect_any_instance_of(Project)
.to receive(:destroy!).and_raise(Exception.new('Other error message'))
end
before do
stub_container_registry_tags(repository: project.full_path + '/image',
tags: ['tag'])
project.container_repositories << container_repository
it 'allows error to bubble up and rolls back project deletion' do
expect do
destroy_project(project, user, {})
end.to raise_error(Exception, 'Other error message')
expect(project.reload.pending_delete).to be(false)
expect(project.delete_error).to include("Other error message")
end
end
end
end
context 'when image repository deletion succeeds' do
it 'removes tags' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(true)
describe 'container registry' do
context 'when there are regular container repositories' do
let(:container_repository) { create(:container_repository) }
destroy_project(project, user)
before do
stub_container_registry_tags(repository: project.full_path + '/image',
tags: ['tag'])
project.container_repositories << container_repository
end
end
context 'when image repository deletion fails' do
it 'raises an exception' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_raise(RuntimeError)
context 'when image repository deletion succeeds' do
it 'removes tags' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(true)
expect(destroy_project(project, user)).to be false
destroy_project(project, user)
end
end
end
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false)
context 'when image repository deletion fails' do
it 'raises an exception' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_raise(RuntimeError)
expect(destroy_project(project, user)).to be false
end
end
it 'does not attempting to remove any tags' do
expect(Projects::ContainerRepository::DestroyService).not_to receive(:new)
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false)
end
destroy_project(project, user)
it 'does not attempting to remove any tags' do
expect(Projects::ContainerRepository::DestroyService).not_to receive(:new)
destroy_project(project, user)
end
end
end
end
context 'when there are tags for legacy root repository' do
before do
stub_container_registry_tags(repository: project.full_path,
tags: ['tag'])
end
context 'when there are tags for legacy root repository' do
before do
stub_container_registry_tags(repository: project.full_path,
tags: ['tag'])
end
context 'when image repository tags deletion succeeds' do
it 'removes tags' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(true)
context 'when image repository tags deletion succeeds' do
it 'removes tags' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(true)
destroy_project(project, user)
destroy_project(project, user)
end
end
end
context 'when image repository tags deletion fails' do
it 'raises an exception' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(false)
context 'when image repository tags deletion fails' do
it 'raises an exception' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(false)
expect(destroy_project(project, user)).to be false
expect(destroy_project(project, user)).to be false
end
end
end
end
end
context 'for a forked project with LFS objects' do
let(:forked_project) { fork_project(project, user) }
context 'for a forked project with LFS objects' do
let(:forked_project) { fork_project(project, user) }
before do
project.lfs_objects << create(:lfs_object)
forked_project.reload
end
before do
project.lfs_objects << create(:lfs_object)
forked_project.reload
end
it 'destroys the fork' do
expect { destroy_project(forked_project, user) }
.not_to raise_error
it 'destroys the fork' do
expect { destroy_project(forked_project, user) }
.not_to raise_error
end
end
end
context 'as the root of a fork network' do
let!(:fork_1) { fork_project(project, user) }
let!(:fork_2) { fork_project(project, user) }
context 'as the root of a fork network' do
let!(:fork_1) { fork_project(project, user) }
let!(:fork_2) { fork_project(project, user) }
it 'updates the fork network with the project name' do
fork_network = project.fork_network
it 'updates the fork network with the project name' do
fork_network = project.fork_network
destroy_project(project, user)
destroy_project(project, user)
fork_network.reload
fork_network.reload
expect(fork_network.deleted_root_project_name).to eq(project.full_name)
expect(fork_network.root_project).to be_nil
expect(fork_network.deleted_root_project_name).to eq(project.full_name)
expect(fork_network.root_project).to be_nil
end
end
end
context 'repository +deleted path removal' do
context 'regular phase' do
it 'schedules +deleted removal of existing repos' do
service = described_class.new(project, user, {})
allow(service).to receive(:schedule_stale_repos_removal)
context 'repository +deleted path removal' do
context 'regular phase' do
it 'schedules +deleted removal of existing repos' do
service = described_class.new(project, user, {})
allow(service).to receive(:schedule_stale_repos_removal)
expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
service.execute
service.execute
end
end
end
context 'stale cleanup' do
let(:async) { true }
context 'stale cleanup' do
let(:async) { true }
it 'schedules +deleted wiki and repo removal' do
allow(ProjectDestroyWorker).to receive(:perform_async)
it 'schedules +deleted wiki and repo removal' do
allow(ProjectDestroyWorker).to receive(:perform_async)
expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
expect(GitlabShellWorker).to receive(:perform_in)
.with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
destroy_project(project, user, {})
destroy_project(project, user, {})
end
end
end
end
context 'snippets' do
let!(:snippet1) { create(:project_snippet, project: project, author: user) }
let!(:snippet2) { create(:project_snippet, project: project, author: user) }
context 'snippets' do
let!(:snippet1) { create(:project_snippet, project: project, author: user) }
let!(:snippet2) { create(:project_snippet, project: project, author: user) }
it 'does not include snippets when deleting in batches' do
expect(project).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:container_repositories, :snippets] })
it 'does not include snippets when deleting in batches' do
expect(project).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:container_repositories, :snippets] })
destroy_project(project, user)
end
destroy_project(project, user)
end
it 'calls the bulk snippet destroy service' do
expect(project.snippets.count).to eq 2
it 'calls the bulk snippet destroy service' do
expect(project.snippets.count).to eq 2
expect(Snippets::BulkDestroyService).to receive(:new)
.with(user, project.snippets).and_call_original
expect(Snippets::BulkDestroyService).to receive(:new)
.with(user, project.snippets).and_call_original
expect do
destroy_project(project, user)
end.to change(Snippet, :count).by(-2)
end
expect do
destroy_project(project, user)
end.to change(Snippet, :count).by(-2)
end
context 'when an error is raised deleting snippets' do
it 'does not delete project' do
allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
context 'when an error is raised deleting snippets' do
it 'does not delete project' do
allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
end
expect(destroy_project(project, user)).to be_falsey
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
end
end
end
expect(destroy_project(project, user)).to be_falsey
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
context 'error while destroying', :sidekiq_inline do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) }
let!(:build_trace) { create(:ci_build_trace_chunk, build: builds[0]) }
it 'deletes on retry' do
# We can expect this to timeout for very large projects
# TODO: remove allow_next_instance_of: https://gitlab.com/gitlab-org/gitlab/-/issues/220440
allow_any_instance_of(Ci::Build).to receive(:destroy).and_raise('boom')
destroy_project(project, user, {})
allow_any_instance_of(Ci::Build).to receive(:destroy).and_call_original
destroy_project(project, user, {})
expect(Project.unscoped.all).not_to include(project)
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
expect(project.all_pipelines).to be_empty
expect(project.builds).to be_empty
end
end
end
context 'when project_transactionless_destroy enabled' do
it_behaves_like 'project destroy'
end
context 'when project_transactionless_destroy disabled', :sidekiq_inline do
before do
stub_feature_flags(project_transactionless_destroy: false)
end
it_behaves_like 'project destroy'
end
def destroy_project(project, user, params = {})
described_class.new(project, user, params).public_send(async ? :async_execute : :execute)
end
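The CE spec mirrors the EE one: the existing examples are wrapped in shared_examples 'project destroy' and run twice, once with the flag left at its test default and once with project_transactionless_destroy stubbed off. A stripped-down sketch of that pattern with hypothetical service, model, and flag names:

RSpec.describe SomeService do
  shared_examples 'service behaviour' do
    it 'deletes the record' do
      expect { described_class.new.execute }.to change(SomeModel, :count).by(-1)
    end
  end

  context 'when some_flag is enabled' do
    it_behaves_like 'service behaviour'
  end

  context 'when some_flag is disabled' do
    before do
      stub_feature_flags(some_flag: false)
    end

    it_behaves_like 'service behaviour'
  end
end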