Commit cd2f34b7 authored by Dmytro Zaporozhets (DZ)

Merge branch 'remove-transactionless-destroy-feature-flag' into 'master'

Remove project_transactionless_destroy flag [RUN ALL RSPEC] [RUN AS-IF-FOSS]

See merge request gitlab-org/gitlab!55795
parents 77fb4ebc fc194ab2
@@ -107,12 +107,7 @@ module Projects
       end
 
       project.leave_pool_repository
-
-      if Gitlab::Ci::Features.project_transactionless_destroy?(project)
-        destroy_project_related_records(project)
-      else
-        Project.transaction { destroy_project_related_records(project) }
-      end
+      destroy_project_related_records(project)
     end
 
     def destroy_project_related_records(project)
...
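In effect, the hunk above makes the transactionless path unconditional: destroy_project_related_records is now always called directly instead of being wrapped in Project.transaction when the flag was off. A minimal before/after sketch of that flow (method and class names taken from the hunk; surrounding code omitted):

    # Before this MR: behaviour gated by the feature flag.
    if Gitlab::Ci::Features.project_transactionless_destroy?(project)
      destroy_project_related_records(project)                          # flag on: no wrapping transaction
    else
      Project.transaction { destroy_project_related_records(project) }  # flag off: legacy transactional path
    end

    # After this MR: the flag check and the legacy path are gone.
    destroy_project_related_records(project)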
---
title: Remove project_transactionless_destroy feature flag
merge_request: 55795
author:
type: changed

---
name: project_transactionless_destroy
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39367
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/255972
milestone: '13.4'
type: development
group: group::continuous integration
default_enabled: false
@@ -20,93 +20,79 @@ RSpec.describe Projects::DestroyService do
     stub_container_registry_tags(repository: :any, tags: [])
   end
 
-  shared_examples 'project destroy ee' do
   context 'when project is a mirror' do
     let(:max_capacity) { Gitlab::CurrentSettings.mirror_max_capacity }
     let_it_be(:project_mirror) { create(:project, :mirror, :repository, :import_scheduled) }
     let(:result) { described_class.new(project_mirror, project_mirror.owner, {}).execute }
 
     before do
       Gitlab::Mirror.increment_capacity(project_mirror.id)
     end
 
     it 'decrements capacity if mirror was scheduled' do
       expect {result}.to change { Gitlab::Mirror.available_capacity }.from(max_capacity - 1).to(max_capacity)
     end
   end
 
   context 'when running on a primary node' do
     let_it_be(:primary) { create(:geo_node, :primary) }
     let_it_be(:secondary) { create(:geo_node) }
 
     before do
       stub_current_geo_node(primary)
     end
 
     it 'logs an event to the Geo event log' do
       # Run Sidekiq immediately to check that renamed repository will be removed
       Sidekiq::Testing.inline! do
         expect(subject).to receive(:log_destroy_events).and_call_original
         expect { subject.execute }.to change(Geo::RepositoryDeletedEvent, :count).by(1)
       end
     end
 
     it 'does not log event to the Geo log if project deletion fails' do
       expect(subject).to receive(:log_destroy_event).and_call_original
       expect(project).to receive(:destroy!).and_raise(StandardError.new('Other error message'))
 
       Sidekiq::Testing.inline! do
         expect { subject.execute }.not_to change(Geo::RepositoryDeletedEvent, :count)
       end
     end
   end
 
   context 'audit events' do
     include_examples 'audit event logging' do
       let(:operation) { subject.execute }
 
       let(:fail_condition!) do
         expect(project).to receive(:destroy!).and_raise(StandardError.new('Other error message'))
       end
 
       let(:attributes) do
         {
           author_id: user.id,
           entity_id: project.id,
           entity_type: 'Project',
           details: {
             remove: 'project',
             author_name: user.name,
             target_id: project.id,
             target_type: 'Project',
             target_details: project.full_path
           }
         }
       end
     end
   end
 
   context 'system hooks exception' do
     before do
       allow_any_instance_of(SystemHooksService).to receive(:execute_hooks_for).and_raise('something went wrong')
     end
 
     it 'logs an audit event' do
       expect(subject).to receive(:log_destroy_event).and_call_original
       expect { subject.execute }.to change(AuditEvent, :count)
     end
   end
-  end
-
-  context 'when project_transactionless_destroy enabled' do
-    it_behaves_like 'project destroy ee'
-  end
-
-  context 'when project_transactionless_destroy disabled', :sidekiq_inline do
-    before do
-      stub_feature_flags(project_transactionless_destroy: false)
-    end
-
-    it_behaves_like 'project destroy ee'
-  end
...
@@ -38,10 +38,6 @@ module Gitlab
         ::Feature.enabled?(:ci_disallow_to_create_merge_request_pipelines_in_target_project, target_project)
       end
 
-      def self.project_transactionless_destroy?(project)
-        Feature.enabled?(:project_transactionless_destroy, project, default_enabled: false)
-      end
-
       def self.trace_overwrite?
         ::Feature.enabled?(:ci_trace_overwrite, type: :ops, default_enabled: false)
       end
...
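With this helper gone, nothing reads the :project_transactionless_destroy flag any more, which is why the flag definition file and the flag-specific spec contexts are deleted in the same MR. For reference, a hedged sketch of how a development flag like this is exercised while it still exists (the first two calls appear verbatim elsewhere in this diff; enabling a flag from the Rails console via Feature.enable is the usual GitLab workflow, shown here only as an illustration):

    # In application code (the helper removed above):
    Feature.enabled?(:project_transactionless_destroy, project, default_enabled: false)

    # In specs, forcing the legacy code path (used in the removed spec contexts):
    stub_feature_flags(project_transactionless_destroy: false)

    # From the Rails console during rollout:
    # Feature.enable(:project_transactionless_destroy)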
@@ -85,357 +85,343 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
       end
     end
 
-  shared_examples 'project destroy' do
   it_behaves_like 'deleting the project'
 
   it 'invalidates personal_project_count cache' do
     expect(user).to receive(:invalidate_personal_projects_count)
 
     destroy_project(project, user, {})
   end
 
   it 'performs cancel for project ci pipelines' do
     expect(::Ci::AbortProjectPipelinesService).to receive_message_chain(:new, :execute).with(project)
 
     destroy_project(project, user, {})
   end
 
   context 'when project has remote mirrors' do
     let!(:project) do
       create(:project, :repository, namespace: user.namespace).tap do |project|
         project.remote_mirrors.create!(url: 'http://test.com')
       end
     end
 
     it 'destroys them' do
       expect(RemoteMirror.count).to eq(1)
 
       destroy_project(project, user, {})
 
       expect(RemoteMirror.count).to eq(0)
     end
   end
 
   context 'when project has exports' do
     let!(:project_with_export) do
       create(:project, :repository, namespace: user.namespace).tap do |project|
         create(:import_export_upload,
                project: project,
                export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
       end
     end
 
     it 'destroys project and export' do
       expect do
         destroy_project(project_with_export, user, {})
       end.to change(ImportExportUpload, :count).by(-1)
 
       expect(Project.all).not_to include(project_with_export)
     end
   end
 
   context 'Sidekiq fake' do
     before do
       # Dont run sidekiq to check if renamed repository exists
       Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
     end
 
     it { expect(Project.all).not_to include(project) }
 
     it do
       expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
     end
 
     it do
       expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
     end
   end
 
   context 'when flushing caches fail due to Git errors' do
     before do
       allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
       allow(Gitlab::GitLogger).to receive(:warn).with(
         class: Repositories::DestroyService.name,
         container_id: project.id,
         disk_path: project.disk_path,
         message: 'Gitlab::Git::CommandError').and_call_original
     end
 
     it_behaves_like 'deleting the project'
   end
 
   context 'when flushing caches fail due to Redis' do
     before do
       new_user = create(:user)
       project.team.add_user(new_user, Gitlab::Access::DEVELOPER)
       allow_any_instance_of(described_class).to receive(:flush_caches).and_raise(::Redis::CannotConnectError)
     end
 
     it 'keeps project team intact upon an error' do
       perform_enqueued_jobs do
         destroy_project(project, user, {})
       rescue ::Redis::CannotConnectError
       end
 
       expect(project.team.members.count).to eq 2
     end
   end
 
   context 'with async_execute', :sidekiq_inline do
     let(:async) { true }
 
     context 'async delete of project with private issue visibility' do
       before do
         project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
       end
 
       it_behaves_like 'deleting the project'
     end
 
     it_behaves_like 'deleting the project with pipeline and build'
 
     context 'errors' do
       context 'when `remove_legacy_registry_tags` fails' do
         before do
           expect_any_instance_of(described_class)
             .to receive(:remove_legacy_registry_tags).and_return(false)
         end
 
         it_behaves_like 'handles errors thrown during async destroy', "Failed to remove some tags"
       end
 
       context 'when `remove_repository` fails' do
         before do
           expect_any_instance_of(described_class)
             .to receive(:remove_repository).and_return(false)
         end
 
         it_behaves_like 'handles errors thrown during async destroy', "Failed to remove project repository"
       end
 
       context 'when `execute` raises expected error' do
         before do
           expect_any_instance_of(Project)
             .to receive(:destroy!).and_raise(StandardError.new("Other error message"))
         end
 
         it_behaves_like 'handles errors thrown during async destroy', "Other error message"
       end
 
       context 'when `execute` raises unexpected error' do
         before do
           expect_any_instance_of(Project)
             .to receive(:destroy!).and_raise(Exception.new('Other error message'))
         end
 
         it 'allows error to bubble up and rolls back project deletion' do
           expect do
             destroy_project(project, user, {})
           end.to raise_error(Exception, 'Other error message')
 
           expect(project.reload.pending_delete).to be(false)
           expect(project.delete_error).to include("Other error message")
         end
       end
     end
   end
 
   describe 'container registry' do
     context 'when there are regular container repositories' do
       let(:container_repository) { create(:container_repository) }
 
       before do
         stub_container_registry_tags(repository: project.full_path + '/image',
                                      tags: ['tag'])
         project.container_repositories << container_repository
       end
 
       context 'when image repository deletion succeeds' do
         it 'removes tags' do
           expect_any_instance_of(ContainerRepository)
             .to receive(:delete_tags!).and_return(true)
 
           destroy_project(project, user)
         end
       end
 
       context 'when image repository deletion fails' do
         it 'raises an exception' do
           expect_any_instance_of(ContainerRepository)
             .to receive(:delete_tags!).and_raise(RuntimeError)
 
           expect(destroy_project(project, user)).to be false
         end
       end
 
       context 'when registry is disabled' do
         before do
           stub_container_registry_config(enabled: false)
         end
 
         it 'does not attempting to remove any tags' do
           expect(Projects::ContainerRepository::DestroyService).not_to receive(:new)
 
           destroy_project(project, user)
         end
       end
     end
 
     context 'when there are tags for legacy root repository' do
       before do
         stub_container_registry_tags(repository: project.full_path,
                                      tags: ['tag'])
       end
 
       context 'when image repository tags deletion succeeds' do
         it 'removes tags' do
           expect_any_instance_of(ContainerRepository)
             .to receive(:delete_tags!).and_return(true)
 
           destroy_project(project, user)
         end
       end
 
       context 'when image repository tags deletion fails' do
         it 'raises an exception' do
           expect_any_instance_of(ContainerRepository)
             .to receive(:delete_tags!).and_return(false)
 
           expect(destroy_project(project, user)).to be false
         end
       end
     end
   end
 
   context 'for a forked project with LFS objects' do
     let(:forked_project) { fork_project(project, user) }
 
     before do
       project.lfs_objects << create(:lfs_object)
       forked_project.reload
     end
 
     it 'destroys the fork' do
       expect { destroy_project(forked_project, user) }
         .not_to raise_error
     end
   end
 
   context 'as the root of a fork network' do
     let!(:fork_1) { fork_project(project, user) }
     let!(:fork_2) { fork_project(project, user) }
 
     it 'updates the fork network with the project name' do
       fork_network = project.fork_network
 
       destroy_project(project, user)
 
       fork_network.reload
 
       expect(fork_network.deleted_root_project_name).to eq(project.full_name)
       expect(fork_network.root_project).to be_nil
     end
   end
 
   context 'repository +deleted path removal' do
     context 'regular phase' do
       it 'schedules +deleted removal of existing repos' do
         service = described_class.new(project, user, {})
         allow(service).to receive(:schedule_stale_repos_removal)
 
         expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
         expect(GitlabShellWorker).to receive(:perform_in)
           .with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
 
         service.execute
       end
     end
 
     context 'stale cleanup' do
       let(:async) { true }
 
       it 'schedules +deleted wiki and repo removal' do
         allow(ProjectDestroyWorker).to receive(:perform_async)
 
         expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
         expect(GitlabShellWorker).to receive(:perform_in)
           .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
 
         expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
         expect(GitlabShellWorker).to receive(:perform_in)
           .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
 
         destroy_project(project, user, {})
       end
     end
   end
 
   context 'snippets' do
     let!(:snippet1) { create(:project_snippet, project: project, author: user) }
     let!(:snippet2) { create(:project_snippet, project: project, author: user) }
 
     it 'does not include snippets when deleting in batches' do
       expect(project).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:container_repositories, :snippets] })
 
       destroy_project(project, user)
     end
 
     it 'calls the bulk snippet destroy service' do
       expect(project.snippets.count).to eq 2
 
       expect(Snippets::BulkDestroyService).to receive(:new)
         .with(user, project.snippets).and_call_original
 
       expect do
         destroy_project(project, user)
       end.to change(Snippet, :count).by(-2)
     end
 
     context 'when an error is raised deleting snippets' do
       it 'does not delete project' do
         allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
           allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
         end
 
         expect(destroy_project(project, user)).to be_falsey
         expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
       end
     end
   end
 
   context 'error while destroying', :sidekiq_inline do
     let!(:pipeline) { create(:ci_pipeline, project: project) }
     let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) }
     let!(:build_trace) { create(:ci_build_trace_chunk, build: builds[0]) }
 
     it 'deletes on retry' do
       # We can expect this to timeout for very large projects
       # TODO: remove allow_next_instance_of: https://gitlab.com/gitlab-org/gitlab/-/issues/220440
       allow_any_instance_of(Ci::Build).to receive(:destroy).and_raise('boom')
       destroy_project(project, user, {})
 
       allow_any_instance_of(Ci::Build).to receive(:destroy).and_call_original
       destroy_project(project, user, {})
 
       expect(Project.unscoped.all).not_to include(project)
       expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
       expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
       expect(project.all_pipelines).to be_empty
       expect(project.builds).to be_empty
     end
   end
-  end
-
-  context 'when project_transactionless_destroy enabled' do
-    it_behaves_like 'project destroy'
-  end
-
-  context 'when project_transactionless_destroy disabled', :sidekiq_inline do
-    before do
-      stub_feature_flags(project_transactionless_destroy: false)
-    end
-
-    it_behaves_like 'project destroy'
-  end
 
   def destroy_project(project, user, params = {})
...