Commit b4bdcd48 authored by Dylan Griffith's avatar Dylan Griffith

Merge branch '346236-destroy-security-findings' into 'master'

Move findings deletion into a worker using pub-sub system for `Ci::JobArtifacts::DestroyBatchService`

See merge request gitlab-org/gitlab!80096
parents 952a3aa2 1a231d0e
# frozen_string_literal: true

module Ci
  # Event published by Ci::JobArtifacts::DestroyBatchService after a batch of
  # job artifacts has been deleted. Subscribers (e.g.
  # Security::Findings::DeleteByJobIdWorker) receive the ids of the affected
  # CI jobs in `data[:job_ids]`.
  class JobArtifactsDeletedEvent < ::Gitlab::EventStore::Event
    # JSON schema used by Gitlab::EventStore::Event to validate `data`.
    #
    # NOTE: for arrays, JSON Schema validates the elements with the `items`
    # keyword; the previous `'properties' => { 'job_id' => ... }` form is
    # ignored for array instances, so element types were never checked.
    def schema
      {
        'type' => 'object',
        'required' => ['job_ids'],
        'properties' => {
          'job_ids' => {
            'type' => 'array',
            'items' => { 'type' => 'integer' }
          }
        }
      }
    end
  end
end
......@@ -413,6 +413,8 @@
- 1
- - security_auto_fix
- 1
- - security_findings_delete_by_job_id
- 1
- - security_scans
- 2
- - self_monitoring_project_create
......
......@@ -8,22 +8,18 @@ module EE
private
# Overrides the CE hook to also clean up EE-only records tied to the
# destroyed artifacts' jobs.
# NOTE(review): this hunk is diff residue — the MR appears to REMOVE the
# cross-database-modification allowance (findings deletion moves to a
# Sidekiq worker via pub/sub); verify against the applied file before
# relying on this wrapper still existing.
override :destroy_related_records
def destroy_related_records(artifacts)
# Temporarily permitted cross-DB write inside the CI transaction; tracked
# by the referenced issue.
::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/346236') do
destroy_security_findings(artifacts)
end
end
# Overrides the CE hook that runs after each destroyed batch; EE adds
# Geo event records so secondaries learn about the deleted artifacts.
override :after_batch_destroy_hook
def after_batch_destroy_hook(artifacts)
insert_geo_event_records(artifacts)
end
# NOTE(review): this hunk is diff residue mixing the OLD direct
# `Security::Finding` deletion with the NEW event-publishing path added by
# this MR — in the applied file only one of the two should remain; verify
# against the merged version.
def destroy_security_findings(artifacts)
job_ids = artifacts.map(&:job_id)
# This DestroyBatchService is used from different services.
# One of them is when pipeline is destroyed, and then eventually call DestroyBatchService via DestroyAssociationsService.
# So in this case even if it is invoked after a transaction but it is still under Ci::Pipeline.transaction.
Sidekiq::Worker.skipping_transaction_check do
# Publish the deletion event; subscribed workers (e.g.
# Security::Findings::DeleteByJobIdWorker) perform the actual cleanup.
::Gitlab::EventStore.publish(
::Ci::JobArtifactsDeletedEvent.new(data: { job_ids: artifacts.map(&:job_id) })
)
end
# Presumably the pre-MR synchronous deletion path — TODO confirm removed.
::Security::Finding.by_build_ids(job_ids).delete_all
insert_geo_event_records(artifacts)
end
def insert_geo_event_records(artifacts)
......
......@@ -1282,6 +1282,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: security_findings_delete_by_job_id
:worker_name: Security::Findings::DeleteByJobIdWorker
:feature_category: :vulnerability_management
:has_external_dependencies:
:urgency: :low
:resource_boundary: :cpu
:weight: 1
:idempotent: true
:tags: []
- :name: set_user_status_based_on_user_cap_setting
:worker_name: SetUserStatusBasedOnUserCapSettingWorker
:feature_category: :users
......
# frozen_string_literal: true

module Security
  module Findings
    # Subscriber worker for Ci::JobArtifactsDeletedEvent: removes the
    # Security::Finding rows belonging to the CI builds whose artifacts
    # were just destroyed.
    class DeleteByJobIdWorker
      include ApplicationWorker
      include Gitlab::EventStore::Subscriber

      feature_category :vulnerability_management
      urgency :low
      worker_resource_boundary :cpu
      data_consistency :always

      # Deleting the same findings twice is a no-op, so the job is safe
      # to retry.
      idempotent!

      # @param event [Ci::JobArtifactsDeletedEvent] carries `job_ids`
      #   in its data payload.
      def handle_event(event)
        build_ids = event.data[:job_ids]
        findings = ::Security::Finding.by_build_ids(build_ids)
        findings.delete_all
      end
    end
  end
end
......@@ -24,6 +24,7 @@ module EE
# Add EE only subscriptions here:
store.subscribe ::GitlabSubscriptions::NotifySeatsExceededWorker, to: ::Members::MembersAddedEvent
store.subscribe ::Security::Findings::DeleteByJobIdWorker, to: ::Ci::JobArtifactsDeletedEvent
end
end
end
......
......@@ -9,7 +9,8 @@ RSpec.describe Gitlab::EventStore do
expect(instance.subscriptions.keys).to include(
::Ci::PipelineCreatedEvent,
::Members::MembersAddedEvent
::Members::MembersAddedEvent,
::Ci::JobArtifactsDeletedEvent
)
end
end
......
......@@ -20,11 +20,25 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
context 'when artifact is expired' do
context 'when artifact is not locked' do
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
let(:event_data) { { job_ids: [artifact.job_id] } }
it 'destroys job artifact and the security finding' do
it 'destroys job artifact and the security finding', :sidekiq_inline do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
.and change { Security::Finding.count }.by(-1)
end
it 'publishes Ci::JobArtifactsDeletedEvent' do
event = double(:event)
expect(Ci::JobArtifactsDeletedEvent)
.to receive(:new)
.with(data: event_data)
.and_return(event)
expect(Gitlab::EventStore).to receive(:publish).with(event)
subject
end
end
context 'when artifact is locked' do
......@@ -55,23 +69,6 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
end
context 'when failed to destroy artifact' do
before do
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 10)
expect(Ci::DeletedObject)
.to receive(:bulk_import)
.once
.and_raise(ActiveRecord::RecordNotDestroyed)
end
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
it 'raises an exception but destroys the security_finding object regardless' do
expect { subject }.to raise_error(ActiveRecord::RecordNotDestroyed)
.and change { Security::Finding.count }.by(-1)
end
end
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
......@@ -83,13 +80,13 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
let!(:second_security_scan) { create(:security_scan, build: second_job) }
let!(:second_security_finding) { create(:security_finding, scan: second_security_scan) }
it 'destroys all expired artifacts' do
it 'destroys all expired artifacts', :sidekiq_inline do
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
.and change { Security::Finding.count }.by(-2)
end
end
context 'when some artifacts are locked' do
context 'when some artifacts are locked', :sidekiq_inline do
let!(:artifact) { create(:ci_job_artifact, :expired, job: job, locked: job.pipeline.locked) }
let!(:second_artifact) { create(:ci_job_artifact, :expired, job: locked_job, locked: locked_job.pipeline.locked) }
......
......@@ -13,12 +13,26 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
let_it_be(:artifact) { create(:ci_job_artifact) }
let_it_be(:security_scan) { create(:security_scan, build: artifact.job) }
let_it_be(:security_finding) { create(:security_finding, scan: security_scan) }
let_it_be(:event_data) { { job_ids: [artifact.job_id] } }
it 'destroys all expired artifacts' do
it 'destroys all expired artifacts', :sidekiq_inline do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
.and change { Security::Finding.count }.from(1).to(0)
end
it 'publishes Ci::JobArtifactsDeletedEvent' do
event = double(:event)
expect(Ci::JobArtifactsDeletedEvent)
.to receive(:new)
.with(data: event_data)
.and_return(event)
expect(Gitlab::EventStore).to receive(:publish).with(event)
subject
end
context 'with Geo replication' do
let_it_be(:primary) { create(:geo_node, :primary) }
let_it_be(:secondary) { create(:geo_node) }
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Findings::DeleteByJobIdWorker do
  let_it_be(:security_scan1) { create(:security_scan) }
  let_it_be(:security_scan2) { create(:security_scan) }
  let_it_be(:security_finding_to_be_deleted) { create(:security_finding, scan: security_scan1) }
  let_it_be(:security_finding_not_to_be_deleted) { create(:security_finding, scan: security_scan2) }

  # The worker subscribes to Ci::JobArtifactsDeletedEvent, so the spec must
  # build that event. Ci::PipelineCreatedEvent has a different schema
  # (it requires `pipeline_id`, not `job_ids`) and would fail event
  # validation before the worker ever ran.
  let(:event) { Ci::JobArtifactsDeletedEvent.new(data: { job_ids: [security_scan1.build_id] }) }

  subject { consume_event(event) }

  # Mirrors how Gitlab::EventStore::Subscriber invokes subscriber workers:
  # the event class name plus its raw data payload.
  def consume_event(event)
    described_class.new.perform(event.class.name, event.data)
  end

  it 'deletes the security findings belonging to the given job ids' do
    expect { subject }.to change { Security::Finding.count }.from(2).to(1)
  end
end
......@@ -15,6 +15,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
allow(Gitlab::EventStore).to receive(:publish)
end
shared_examples 'deleting the project' do
......@@ -42,7 +43,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
it 'does not publish an event' do
expect(Gitlab::EventStore).not_to receive(:publish)
expect(Gitlab::EventStore).not_to receive(:publish).with(event_type(Projects::ProjectDeletedEvent))
destroy_project(project, user, {})
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment