Commit abb7fc8f authored by Gabriel Mazetto's avatar Gabriel Mazetto

Merge branch 'kassio/bulkimports-add-bulk-import-id-to-logs' into 'master'

BulkImports: Add more information to the logs

See merge request gitlab-org/gitlab!55866
parents 2e13e8c5 f64052d8
......@@ -8,7 +8,7 @@ module BulkImports
MarkedAsFailedError = Class.new(StandardError)
def run
raise MarkedAsFailedError if marked_as_failed?
raise MarkedAsFailedError if context.entity.failed?
info(message: 'Pipeline started')
......@@ -40,7 +40,7 @@ module BulkImports
private # rubocop:disable Lint/UselessAccessModifier
def run_pipeline_step(step, class_name = nil)
raise MarkedAsFailedError if marked_as_failed?
raise MarkedAsFailedError if context.entity.failed?
info(pipeline_step: step, step_class: class_name)
......@@ -62,24 +62,13 @@ module BulkImports
end
def mark_as_failed
warn(message: 'Pipeline failed', pipeline_class: pipeline)
warn(message: 'Pipeline failed')
context.entity.fail_op!
end
def marked_as_failed?
return true if context.entity.failed?
false
end
def log_skip(extra = {})
log = {
message: 'Skipping due to failed pipeline status',
pipeline_class: pipeline
}.merge(extra)
info(log)
info({ message: 'Skipping due to failed pipeline status' }.merge(extra))
end
def log_import_failure(exception, step)
......@@ -92,25 +81,39 @@ module BulkImports
correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id
}
error(
pipeline_step: step,
exception_class: exception.class.to_s,
exception_message: exception.message
)
BulkImports::Failure.create(attributes)
end
def info(extra = {})
logger.info(log_params(extra))
end
def warn(extra = {})
logger.warn(log_params(extra))
end
def info(extra = {})
logger.info(log_params(extra))
def error(extra = {})
logger.error(log_params(extra))
end
def log_params(extra)
defaults = {
bulk_import_id: context.bulk_import.id,
bulk_import_entity_id: context.entity.id,
bulk_import_entity_type: context.entity.source_type,
pipeline_class: pipeline
pipeline_class: pipeline,
context_extra: context.extra
}
defaults.merge(extra).compact
defaults
.merge(extra)
.reject { |_key, value| value.blank? }
end
def logger
......
......@@ -27,29 +27,31 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
end
describe 'pipeline runner' do
before do
stub_const('BulkImports::Extractor', extractor)
stub_const('BulkImports::Transformer', transformer)
stub_const('BulkImports::Loader', loader)
pipeline = Class.new do
include BulkImports::Pipeline
before do
stub_const('BulkImports::Extractor', extractor)
stub_const('BulkImports::Transformer', transformer)
stub_const('BulkImports::Loader', loader)
extractor BulkImports::Extractor
transformer BulkImports::Transformer
loader BulkImports::Loader
pipeline = Class.new do
include BulkImports::Pipeline
def after_run(_); end
end
extractor BulkImports::Extractor
transformer BulkImports::Transformer
loader BulkImports::Loader
stub_const('BulkImports::MyPipeline', pipeline)
def after_run(_); end
end
context 'when entity is not marked as failed' do
let(:entity) { create(:bulk_import_entity) }
let(:context) { BulkImports::Pipeline::Context.new(entity) }
stub_const('BulkImports::MyPipeline', pipeline)
end
let_it_be_with_refind(:entity) { create(:bulk_import_entity) }
let(:context) { BulkImports::Pipeline::Context.new(entity, extra: :data) }
subject { BulkImports::MyPipeline.new(context) }
describe 'pipeline runner' do
context 'when entity is not marked as failed' do
it 'runs pipeline extractor, transformer, loader' do
extracted_data = BulkImports::Pipeline::ExtractedData.new(data: { foo: :bar })
......@@ -76,58 +78,61 @@ RSpec.describe BulkImports::Pipeline::Runner do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity',
message: 'Pipeline started',
pipeline_class: 'BulkImports::MyPipeline'
log_params(
context,
message: 'Pipeline started',
pipeline_class: 'BulkImports::MyPipeline'
)
)
expect(logger).to receive(:info)
.with(
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity',
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :extractor,
step_class: 'BulkImports::Extractor'
log_params(
context,
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :extractor,
step_class: 'BulkImports::Extractor'
)
)
expect(logger).to receive(:info)
.with(
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity',
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :transformer,
step_class: 'BulkImports::Transformer'
log_params(
context,
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :transformer,
step_class: 'BulkImports::Transformer'
)
)
expect(logger).to receive(:info)
.with(
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity',
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :loader,
step_class: 'BulkImports::Loader'
log_params(
context,
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :loader,
step_class: 'BulkImports::Loader'
)
)
expect(logger).to receive(:info)
.with(
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity',
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :after_run
log_params(
context,
pipeline_class: 'BulkImports::MyPipeline',
pipeline_step: :after_run
)
)
expect(logger).to receive(:info)
.with(
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity',
message: 'Pipeline finished',
pipeline_class: 'BulkImports::MyPipeline'
log_params(
context,
message: 'Pipeline finished',
pipeline_class: 'BulkImports::MyPipeline'
)
)
end
BulkImports::MyPipeline.new(context).run
subject.run
end
context 'when exception is raised' do
let(:entity) { create(:bulk_import_entity, :created) }
let(:context) { BulkImports::Pipeline::Context.new(entity) }
before do
allow_next_instance_of(BulkImports::Extractor) do |extractor|
allow(extractor).to receive(:extract).with(context).and_raise(StandardError, 'Error!')
......@@ -135,7 +140,21 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'logs import failure' do
BulkImports::MyPipeline.new(context).run
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:error)
.with(
log_params(
context,
pipeline_step: :extractor,
pipeline_class: 'BulkImports::MyPipeline',
exception_class: 'StandardError',
exception_message: 'Error!'
)
)
end
expect { subject.run }
.to change(entity.failures, :count).by(1)
failure = entity.failures.first
......@@ -152,29 +171,29 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
it 'marks entity as failed' do
BulkImports::MyPipeline.new(context).run
expect(entity.failed?).to eq(true)
expect { subject.run }
.to change(entity, :status_name).to(:failed)
end
it 'logs warn message' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:warn)
.with(
message: 'Pipeline failed',
pipeline_class: 'BulkImports::MyPipeline',
bulk_import_entity_id: entity.id,
bulk_import_entity_type: entity.source_type
log_params(
context,
message: 'Pipeline failed',
pipeline_class: 'BulkImports::MyPipeline'
)
)
end
BulkImports::MyPipeline.new(context).run
subject.run
end
end
context 'when pipeline is not marked to abort on failure' do
it 'marks entity as failed' do
BulkImports::MyPipeline.new(context).run
it 'does not mark entity as failed' do
subject.run
expect(entity.failed?).to eq(false)
end
......@@ -183,24 +202,31 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
context 'when entity is marked as failed' do
let(:entity) { create(:bulk_import_entity) }
let(:context) { BulkImports::Pipeline::Context.new(entity) }
it 'logs and returns without execution' do
allow(entity).to receive(:failed?).and_return(true)
entity.fail_op!
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info)
.with(
message: 'Skipping due to failed pipeline status',
pipeline_class: 'BulkImports::MyPipeline',
bulk_import_entity_id: entity.id,
bulk_import_entity_type: 'group_entity'
log_params(
context,
message: 'Skipping due to failed pipeline status',
pipeline_class: 'BulkImports::MyPipeline'
)
)
end
BulkImports::MyPipeline.new(context).run
subject.run
end
end
end
def log_params(context, extra = {})
{
bulk_import_id: context.bulk_import.id,
bulk_import_entity_id: context.entity.id,
bulk_import_entity_type: context.entity.source_type,
context_extra: context.extra
}.merge(extra)
end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment