Commit a5327bf5 authored by Markus Koller's avatar Markus Koller Committed by Alex Kalderimis

Include retried jobs in pipeline payloads for Datadog integration

Retried jobs were removed from the default pipeline payload for
performance reasons, but some integrations like Datadog want to include
these as well.

This modifies the payload so integrations can opt in to retried builds
by calling `.with_retried_builds` on the hash.

Changelog: changed
parent 5e292260
...@@ -123,6 +123,8 @@ module Integrations ...@@ -123,6 +123,8 @@ module Integrations
object_kind = 'job' if object_kind == 'build' object_kind = 'job' if object_kind == 'build'
return unless supported_events.include?(object_kind) return unless supported_events.include?(object_kind)
data = data.with_retried_builds if data.respond_to?(:with_retried_builds)
execute_web_hook!(data, "#{object_kind} hook") execute_web_hook!(data, "#{object_kind} hook")
end end
......
...@@ -2,21 +2,35 @@ ...@@ -2,21 +2,35 @@
module Gitlab module Gitlab
module DataBuilder module DataBuilder
module Pipeline # Some callers want to include retried builds, so we wrap the payload hash
extend self # in a SimpleDelegator with additional methods.
class Pipeline < SimpleDelegator
# Factory entry point used by callers of the data builder.
# Wraps the payload in this delegator so integrations can call
# `#with_retried_builds` on the returned object when they need
# retried jobs included (see the Datadog integration).
def self.build(pipeline)
  new(pipeline)
end
# Builds the default pipeline webhook payload and hands it to the
# SimpleDelegator superclass, so this object behaves like the hash.
#
# The pipeline is also kept in an ivar so `#with_retried_builds` can
# re-query builds (including retried ones) later on demand.
#
# `builds:` is wrapped in Gitlab::Lazy so the (potentially expensive)
# build serialization only runs when the key is actually read.
def initialize(pipeline)
  @pipeline = pipeline

  payload = {
    object_kind: 'pipeline',
    object_attributes: hook_attrs(pipeline),
    merge_request: pipeline.merge_request && merge_request_attrs(pipeline.merge_request),
    user: pipeline.user.try(:hook_attrs),
    project: pipeline.project.hook_attrs(backward: false),
    commit: pipeline.commit.try(:hook_attrs),
    # Retried builds are excluded by default for performance; callers
    # opt in via #with_retried_builds.
    builds: Gitlab::Lazy.new { pipeline.builds.latest.map(&method(:build_hook_attrs)) }
  }

  super(payload)
end
# Returns the payload with retried builds included.
#
# `merge` is delegated to the wrapped hash, so this returns a plain
# Hash (not another instance of this delegator) whose :builds key is
# recomputed from ALL of the pipeline's builds — not just `.latest`
# as in the default payload. Wrapped in Gitlab::Lazy so the extra
# query/serialization only happens if :builds is actually read.
def with_retried_builds
  merge(
    builds: Gitlab::Lazy.new { @pipeline.builds.map(&method(:build_hook_attrs)) }
  )
end
private
def hook_attrs(pipeline) def hook_attrs(pipeline)
{ {
id: pipeline.id, id: pipeline.id,
......
...@@ -3,10 +3,10 @@ ...@@ -3,10 +3,10 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Pipeline do RSpec.describe Gitlab::DataBuilder::Pipeline do
let(:user) { create(:user) } let_it_be(:user) { create(:user) }
let(:project) { create(:project, :repository) } let_it_be(:project) { create(:project, :repository) }
let(:pipeline) do let_it_be_with_reload(:pipeline) do
create(:ci_pipeline, create(:ci_pipeline,
project: project, project: project,
status: 'success', status: 'success',
...@@ -15,12 +15,12 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do ...@@ -15,12 +15,12 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
user: user) user: user)
end end
let!(:build) { create(:ci_build, pipeline: pipeline) } let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
describe '.build' do describe '.build' do
let(:data) { described_class.build(pipeline) } let(:data) { described_class.build(pipeline) }
let(:attributes) { data[:object_attributes] } let(:attributes) { data[:object_attributes] }
let(:build_data) { data[:builds].first } let(:build_data) { data[:builds].last }
let(:runner_data) { build_data[:runner] } let(:runner_data) { build_data[:runner] }
let(:project_data) { data[:project] } let(:project_data) { data[:project] }
...@@ -51,9 +51,9 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do ...@@ -51,9 +51,9 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end end
context 'build with runner' do context 'build with runner' do
let!(:build) { create(:ci_build, pipeline: pipeline, runner: ci_runner) } let_it_be(:tag_names) { %w(tag-1 tag-2) }
let!(:tag_names) { %w(tag-1 tag-2) } let_it_be(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n)}) }
let(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n)}) } let_it_be(:build) { create(:ci_build, pipeline: pipeline, runner: ci_runner) }
it 'has runner attributes', :aggregate_failures do it 'has runner attributes', :aggregate_failures do
expect(runner_data[:id]).to eq(ci_runner.id) expect(runner_data[:id]).to eq(ci_runner.id)
...@@ -73,18 +73,15 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do ...@@ -73,18 +73,15 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end end
context 'pipeline with variables' do context 'pipeline with variables' do
let(:build) { create(:ci_build, pipeline: pipeline) } let_it_be(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline, key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1') }
let(:data) { described_class.build(pipeline) }
let(:attributes) { data[:object_attributes] }
let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline, key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1') }
it { expect(attributes[:variables]).to be_a(Array) } it { expect(attributes[:variables]).to be_a(Array) }
it { expect(attributes[:variables]).to contain_exactly({ key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1' }) } it { expect(attributes[:variables]).to contain_exactly({ key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1' }) }
end end
context 'when pipeline is a detached merge request pipeline' do context 'when pipeline is a detached merge request pipeline' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) } let_it_be(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:pipeline) { merge_request.all_pipelines.first } let_it_be(:pipeline) { merge_request.all_pipelines.first }
it 'returns a source ref' do it 'returns a source ref' do
expect(attributes[:ref]).to eq(merge_request.source_branch) expect(attributes[:ref]).to eq(merge_request.source_branch)
...@@ -108,19 +105,25 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do ...@@ -108,19 +105,25 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end end
context 'when pipeline has retried builds' do context 'when pipeline has retried builds' do
before do let_it_be(:retried_build) { create(:ci_build, :retried, pipeline: pipeline) }
create(:ci_build, :retried, pipeline: pipeline)
end
it 'does not contain retried builds in payload' do it 'does not contain retried builds in payload' do
expect(data[:builds].count).to eq(1) builds = data[:builds]
expect(build_data[:id]).to eq(build.id)
expect(builds.pluck(:id)).to contain_exactly(build.id)
end
it 'contains retried builds if requested' do
builds = data.with_retried_builds[:builds]
expect(builds.pluck(:id)).to contain_exactly(build.id, retried_build.id)
end end
end end
context 'build with environment' do context 'build with environment' do
let!(:build) { create(:ci_build, :environment_with_deployment_tier, :with_deployment, pipeline: pipeline) } let_it_be(:build) { create(:ci_build, :environment_with_deployment_tier, :with_deployment, pipeline: pipeline) }
let!(:build_environment_data) { build_data[:environment] }
let(:build_environment_data) { build_data[:environment] }
it 'has environment attributes', :aggregate_failures do it 'has environment attributes', :aggregate_failures do
expect(build_environment_data[:name]).to eq(build.expanded_environment_name) expect(build_environment_data[:name]).to eq(build.expanded_environment_name)
......
...@@ -6,7 +6,8 @@ require 'spec_helper' ...@@ -6,7 +6,8 @@ require 'spec_helper'
RSpec.describe Integrations::Datadog do RSpec.describe Integrations::Datadog do
let_it_be(:project) { create(:project) } let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) } let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:build) { create(:ci_build, project: project) } let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
let_it_be(:retried_build) { create(:ci_build, :retried, pipeline: pipeline) }
let(:active) { true } let(:active) { true }
let(:dd_site) { 'datadoghq.com' } let(:dd_site) { 'datadoghq.com' }
...@@ -159,6 +160,10 @@ RSpec.describe Integrations::Datadog do ...@@ -159,6 +160,10 @@ RSpec.describe Integrations::Datadog do
end end
describe '#execute' do describe '#execute' do
around do |example|
freeze_time { example.run }
end
before do before do
stub_request(:post, expected_hook_url) stub_request(:post, expected_hook_url)
saved_instance.execute(data) saved_instance.execute(data)
...@@ -166,20 +171,18 @@ RSpec.describe Integrations::Datadog do ...@@ -166,20 +171,18 @@ RSpec.describe Integrations::Datadog do
context 'with pipeline data' do context 'with pipeline data' do
let(:data) { pipeline_data } let(:data) { pipeline_data }
let(:expected_headers) do let(:expected_headers) { { WebHookService::GITLAB_EVENT_HEADER => 'Pipeline Hook' } }
{ WebHookService::GITLAB_EVENT_HEADER => 'Pipeline Hook' } let(:expected_body) { data.with_retried_builds.to_json }
end
it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made } it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers, body: expected_body)).to have_been_made }
end end
context 'with job data' do context 'with job data' do
let(:data) { build_data } let(:data) { build_data }
let(:expected_headers) do let(:expected_headers) { { WebHookService::GITLAB_EVENT_HEADER => 'Job Hook' } }
{ WebHookService::GITLAB_EVENT_HEADER => 'Job Hook' } let(:expected_body) { data.to_json }
end
it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers)).to have_been_made } it { expect(a_request(:post, expected_hook_url).with(headers: expected_headers, body: expected_body)).to have_been_made }
end end
end end
end end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment