Commit 976ab01d authored by Paul Slaughter

Merge branch 'eb-sitespeed-degradation-threshold' into 'master'

Expose degradation_threshold from CI config

See merge request gitlab-org/gitlab!21824
parents 0b90507d c8761bb0
@@ -32,6 +32,9 @@ module Ci
      scheduler_failure: 2
    }.freeze

+   CODE_NAVIGATION_JOB_NAME = 'code_navigation'
+   DEGRADATION_THRESHOLD_VARIABLE_NAME = 'DEGRADATION_THRESHOLD'
+
    has_one :deployment, as: :deployable, class_name: 'Deployment'
    has_one :resource, class_name: 'Ci::Resource', inverse_of: :build
    has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
@@ -917,6 +920,11 @@ module Ci
        failure_reason: :data_integrity_failure)
    end

+   def degradation_threshold
+     var = yaml_variables.find { |v| v[:key] == DEGRADATION_THRESHOLD_VARIABLE_NAME }
+     var[:value]&.to_i if var
+   end
+
    private

    def dependencies
......
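For readers skimming the diff: the new `Ci::Build#degradation_threshold` simply scans the build's `yaml_variables` for the `DEGRADATION_THRESHOLD` key and casts its value to an integer. A minimal sketch of that behavior, run against hypothetical variable values (the sample data is illustrative, not from this MR):

```ruby
DEGRADATION_THRESHOLD_VARIABLE_NAME = 'DEGRADATION_THRESHOLD'

# Hypothetical yaml_variables, in the { key:, value: } shape used by Ci::Build.
yaml_variables = [
  { key: 'URL', value: 'https://example.com' },
  { key: 'DEGRADATION_THRESHOLD', value: '5' }
]

var = yaml_variables.find { |v| v[:key] == DEGRADATION_THRESHOLD_VARIABLE_NAME }
threshold = var[:value]&.to_i if var
# threshold => 5; when the variable is absent, `find` returns nil and so does the method
```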
@@ -123,6 +123,26 @@ documentation.
TIP: **Tip:**
Key metrics are automatically extracted and shown in the merge request widget.

+### Configuring degradation threshold
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27599) in GitLab 13.0.
+
+You can configure the sensitivity of degradation alerts so that minor drops in metrics don't trigger alerts.
+This is done by setting the `DEGRADATION_THRESHOLD` variable. In the example below, the alert only shows up
+if the `Total Score` metric degrades by 5 points or more:
+
+```yaml
+include:
+  template: Verify/Browser-Performance.gitlab-ci.yml
+
+performance:
+  variables:
+    URL: https://example.com
+    DEGRADATION_THRESHOLD: 5
+```
+
+The `Total Score` metric is based on sitespeed.io's [coach performance score](https://www.sitespeed.io/documentation/sitespeed.io/metrics/#performance-score). More information is available in [the coach documentation](https://www.sitespeed.io/documentation/coach/how-to/#what-do-the-coach-do).

### Performance testing on Review Apps

The above CI YAML configuration is great for testing against static environments, and it can
......
@@ -14,6 +14,7 @@ import MrWidgetApprovals from './components/approvals/approvals.vue';
import MrWidgetGeoSecondaryNode from './components/states/mr_widget_secondary_geo_node.vue';
import MergeTrainHelperText from './components/merge_train_helper_text.vue';
import { MTWPS_MERGE_STRATEGY } from '~/vue_merge_request_widget/constants';
+import { TOTAL_SCORE_METRIC_NAME } from 'ee/vue_merge_request_widget/stores/constants';

export default {
  components: {
@@ -65,9 +66,29 @@ export default {
        (this.mr.performanceMetrics.improved && this.mr.performanceMetrics.improved.length > 0))
      );
    },
-   shouldRenderPerformance() {
+   hasPerformancePaths() {
      const { performance } = this.mr || {};
-     return performance && performance.head_path && performance.base_path;
+
+     return Boolean(performance?.head_path && performance?.base_path);
+   },
+   degradedTotalScore() {
+     return this.mr?.performanceMetrics?.degraded.find(
+       metric => metric.name === TOTAL_SCORE_METRIC_NAME,
+     );
+   },
+   hasPerformanceDegradation() {
+     const threshold = this.mr?.performance?.degradation_threshold || 0;
+     if (!threshold) {
+       return true;
+     }
+
+     const totalScoreDelta = this.degradedTotalScore?.delta || 0;
+
+     return threshold + totalScoreDelta <= 0;
+   },
+   shouldRenderPerformance() {
+     return this.hasPerformancePaths && this.hasPerformanceDegradation;
    },
    shouldRenderSecurityReport() {
      const { enabledReports } = this.mr;
@@ -170,7 +191,7 @@ export default {
        this.fetchCodeQuality();
      }
    },
-   shouldRenderPerformance(newVal) {
+   hasPerformancePaths(newVal) {
      if (newVal) {
        this.fetchPerformance();
      }
......
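Spelling out the new rendering rule: `shouldRenderPerformance` now requires both report paths *and* a degradation that clears the configured threshold. Since the `delta` of a degraded metric is negative, `threshold + totalScoreDelta <= 0` holds exactly when `Total Score` dropped by at least `threshold` points. For example, with `DEGRADATION_THRESHOLD: 5`, a 5-point drop gives 5 + (−5) = 0 ≤ 0 and the widget renders, while a 3-point drop gives 5 + (−3) = 2 > 0 and it stays hidden. With no threshold configured, `hasPerformanceDegradation` short-circuits to `true`, preserving the old always-render behavior.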
/* eslint-disable import/prefer-default-export */
// This is the name of Sitespeed's Overall Score metric in the performance report
export const TOTAL_SCORE_METRIC_NAME = 'Total Score';
@@ -122,7 +122,6 @@ export default class MergeRequestStore extends CEMergeRequestStore {
  comparePerformanceMetrics(headMetrics, baseMetrics) {
    const headMetricsIndexed = MergeRequestStore.normalizePerformanceMetrics(headMetrics);
    const baseMetricsIndexed = MergeRequestStore.normalizePerformanceMetrics(baseMetrics);
-
    const improved = [];
    const degraded = [];
......
@@ -37,6 +37,13 @@ module EE
            proxy: true)
        end
      end
+
+     def degradation_threshold
+       if (job_artifact = batch_lookup_report_artifact_for_file_type(:performance)) &&
+           can?(current_user, :read_build, job_artifact.job)
+         job_artifact.job.degradation_threshold
+       end
+     end
    end
  end
end
@@ -27,6 +27,11 @@ module EE
      end

      expose :performance, if: -> (mr, _) { head_pipeline_downloadable_path_for_report_type(:performance) } do
+       expose :degradation_threshold do |merge_request|
+         merge_request.head_pipeline&.present(current_user: current_user)
+           &.degradation_threshold
+       end
+
        expose :head_path do |merge_request|
          head_pipeline_downloadable_path_for_report_type(:performance)
        end
......
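End to end, the backend chain reads: the merge request widget entity exposes `performance.degradation_threshold` by presenting the head pipeline; the pipeline presenter finds the `:performance` report artifact, verifies the current user can read the artifact's build, and delegates to `Ci::Build#degradation_threshold`, which extracts the `DEGRADATION_THRESHOLD` value from the build's `yaml_variables`.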
---
title: Support setting threshold for browser performance degradation through CI config
merge_request: 21824
author:
type: added
@@ -40,6 +40,11 @@ describe('ee merge request widget options', () => {
  let mock;
  let Component;

+ const DEFAULT_PERFORMANCE = {
+   head_path: 'head.json',
+   base_path: 'base.json',
+ };
+
  beforeEach(() => {
    delete mrWidgetOptions.extends.el; // Prevent component mounting
@@ -65,8 +70,15 @@ describe('ee merge request widget options', () => {
    });
  });

+ const findPerformanceWidget = () => vm.$el.querySelector('.js-performance-widget');
  const findSecurityWidget = () => vm.$el.querySelector('.js-security-widget');

+ const setPerformance = (data = {}) => {
+   const performance = { ...DEFAULT_PERFORMANCE, ...data };
+   gl.mrWidgetData.performance = performance;
+   vm.mr.performance = performance;
+ };
+
  const VULNERABILITY_FEEDBACK_ENDPOINT = 'vulnerability_feedback_path';

  describe('SAST', () => {
@@ -487,13 +499,10 @@ describe('ee merge request widget options', () => {
      mock.onGet('base.json').reply(200, basePerformance);
      vm = mountComponent(Component, { mrData: gl.mrWidgetData });

-     vm.mr.performance = {
-       head_path: 'head.json',
-       base_path: 'base.json',
-     };
+     vm.mr.performance = { ...DEFAULT_PERFORMANCE };

      vm.$nextTick(() => {
-       expect(trimText(vm.$el.querySelector('.js-performance-widget').textContent)).toContain(
+       expect(trimText(findPerformanceWidget().textContent)).toContain(
          'Loading performance report',
        );
@@ -504,65 +513,91 @@ describe('ee merge request widget options', () => {
    describe('with successful request', () => {
      beforeEach(() => {
-       mock.onGet('head.json').reply(200, headPerformance);
-       mock.onGet('base.json').reply(200, basePerformance);
+       mock.onGet(DEFAULT_PERFORMANCE.head_path).reply(200, headPerformance);
+       mock.onGet(DEFAULT_PERFORMANCE.base_path).reply(200, basePerformance);
        vm = mountComponent(Component, { mrData: gl.mrWidgetData });
-
-       gl.mrWidgetData.performance = {
-         head_path: 'head.json',
-         base_path: 'base.json',
-       };
-       vm.mr.performance = gl.mrWidgetData.performance;
      });

+     describe('default', () => {
+       beforeEach(() => {
+         setPerformance();
+       });
+
        it('should render provided data', done => {
          setImmediate(() => {
            expect(
              trimText(vm.$el.querySelector('.js-performance-widget .js-code-text').textContent),
            ).toEqual('Performance metrics improved on 2 points and degraded on 1 point');
            done();
          });
        });

        describe('text connector', () => {
          it('should only render information about fixed issues', done => {
            setImmediate(() => {
              vm.mr.performanceMetrics.degraded = [];

              Vue.nextTick(() => {
                expect(
                  trimText(vm.$el.querySelector('.js-performance-widget .js-code-text').textContent),
                ).toEqual('Performance metrics improved on 2 points');
                done();
              });
            });
          });

          it('should only render information about added issues', done => {
            setImmediate(() => {
              vm.mr.performanceMetrics.improved = [];

              Vue.nextTick(() => {
                expect(
                  trimText(
                    vm.$el.querySelector('.js-performance-widget .js-code-text').textContent,
                  ),
                ).toEqual('Performance metrics degraded on 1 point');
                done();
              });
            });
          });
        });
+     });

+     describe.each`
+       degradation_threshold | shouldExist
+       ${1}                  | ${true}
+       ${3}                  | ${false}
+     `(
+       'with degradation_threshold = $degradation_threshold',
+       ({ degradation_threshold, shouldExist }) => {
+         beforeEach(() => {
+           setPerformance({ degradation_threshold });
+
+           return waitForPromises();
+         });
+
+         if (shouldExist) {
+           it('should render widget when total score degradation is above threshold', () => {
+             expect(findPerformanceWidget()).toExist();
+           });
+         } else {
+           it('should not render widget when total score degradation is below threshold', () => {
+             expect(findPerformanceWidget()).not.toExist();
+           });
+         }
+       },
+     );
    });
    describe('with empty successful request', () => {
      beforeEach(done => {
-       mock.onGet('head.json').reply(200, []);
-       mock.onGet('base.json').reply(200, []);
+       mock.onGet(DEFAULT_PERFORMANCE.head_path).reply(200, []);
+       mock.onGet(DEFAULT_PERFORMANCE.base_path).reply(200, []);
        vm = mountComponent(Component, { mrData: gl.mrWidgetData });

-       gl.mrWidgetData.performance = {
-         head_path: 'head.json',
-         base_path: 'base.json',
-       };
+       gl.mrWidgetData.performance = { ...DEFAULT_PERFORMANCE };
        vm.mr.performance = gl.mrWidgetData.performance;

        // wait for network request from component watch update method
@@ -590,14 +625,11 @@ describe('ee merge request widget options', () => {
    describe('with failed request', () => {
      beforeEach(() => {
-       mock.onGet('head.json').reply(500, []);
-       mock.onGet('base.json').reply(500, []);
+       mock.onGet(DEFAULT_PERFORMANCE.head_path).reply(500, []);
+       mock.onGet(DEFAULT_PERFORMANCE.base_path).reply(500, []);
        vm = mountComponent(Component, { mrData: gl.mrWidgetData });

-       gl.mrWidgetData.performance = {
-         head_path: 'head.json',
-         base_path: 'base.json',
-       };
+       gl.mrWidgetData.performance = { ...DEFAULT_PERFORMANCE };
        vm.mr.performance = gl.mrWidgetData.performance;
      });
......
@@ -113,7 +113,7 @@ export const headPerformance = [
    subject: '/some/other/path',
    metrics: [
      {
-       name: 'Sitespeed Score',
+       name: 'Total Score',
        value: 79,
        desiredSize: 'larger',
      },
@@ -149,7 +149,7 @@ export const basePerformance = [
    subject: '/some/other/path',
    metrics: [
      {
-       name: 'Sitespeed Score',
+       name: 'Total Score',
        value: 80,
        desiredSize: 'larger',
      },
......
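The fixture numbers above drive the threshold cases in the widget spec: `Total Score` falls from 80 (base) to 79 (head), a delta of −1, so the widget is expected to appear with `degradation_threshold: 1` (1 + (−1) ≤ 0) but not with `degradation_threshold: 3` (3 + (−1) > 0).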
@@ -99,6 +99,51 @@ describe MergeRequestWidgetEntity do
    end
  end

+ describe 'degradation_threshold' do
+   let!(:head_pipeline) { create(:ci_empty_pipeline, project: project) }
+
+   before do
+     allow(merge_request).to receive_messages(
+       base_pipeline: pipeline,
+       head_pipeline: head_pipeline
+     )
+     allow(head_pipeline).to receive(:available_licensed_report_type?).and_return(true)
+
+     create(
+       :ee_ci_build,
+       :performance,
+       pipeline: head_pipeline,
+       yaml_variables: yaml_variables
+     )
+   end
+
+   context "when head pipeline's performance build has the threshold variable defined" do
+     let(:yaml_variables) do
+       [
+         { key: 'FOO', value: 'BAR' },
+         { key: 'DEGRADATION_THRESHOLD', value: '5' }
+       ]
+     end
+
+     it "returns the value of the variable" do
+       expect(subject.as_json[:performance][:degradation_threshold]).to eq(5)
+     end
+   end
+
+   context "when head pipeline's performance build has no threshold variable defined" do
+     let(:yaml_variables) do
+       [
+         { key: 'FOO', value: 'BAR' }
+       ]
+     end
+
+     it "returns nil" do
+       expect(subject.as_json[:performance][:degradation_threshold]).to be_nil
+     end
+   end
+ end
+
  describe '#license_scanning', :request_store do
    before do
      allow(merge_request).to receive_messages(head_pipeline: pipeline, target_project: project)
......
@@ -4414,4 +4414,31 @@ describe Ci::Build do
      it { is_expected.to be_nil }
    end
  end
+
+ describe '#degradation_threshold' do
+   subject { build.degradation_threshold }
+
+   context 'when threshold variable is defined' do
+     before do
+       build.yaml_variables = [
+         { key: 'SOME_VAR_1', value: 'SOME_VAL_1' },
+         { key: 'DEGRADATION_THRESHOLD', value: '5' },
+         { key: 'SOME_VAR_2', value: 'SOME_VAL_2' }
+       ]
+     end
+
+     it { is_expected.to eq(5) }
+   end
+
+   context 'when threshold variable is not defined' do
+     before do
+       build.yaml_variables = [
+         { key: 'SOME_VAR_1', value: 'SOME_VAL_1' },
+         { key: 'SOME_VAR_2', value: 'SOME_VAL_2' }
+       ]
+     end
+
+     it { is_expected.to be_nil }
+   end
+ end
end