Commit 6440ea2b authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 1f2a8719 60af5d34
 <script>
 import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
-import { flatten } from 'lodash';
+import { flatten, isEqual } from 'lodash';
 import createFlash from '~/flash';
 import { sprintf, s__ } from '~/locale';
-import { redirectTo } from '~/lib/utils/url_utility';
 import { METRICS_POPOVER_CONTENT } from '../constants';
 import { removeFlash, prepareTimeMetricsData } from '../utils';
 import MetricTile from './metric_tile.vue';
@@ -48,6 +47,11 @@ export default {
       type: Array,
       required: true,
     },
+    filterFn: {
+      type: Function,
+      required: false,
+      default: null,
+    },
   },
   data() {
     return {
@@ -56,8 +60,10 @@ export default {
     };
   },
   watch: {
-    requestParams() {
-      this.fetchData();
+    requestParams(newVal, oldVal) {
+      if (!isEqual(newVal, oldVal)) {
+        this.fetchData();
+      }
     },
   },
   mounted() {
@@ -69,25 +75,13 @@ export default {
       this.isLoading = true;
       return fetchMetricsData(this.requests, this.requestPath, this.requestParams)
         .then((data) => {
-          this.metrics = data;
+          this.metrics = this.filterFn ? this.filterFn(data) : data;
           this.isLoading = false;
         })
         .catch(() => {
          this.isLoading = false;
         });
     },
-    hasLinks(links) {
-      return links?.length && links[0].url;
-    },
-    clickHandler({ links }) {
-      if (this.hasLinks(links)) {
-        redirectTo(links[0].url);
-      }
-    },
-    getDecimalPlaces(value) {
-      const parsedFloat = parseFloat(value);
-      return Number.isNaN(parsedFloat) || Number.isInteger(parsedFloat) ? 0 : 1;
-    },
   },
 };
 </script>
...
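Note: the new `filterFn` prop gives consumers a hook to post-process fetched metrics before they are rendered. A minimal consumer-side sketch (the identifier below is illustrative, not part of this commit):

// Keep only deployment metrics and strip their drill-down links (illustrative).
const onlyDeployMetrics = (metrics) =>
  metrics.filter(({ identifier }) => identifier === 'deploys').map(({ links, ...rest }) => rest);

The `deployment_frequency_charts.vue` change later in this commit passes a function of exactly this shape.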
@@ -45,7 +45,8 @@ export default {
       :chart-data="chart.data"
       :area-chart-options="chartOptions"
     >
-      {{ dateRange }}
+      <p>{{ dateRange }}</p>
+      <slot name="metrics" :selected-chart="selectedChart"></slot>
       <template #tooltip-title>
         <slot name="tooltip-title"></slot>
       </template>
...
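Note: the added `metrics` slot is scoped with the index of the active chart, so a parent can render per-chart content and react when the user switches charts. A sketch of consuming the slot (component and slot names are from this commit; the inner markup is illustrative):

<ci-cd-analytics-charts :charts="charts" :chart-options="options">
  <template #metrics="{ selectedChart }">
    <span>Chart {{ selectedChart }} is selected</span>
  </template>
</ci-cd-analytics-charts>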
@@ -14,8 +14,8 @@ module MergeRequests
     def async_execute
       return service_error if service_error
+      return unless merge_request.mark_as_checking

-      merge_request.mark_as_checking
       MergeRequestMergeabilityCheckWorker.perform_async(merge_request.id)
     end
...
@@ -162,6 +162,7 @@ options:
     - p_ci_templates_implicit_security_dast
     - p_ci_templates_implicit_security_cluster_image_scanning
     - p_ci_templates_kaniko
+    - p_ci_templates_qualys_iac_security
 distribution:
 - ce
 - ee
...
---
key_path: redis_hll_counters.ci_templates.p_ci_templates_qualys_iac_security_monthly
description: ""
product_section: ""
product_stage: ""
product_group: ""
product_category: ""
value_type: number
status: active
milestone: "14.7"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/77362
time_frame: 28d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
performance_indicator_type: []
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
options:
events:
- p_ci_templates_qualys_iac_security
@@ -162,6 +162,7 @@ options:
     - p_ci_templates_implicit_security_dast
     - p_ci_templates_implicit_security_cluster_image_scanning
     - p_ci_templates_kaniko
+    - p_ci_templates_qualys_iac_security
 distribution:
 - ce
 - ee
...
---
key_path: redis_hll_counters.ci_templates.p_ci_templates_qualys_iac_security_weekly
description: ""
product_section: ""
product_stage: ""
product_group: ""
product_category: ""
value_type: number
status: active
milestone: "14.7"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/77362
time_frame: 7d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
performance_indicator_type: []
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
options:
events:
- p_ci_templates_qualys_iac_security
@@ -319,11 +319,11 @@ Parameters:
 | ---------------------- | ------- | -------- | ----------- |
 | `api_key` | string | true | API key used for authentication with Datadog |
 | `api_url` | string | false | (Advanced) The full URL for your Datadog site |
-<!-- | `archive_trace_events` | boolean | false | When enabled, job logs are collected by Datadog and displayed along with pipeline execution traces ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/346339) in GitLab 14.7) | -->
-<!-- TODO: uncomment the archive_trace_events field once :datadog_integration_logs_collection is rolled out. Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/346339 -->
 | `datadog_env` | string | false | For self-managed deployments, set the env% tag for all the data sent to Datadog. |
 | `datadog_service` | string | false | Tag all data from this GitLab instance in Datadog. Useful when managing several self-managed deployments |
 | `datadog_site` | string | false | The Datadog site to send data to. To send data to the EU site, use `datadoghq.eu` |
+<!-- | `archive_trace_events` | boolean | false | When enabled, job logs are collected by Datadog and displayed along with pipeline execution traces ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/346339) in GitLab 14.7) | -->
+<!-- TODO: uncomment the archive_trace_events field once :datadog_integration_logs_collection is rolled out. Rollout issue: https://gitlab.com/gitlab-org/gitlab/-/issues/346339 -->

 ### Disable Datadog integration
...
 <script>
 import * as Sentry from '@sentry/browser';
 import * as DoraApi from 'ee/api/dora_api';
+import { toYmd } from '~/analytics/shared/utils';
 import createFlash from '~/flash';
 import { s__, sprintf } from '~/locale';
+import ValueStreamMetrics from '~/cycle_analytics/components/value_stream_metrics.vue';
+import { SUMMARY_METRICS_REQUEST } from '~/cycle_analytics/constants';
 import CiCdAnalyticsCharts from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue';
 import DoraChartHeader from './dora_chart_header.vue';
 import {
@@ -19,11 +22,16 @@ import {
 } from './static_data/deployment_frequency';
 import { apiDataToChartSeries, seriesToAverageSeries } from './util';

+const VISIBLE_METRICS = ['deploys', 'deployment-frequency', 'deployment_frequency'];
+const filterFn = (data) =>
+  data.filter((d) => VISIBLE_METRICS.includes(d.identifier)).map(({ links, ...rest }) => rest);
+
 export default {
   name: 'DeploymentFrequencyCharts',
   components: {
     CiCdAnalyticsCharts,
     DoraChartHeader,
+    ValueStreamMetrics,
   },
   inject: {
     projectPath: {
@@ -56,6 +64,9 @@ export default {
         data: this.chartData[chart.id],
       }));
     },
+    metricsRequestPath() {
+      return this.projectPath ? this.projectPath : `groups/${this.groupPath}`;
+    },
   },
   async mounted() {
     const results = await Promise.allSettled(
@@ -114,9 +125,23 @@ export default {
       );
     }
   },
+  methods: {
+    getMetricsRequestParams(selectedChart) {
+      const {
+        requestParams: { start_date },
+      } = allChartDefinitions[selectedChart];
+
+      return {
+        created_after: toYmd(start_date),
+      };
+    },
+  },
   areaChartOptions,
   chartDescriptionText,
   chartDocumentationHref,
+  metricsRequest: SUMMARY_METRICS_REQUEST,
+  filterFn,
 };
 </script>

 <template>
@@ -126,6 +151,15 @@ export default {
       :chart-description-text="$options.chartDescriptionText"
       :chart-documentation-href="$options.chartDocumentationHref"
     />
-    <ci-cd-analytics-charts :charts="charts" :chart-options="$options.areaChartOptions" />
+    <ci-cd-analytics-charts :charts="charts" :chart-options="$options.areaChartOptions">
+      <template #metrics="{ selectedChart }">
+        <value-stream-metrics
+          :request-path="metricsRequestPath"
+          :requests="$options.metricsRequest"
+          :request-params="getMetricsRequestParams(selectedChart)"
+          :filter-fn="$options.filterFn"
+        />
+      </template>
+    </ci-cd-analytics-charts>
   </div>
 </template>
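Note: `getMetricsRequestParams` translates the selected chart's static `start_date` into the `created_after` filter the value-stream endpoint expects. Assuming a chart definition shaped like the entries in `allChartDefinitions` (the date below is illustrative and matches the spec fixture later in this commit):

// Hypothetical chart definition; the real ones live in ./static_data/deployment_frequency.
const chart = { requestParams: { start_date: new Date('2015-06-04') } };
// toYmd(chart.requestParams.start_date) === '2015-06-04', so the metrics component
// is rendered with request-params of { created_after: '2015-06-04' }.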
@@ -10,6 +10,7 @@
 module Security
   class Finding < ApplicationRecord
     include IgnorableColumns
+    include EachBatch

     self.table_name = 'security_findings'
...
@@ -3,15 +3,18 @@
 module Security
   # This service class stores the findings metadata for all pipelines.
   class StoreFindingsMetadataService < ::BaseService
-    attr_reader :security_scan, :report
+    BATCH_SIZE = 50
+
+    attr_reader :security_scan, :report, :deduplicated_finding_uuids

-    def self.execute(security_scan, report)
-      new(security_scan, report).execute
+    def self.execute(security_scan, report, deduplicated_finding_uuids)
+      new(security_scan, report, deduplicated_finding_uuids).execute
     end

-    def initialize(security_scan, report)
+    def initialize(security_scan, report, deduplicated_finding_uuids)
       @security_scan = security_scan
       @report = report
+      @deduplicated_finding_uuids = deduplicated_finding_uuids
     end

     def execute
@@ -30,28 +33,39 @@ module Security
     end

     def store_findings
-      report_findings.each { |report_finding| store_finding!(report_finding) }
+      report_findings.each_slice(BATCH_SIZE) { |batch| store_finding_batch(batch) }
     end

     def report_findings
       report.findings.select(&:valid?)
     end

-    def store_finding!(report_finding)
-      security_scan.findings.create!(finding_data(report_finding))
+    def store_finding_batch(batch)
+      batch.map(&method(:finding_data))
+           .then(&method(:import_batch))
+    end
+
+    def import_batch(report_finding_data)
+      Security::Finding.insert_all(report_finding_data)
     end

     def finding_data(report_finding)
       {
+        scan_id: security_scan.id,
         severity: report_finding.severity,
         confidence: report_finding.confidence,
         uuid: report_finding.uuid,
         overridden_uuid: report_finding.overridden_uuid,
         project_fingerprint: report_finding.project_fingerprint,
-        scanner: persisted_scanner_for(report_finding.scanner)
+        scanner_id: persisted_scanner_for(report_finding.scanner).id,
+        deduplicated: deduplicated?(report_finding)
       }
     end

+    def deduplicated?(report_finding)
+      deduplicated_finding_uuids.include?(report_finding.uuid)
+    end
+
     def persisted_scanner_for(report_scanner)
       existing_scanners[report_scanner.key] ||= create_scanner!(report_scanner)
     end
...
@@ -8,6 +8,8 @@
 # @param deduplicate [Boolean] attribute to force running deduplication logic.
 module Security
   class StoreScanService
+    DEDUPLICATE_BATCH_SIZE = 50
+
     def self.execute(artifact, known_keys, deduplicate)
       new(artifact, known_keys, deduplicate).execute
     end
@@ -49,8 +51,11 @@ module Security
     end

     def store_findings
-      StoreFindingsMetadataService.execute(security_scan, security_report)
-      deduplicate_findings? ? update_deduplicated_findings : register_finding_keys
+      StoreFindingsMetadataService.execute(security_scan, security_report, register_finding_keys).then do |result|
+        # If `StoreFindingsMetadataService` returns error, it means the findings
+        # have already been stored before so we may re-run the deduplication logic.
+        update_deduplicated_findings if result[:status] == :error && deduplicate_findings?
+      end

       deduplicate_findings?
     end
@@ -65,10 +70,19 @@ module Security
     def update_deduplicated_findings
       Security::Scan.transaction do
-        security_scan.findings.update_all(deduplicated: false)
-        security_scan.findings
-                     .by_uuid(register_finding_keys)
-                     .update_all(deduplicated: true)
+        mark_all_findings_as_duplicate
+        mark_unique_findings
       end
     end
+
+    def mark_all_findings_as_duplicate
+      security_scan.findings.deduplicated.each_batch(of: DEDUPLICATE_BATCH_SIZE) { |batch| batch.update_all(deduplicated: false) }
+    end
+
+    def mark_unique_findings
+      register_finding_keys.each_slice(DEDUPLICATE_BATCH_SIZE) do |batch|
+        security_scan.findings
+                     .by_uuid(batch)
+                     .update_all(deduplicated: true)
+      end
+    end
...
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module BackfillVersionDataFromGitaly
extend ::Gitlab::Utils::Override
class Version < ActiveRecord::Base
self.table_name = 'design_management_versions'
self.inheritance_column = :_type_disabled
# The `sha` of a version record must be deserialized from binary
# in order to convert it to a `sha` String that can be used to fetch
# a corresponding Commit from Git.
def sha
value = super
value.unpack1('H*')
end
scope :backfillable_for_issue, -> (issue_id) do
where(author_id: nil).or(where(created_at: nil))
.where(issue_id: issue_id)
end
end
class Issue < ActiveRecord::Base
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
end
override :perform
def perform(issue_id)
issue = Issue.find_by_id(issue_id)
return unless issue
# We need a full Project instance in order to initialize a
# Repository instance that can perform Gitaly calls.
project = ::Project.find_by_id(issue.project_id)
return if project.nil? || project.pending_delete?
# We need a full Repository instance to perform Gitaly calls.
repository = ::DesignManagement::Repository.new(project)
versions = Version.backfillable_for_issue(issue_id)
commits = commits_for_versions(versions, repository)
ActiveRecord::Base.transaction do
versions.each do |version|
commit = commits[version.sha]
unless commit.nil?
version.update_columns(created_at: commit.created_at, author_id: commit.author&.id)
end
end
end
end
private
# Performs a Gitaly request to fetch the corresponding Commit data
# for the given versions.
#
# Returns Commits as a Hash of { sha => Commit }
def commits_for_versions(versions, repository)
shas = versions.map(&:sha)
commits = repository.commits_by(oids: shas)
# Batch load the commit authors so the `User` records are fetched
# all at once the first time we call `commit.author.id`.
commits.each(&:lazy_author)
commits.each_with_object({}) do |commit, hash|
hash[commit.id] = commit
end
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module GenerateGitlabSubscriptions
extend ::Gitlab::Utils::Override
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
self.inheritance_column = :_type_disabled # Disable STI
scope :with_plan, -> { where.not(plan_id: nil) }
scope :without_subscription, -> do
joins("LEFT JOIN gitlab_subscriptions ON namespaces.id = gitlab_subscriptions.namespace_id")
.where(gitlab_subscriptions: { id: nil })
end
def trial_active?
trial_ends_on.present? && trial_ends_on >= Date.today
end
end
class GitlabSubscription < ActiveRecord::Base
self.table_name = 'gitlab_subscriptions'
end
override :perform
def perform(start_id, stop_id)
now = Time.now
# Some fields like seats or end_date will be properly updated by a script executed
# from the subscription portal after this MR hits production.
rows = Namespace
.with_plan
.without_subscription
.where(id: start_id..stop_id)
.select(:id, :plan_id, :trial_ends_on, :created_at)
.map do |namespace|
{
namespace_id: namespace.id,
hosted_plan_id: namespace.plan_id,
trial: namespace.trial_active?,
start_date: namespace.created_at.to_date,
auto_renew: false,
seats: 0,
created_at: now,
updated_at: now
}
end
ApplicationRecord.legacy_bulk_insert(:gitlab_subscriptions, rows) # rubocop:disable Gitlab/BulkInsert
end
end
end
end
end
# frozen_string_literal: true
#
module EE
module Gitlab
module BackgroundMigration
module MigrateDevopsSegmentsToGroups
class AdoptionSegmentSelection < ActiveRecord::Base
self.table_name = 'analytics_devops_adoption_segment_selections'
end
class AdoptionSegment < ActiveRecord::Base
SNAPSHOT_CALCULATION_DELAY = 60.seconds
self.table_name = 'analytics_devops_adoption_segments'
has_many :selections, class_name: 'AdoptionSegmentSelection', foreign_key: :segment_id
scope :without_namespace_id, -> { where(namespace_id: nil) }
after_commit :schedule_data_calculation, on: :create
private
def schedule_data_calculation
::Analytics::DevopsAdoption::CreateSnapshotWorker.perform_in(SNAPSHOT_CALCULATION_DELAY + rand(10), id)
end
end
def perform
ActiveRecord::Base.transaction do
AdoptionSegment
.without_namespace_id
.includes(:selections)
.sort_by { |segment| segment.selections.size }
.each do |segment|
if segment.selections.size == 1
group_id = segment.selections.first.group_id
if segment_exists?(group_id)
segment.delete
else
segment.update(namespace_id: group_id)
end
else
segment.selections.each do |selection|
unless segment_exists?(selection.group_id)
AdoptionSegment.create(namespace_id: selection.group_id)
end
end
segment.delete
end
end
end
end
private
def segment_exists?(namespace_id)
AdoptionSegment.where(namespace_id: namespace_id).exists?
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module BackgroundMigration
module SyncBlockingIssuesCount
extend ::Gitlab::Utils::Override
override :perform
def perform(start_id, end_id)
ActiveRecord::Base.connection.execute <<~SQL
UPDATE issues
SET blocking_issues_count = grouped_counts.count
FROM
(
SELECT blocking_issue_id, SUM(blocked_count) AS count
FROM (
SELECT COUNT(*) AS blocked_count, issue_links.source_id AS blocking_issue_id
FROM issue_links
INNER JOIN issues ON issue_links.source_id = issues.id
WHERE issue_links.link_type = 1
AND issues.state_id = 1
AND issues.blocking_issues_count = 0
AND issue_links.source_id BETWEEN #{start_id} AND #{end_id}
GROUP BY blocking_issue_id HAVING COUNT(*) > 0
UNION ALL
SELECT COUNT(*) AS blocked_count, issue_links.target_id AS blocking_issue_id
FROM issue_links
INNER JOIN issues ON issue_links.target_id = issues.id
WHERE issue_links.link_type = 2
AND issues.state_id = 1
AND issues.blocking_issues_count = 0
AND issue_links.target_id BETWEEN #{start_id} AND #{end_id}
GROUP BY blocking_issue_id HAVING COUNT(*) > 0
) blocking_counts
GROUP BY blocking_issue_id
) AS grouped_counts
WHERE issues.blocking_issues_count = 0
AND issues.state_id = 1
AND issues.id = grouped_counts.blocking_issue_id
AND grouped_counts.count > 0
SQL
end
end
end
end
end
@@ -5,6 +5,8 @@
 import lastMonthData from 'test_fixtures/api/dora/metrics/daily_deployment_frequency_for_last_month.json';
 import last90DaysData from 'test_fixtures/api/dora/metrics/daily_deployment_frequency_for_last_90_days.json';
 import { useFixturesFakeDate } from 'helpers/fake_date';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import ValueStreamMetrics from '~/cycle_analytics/components/value_stream_metrics.vue';
 import createFlash from '~/flash';
 import axios from '~/lib/utils/axios_utils';
 import httpStatus from '~/lib/utils/http_status';
@@ -12,6 +14,14 @@

 jest.mock('~/flash');

+const makeMockCiCdAnalyticsCharts = ({ selectedChart = 0 } = {}) => ({
+  render() {
+    return this.$scopedSlots.metrics({
+      selectedChart,
+    });
+  },
+});
+
 describe('deployment_frequency_charts.vue', () => {
   useFixturesFakeDate();
@@ -36,7 +46,7 @@ describe('deployment_frequency_charts.vue', () => {
   };

   const createComponent = (mountOptions = defaultMountOptions) => {
-    wrapper = shallowMount(DeploymentFrequencyCharts, mountOptions);
+    wrapper = extendedWrapper(shallowMount(DeploymentFrequencyCharts, mountOptions));
   };

   // Initializes the mock endpoint to return a specific set of deployment
@@ -55,6 +65,8 @@ describe('deployment_frequency_charts.vue', () => {
       .replyOnce(httpStatus.OK, data);
   };

+  const findValueStreamMetrics = () => wrapper.findComponent(ValueStreamMetrics);
+
   afterEach(() => {
     wrapper.destroy();
     wrapper = null;
@@ -99,6 +111,31 @@ describe('deployment_frequency_charts.vue', () => {
     it('renders a header', () => {
       expect(wrapper.findComponent(DoraChartHeader).exists()).toBe(true);
     });

+    describe('value stream metrics', () => {
+      beforeEach(() => {
+        createComponent({
+          ...defaultMountOptions,
+          stubs: {
+            CiCdAnalyticsCharts: makeMockCiCdAnalyticsCharts({
+              selectedChart: 1,
+            }),
+          },
+        });
+      });
+
+      it('renders the value stream metrics component', () => {
+        const metricsComponent = findValueStreamMetrics();
+
+        expect(metricsComponent.exists()).toBe(true);
+      });
+
+      it('passes the selectedChart correctly and computes the requestParams', () => {
+        const metricsComponent = findValueStreamMetrics();
+
+        expect(metricsComponent.props('requestParams')).toMatchObject({
+          created_after: '2015-06-04',
+        });
+      });
+    });
   });

   describe('when there are network errors', () => {
...
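Note: `makeMockCiCdAnalyticsCharts` stubs the child component with a render function that invokes the `metrics` scoped slot directly, letting the spec pin `selectedChart` without simulating chart switching. The pattern in isolation (Vue 2 scoped-slot API, as used above; the payload value is illustrative):

// A stub whose only job is to render the `metrics` slot with a fixed payload.
const StubCharts = {
  render() {
    return this.$scopedSlots.metrics({ selectedChart: 1 });
  },
};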
# frozen_string_literal: true
require 'spec_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs
RSpec.describe Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly do
let(:issue) { create(:issue) }
def perform_worker
described_class.new.perform(issue.id)
end
def create_version(attrs)
# Use the `:design` factory to create a version that has a
# corresponding Git commit.
attrs[:issue] ||= issue
design = create(:design, :with_file, attrs)
design.versions.first
end
def create_version_with_missing_data(attrs = {})
version = create_version(attrs)
version.update_columns(author_id: nil)
version
end
it 'correctly sets version author_id and created_at properties from the Git commit' do
version = create_version_with_missing_data
commit = issue.project.design_repository.commit(version.sha)
expect(version).to have_attributes(
author_id: nil
)
expect(commit.author.id).to be_present
expect { perform_worker }.to(
change do
version.reload
version.author_id
end
.from(nil)
.to(commit.author.id)
)
end
it 'avoids N+1 issues and fetches all User records in one call' do
author_1, author_2, author_3 = create_list(:user, 3)
create_version_with_missing_data(author: author_1)
create_version_with_missing_data(author: author_2)
create_version_with_missing_data(author: author_3)
expect(User).to receive(:by_any_email).with(
array_including(author_1.email, author_2.email, author_3.email),
confirmed: true
).once.and_call_original
perform_worker
end
it 'leaves versions in a valid state' do
version = create_version_with_missing_data
expect(version).to be_valid
expect { perform_worker }.not_to change { version.reload.valid? }
end
it 'skips versions that are in projects that are pending deletion' do
version = create_version_with_missing_data
version.issue.project.update!(pending_delete: true)
expect { perform_worker }.not_to(
change do
version.reload
version.author_id
end
)
end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateDevopsSegmentsToGroups, schema: 20210301200959 do
let(:segments_table) { table(:analytics_devops_adoption_segments) }
let(:selections_table) { table(:analytics_devops_adoption_segment_selections) }
let(:namespaces_table) { table(:namespaces) }
let(:namespace) { namespaces_table.create!(name: 'gitlab', path: 'gitlab-org') }
let(:namespace2) { namespaces_table.create!(name: 'gitlab-test', path: 'gitlab-test') }
let!(:single_group_segment) do
segments_table.create!.tap do |segment|
selections_table.create!(group_id: namespace.id, segment_id: segment.id)
end
end
let!(:multiple_groups_segment) do
segments_table.create!.tap do |segment|
selections_table.create!(group_id: namespace.id, segment_id: segment.id)
selections_table.create!(group_id: namespace2.id, segment_id: segment.id)
end
end
let!(:empty_segment) do
segments_table.create!
end
describe '#perform' do
it 'removes empty segments' do
expect { subject.perform }.to change { segments_table.where(id: empty_segment.id).exists? }.to(false)
end
it 'sets namespace id for segments with single group' do
expect do
subject.perform
single_group_segment.reload
end.to change { single_group_segment.namespace_id }.from(nil).to(namespace.id)
end
it 'creates segment with namespace_id for each unique group across all selections' do
expect do
subject.perform
end.to change { segments_table.where(namespace_id: [namespace.id, namespace2.id]).count }.from(0).to(2)
end
it 'schedules data calculation for fresh segments' do
expect(Analytics::DevopsAdoption::CreateSnapshotWorker).to receive(:perform_in).with(kind_of(Integer), kind_of(Integer))
subject.perform
end
it 'removes old multi-group segment' do
expect { subject.perform }.to change { segments_table.where(id: multiple_groups_segment.id).exists? }.to(false)
end
context 'with duplicated segments' do
let!(:single_group_segment_duplicate) do
segments_table.create!.tap do |segment|
selections_table.create!(group_id: namespace.id, segment_id: segment.id)
end
end
it 'removes duplicated segments' do
expect { subject.perform }.to change { segments_table.where(id: single_group_segment_duplicate).exists? }.to(false)
end
end
end
end
@@ -7,18 +7,20 @@ RSpec.describe Security::StoreFindingsMetadataService do
   let_it_be(:project) { security_scan.project }
   let_it_be(:security_finding_1) { build(:ci_reports_security_finding) }
   let_it_be(:security_finding_2) { build(:ci_reports_security_finding) }
-  let_it_be(:security_finding_3) { build(:ci_reports_security_finding, uuid: nil) }
+  let_it_be(:security_finding_3) { build(:ci_reports_security_finding) }
+  let_it_be(:security_finding_4) { build(:ci_reports_security_finding, uuid: nil) }
+  let_it_be(:deduplicated_finding_uuids) { [security_finding_1.uuid, security_finding_3.uuid] }
   let_it_be(:security_scanner) { build(:ci_reports_security_scanner) }
   let_it_be(:report) do
     build(
       :ci_reports_security_report,
-      findings: [security_finding_1, security_finding_2, security_finding_3],
+      findings: [security_finding_1, security_finding_2, security_finding_3, security_finding_4],
       scanners: [security_scanner]
     )
   end

   describe '#execute' do
-    let(:service_object) { described_class.new(security_scan, report) }
+    let(:service_object) { described_class.new(security_scan, report, deduplicated_finding_uuids) }

     subject(:store_findings) { service_object.execute }
@@ -27,6 +29,10 @@ RSpec.describe Security::StoreFindingsMetadataService do
       create(:security_finding, scan: security_scan)
     end

+    it 'returns error message' do
+      expect(store_findings).to eq({ status: :error, message: "Findings are already stored!" })
+    end
+
     it 'does not create new findings in database' do
       expect { store_findings }.not_to change { Security::Finding.count }
     end
@@ -38,11 +44,14 @@ RSpec.describe Security::StoreFindingsMetadataService do
     end

     it 'creates the security finding entries in database' do
-      expect { store_findings }.to change { security_scan.findings.count }.by(2)
-        .and change { security_scan.findings.first&.severity }.to(security_finding_1.severity.to_s)
-        .and change { security_scan.findings.first&.confidence }.to(security_finding_1.confidence.to_s)
-        .and change { security_scan.findings.first&.uuid }.to(security_finding_1.uuid)
-        .and change { security_scan.findings.last&.uuid }.to(security_finding_2.uuid)
+      store_findings
+
+      expect(security_scan.findings.reload.as_json(only: [:uuid, :deduplicated]))
+        .to match_array([
+          { "uuid" => security_finding_1.uuid, "deduplicated" => true },
+          { "uuid" => security_finding_2.uuid, "deduplicated" => false },
+          { "uuid" => security_finding_3.uuid, "deduplicated" => true }
+        ])
     end

     context 'when the scanners already exist in the database' do
...
@@ -56,7 +56,7 @@ RSpec.describe Security::StoreScanService do
     subject(:store_scan) { service_object.execute }

     before do
-      allow(Security::StoreFindingsMetadataService).to receive(:execute)
+      allow(Security::StoreFindingsMetadataService).to receive(:execute).and_return(status: :success)

       known_keys.add(finding_key)
     end
@@ -170,12 +170,24 @@ RSpec.describe Security::StoreScanService do
     context 'when the `deduplicate` param is set as true' do
       let(:deduplicate) { true }

-      it 'does not change the deduplicated flag of duplicated finding false' do
-        expect { store_scan }.not_to change { duplicated_security_finding.reload.deduplicated }.from(false)
-      end
+      context 'when the `StoreFindingsMetadataService` returns success' do
+        it 'does not run the re-deduplicate logic' do
+          expect { store_scan }.not_to change { unique_security_finding.reload.deduplicated }.from(false)
+        end
+      end

-      it 'sets the deduplicated flag of unique finding as true' do
-        expect { store_scan }.to change { unique_security_finding.reload.deduplicated }.to(true)
-      end
+      context 'when the `StoreFindingsMetadataService` returns error' do
+        before do
+          allow(Security::StoreFindingsMetadataService).to receive(:execute).and_return({ status: :error })
+        end
+
+        it 'does not change the deduplicated flag of duplicated finding from false' do
+          expect { store_scan }.not_to change { duplicated_security_finding.reload.deduplicated }.from(false)
+        end
+
+        it 'sets the deduplicated flag of unique finding as true' do
+          expect { store_scan }.to change { unique_security_finding.reload.deduplicated }.to(true)
+        end
+      end
     end
   end
...
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This migration is not needed anymore and was disabled, because we're now
# also backfilling design positions immediately before moving a design.
#
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39555
class BackfillDesignsRelativePosition
def perform(issue_ids)
# no-op
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Class that will fill the project_repositories table for projects that
# are on legacy storage and an entry is missing in this table.
class BackfillLegacyProjectRepositories < BackfillProjectRepositories
private
def projects
Project.with_parent.on_legacy_storage
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Updates existing projects' updated_at column after their repository storage was moved
class BackfillProjectUpdatedAtAfterRepositoryStorageMove
def perform(*project_ids)
updated_repository_storages = Projects::RepositoryStorageMove.select("project_id, MAX(updated_at) as updated_at").where(project_id: project_ids).group(:project_id)
Project.connection.execute <<-SQL
WITH repository_storage_cte as #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
#{updated_repository_storages.to_sql}
)
UPDATE projects
SET updated_at = (repository_storage_cte.updated_at + interval '1 second')
FROM repository_storage_cte
WHERE projects.id = repository_storage_cte.project_id AND projects.updated_at <= repository_storage_cte.updated_at
SQL
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class BackfillVersionDataFromGitaly
def perform(issue_id)
end
end
end
end
Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly.prepend_mod_with('Gitlab::BackgroundMigration::BackfillVersionDataFromGitaly')
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class CalculateWikiSizes
def perform(start_id, stop_id)
::ProjectStatistics.where(wiki_size: nil)
.where(id: start_id..stop_id)
.includes(project: [:route, :group, namespace: [:owner]]).find_each do |statistics|
statistics.refresh!(only: [:wiki_size])
rescue StandardError => e
Gitlab::AppLogger.error "Failed to update wiki statistics. id: #{statistics.id} message: #{e.message}"
end
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class CleanupOptimisticLockingNulls
QUERY_ITEM_SIZE = 1_000
# table - The name of the table the migration is performed for.
# start_id - The ID of the object to start at
# stop_id - The ID of the object to end at
def perform(start_id, stop_id, table)
model = define_model_for(table)
# After analysis, a batch size of 1,000 items per query was found to be
# optimal. Discussion in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18418#note_282285336
(start_id..stop_id).each_slice(QUERY_ITEM_SIZE).each do |range|
model
.where(lock_version: nil)
.where("ID BETWEEN ? AND ?", range.first, range.last)
.update_all(lock_version: 0)
end
end
def define_model_for(table)
Class.new(ActiveRecord::Base) do
self.table_name = table
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Saves certificate validity dates for pages domains
class FillValidTimeForPagesDomainCertificate
# define PagesDomain with only needed code
class PagesDomain < ActiveRecord::Base
self.table_name = 'pages_domains'
def x509
return unless certificate.present?
@x509 ||= OpenSSL::X509::Certificate.new(certificate)
rescue OpenSSL::X509::CertificateError
nil
end
end
def perform(start_id, stop_id)
PagesDomain.where(id: start_id..stop_id).find_each do |domain|
# for some reason activerecord doesn't append timezone, iso8601 forces this
domain.update_columns(
certificate_valid_not_before: domain.x509&.not_before&.iso8601,
certificate_valid_not_after: domain.x509&.not_after&.iso8601
)
rescue StandardError => e
Gitlab::AppLogger.error "Failed to update pages domain certificate valid time. id: #{domain.id}, message: #{e.message}"
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Corrects the pages access level stored in the database, depending on project visibility
class FixPagesAccessLevel
# Copy routable here to avoid relying on application logic
module Routable
def build_full_path
if parent && path
parent.build_full_path + '/' + path
else
path
end
end
end
# Namespace
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
self.inheritance_column = :_type_disabled
include Routable
belongs_to :parent, class_name: "Namespace"
end
# Project
class Project < ActiveRecord::Base
self.table_name = 'projects'
self.inheritance_column = :_type_disabled
include Routable
belongs_to :namespace
alias_method :parent, :namespace
alias_attribute :parent_id, :namespace_id
PRIVATE = 0
INTERNAL = 10
PUBLIC = 20
def pages_deployed?
Dir.exist?(public_pages_path)
end
def public_pages_path
File.join(pages_path, 'public')
end
def pages_path
# TODO: when we migrate Pages to work with new storage types, change here to use disk_path
File.join(Settings.pages.path, build_full_path)
end
end
# ProjectFeature
class ProjectFeature < ActiveRecord::Base
include ::EachBatch
self.table_name = 'project_features'
belongs_to :project
PRIVATE = 10
ENABLED = 20
PUBLIC = 30
end
def perform(start_id, stop_id)
fix_public_access_level(start_id, stop_id)
make_internal_projects_public(start_id, stop_id)
fix_private_access_level(start_id, stop_id)
end
private
def access_control_is_enabled
@access_control_is_enabled = Gitlab.config.pages.access_control
end
# Public projects are allowed to have only enabled pages_access_level
# which is equivalent to public
def fix_public_access_level(start_id, stop_id)
project_features(start_id, stop_id, ProjectFeature::PUBLIC, Project::PUBLIC).each_batch do |features|
features.update_all(pages_access_level: ProjectFeature::ENABLED)
end
end
# If access control is disabled and the project has pages deployed,
# the project would become unavailable once access control is enabled,
# so we make these projects public to avoid surprising the user.
def make_internal_projects_public(start_id, stop_id)
return if access_control_is_enabled
project_features(start_id, stop_id, ProjectFeature::ENABLED, Project::INTERNAL).find_each do |project_feature|
next unless project_feature.project.pages_deployed?
project_feature.update(pages_access_level: ProjectFeature::PUBLIC)
end
end
# Private projects are not allowed to have the `enabled` access level, only `private` and `public`.
# If access control is enabled, these projects currently behave as if they have the `private` pages_access_level;
# if access control is disabled, they behave as if they have the `public` pages_access_level.
# We preserve this behaviour for projects with pages already deployed;
# for projects without pages we always set the `private` access level.
def fix_private_access_level(start_id, stop_id)
project_features(start_id, stop_id, ProjectFeature::ENABLED, Project::PRIVATE).find_each do |project_feature|
if access_control_is_enabled
project_feature.update!(pages_access_level: ProjectFeature::PRIVATE)
else
fixed_access_level = project_feature.project.pages_deployed? ? ProjectFeature::PUBLIC : ProjectFeature::PRIVATE
project_feature.update!(pages_access_level: fixed_access_level)
end
end
end
def project_features(start_id, stop_id, pages_access_level, project_visibility_level)
ProjectFeature.where(id: start_id..stop_id).joins(:project)
.where(pages_access_level: pages_access_level)
.where(projects: { visibility_level: project_visibility_level })
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class GenerateGitlabSubscriptions
def perform(start_id, stop_id)
end
end
end
end
Gitlab::BackgroundMigration::GenerateGitlabSubscriptions.prepend_mod_with('Gitlab::BackgroundMigration::GenerateGitlabSubscriptions')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# EE-specific migration
class MigrateDevopsSegmentsToGroups
def perform
# no-op for CE
end
end
end
end
Gitlab::BackgroundMigration::MigrateDevopsSegmentsToGroups.prepend_mod_with('Gitlab::BackgroundMigration::MigrateDevopsSegmentsToGroups')
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class populates the `finding_uuid` attribute for
# the existing `vulnerability_feedback` records.
class PopulateFindingUuidForVulnerabilityFeedback
REPORT_TYPES = {
sast: 0,
dependency_scanning: 1,
container_scanning: 2,
dast: 3,
secret_detection: 4,
coverage_fuzzing: 5,
api_fuzzing: 6
}.freeze
class VulnerabilityFeedback < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'vulnerability_feedback'
enum category: REPORT_TYPES
scope :in_range, -> (start, stop) { where(id: start..stop) }
scope :without_uuid, -> { where(finding_uuid: nil) }
def self.load_vulnerability_findings
all.to_a.tap { |collection| collection.each(&:vulnerability_finding) }
end
def set_finding_uuid
return unless vulnerability_finding.present? && vulnerability_finding.primary_identifier.present?
update_column(:finding_uuid, calculated_uuid)
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
end
def vulnerability_finding
BatchLoader.for(finding_key).batch do |finding_keys, loader|
project_ids = finding_keys.map { |key| key[:project_id] }
categories = finding_keys.map { |key| key[:category] }
fingerprints = finding_keys.map { |key| key[:project_fingerprint] }
findings = Finding.with_primary_identifier.where(
project_id: project_ids.uniq,
report_type: categories.uniq,
project_fingerprint: fingerprints.uniq
).to_a
finding_keys.each do |finding_key|
loader.call(
finding_key,
findings.find { |f| finding_key == f.finding_key }
)
end
end
end
private
def calculated_uuid
::Security::VulnerabilityUUID.generate(
report_type: category,
primary_identifier_fingerprint: vulnerability_finding.primary_identifier.fingerprint,
location_fingerprint: vulnerability_finding.location_fingerprint,
project_id: project_id
)
end
def finding_key
{
project_id: project_id,
category: category,
project_fingerprint: project_fingerprint
}
end
end
class Finding < ActiveRecord::Base # rubocop:disable Style/Documentation
include ShaAttribute
self.table_name = 'vulnerability_occurrences'
sha_attribute :project_fingerprint
sha_attribute :location_fingerprint
belongs_to :primary_identifier, class_name: 'Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback::Identifier'
enum report_type: REPORT_TYPES
scope :with_primary_identifier, -> { includes(:primary_identifier) }
def finding_key
{
project_id: project_id,
category: report_type,
project_fingerprint: project_fingerprint
}
end
end
class Identifier < ActiveRecord::Base # rubocop:disable Style/Documentation
self.table_name = 'vulnerability_identifiers'
end
def perform(*range)
feedback = VulnerabilityFeedback.without_uuid.in_range(*range).load_vulnerability_findings
feedback.each(&:set_finding_uuid)
log_info(feedback.count)
end
def log_info(feedback_count)
::Gitlab::BackgroundMigration::Logger.info(
migrator: self.class.name,
message: '`finding_uuid` attributes have been set',
count: feedback_count
)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Class to migrate service_desk_reply_to email addresses to issue_email_participants
class PopulateIssueEmailParticipants
# rubocop:disable Style/Documentation
class TmpIssue < ActiveRecord::Base
self.table_name = 'issues'
end
def perform(start_id, stop_id)
issues = TmpIssue.select(:id, :service_desk_reply_to, :created_at).where(id: (start_id..stop_id)).where.not(service_desk_reply_to: nil)
rows = issues.map do |issue|
{
issue_id: issue.id,
email: issue.service_desk_reply_to,
created_at: issue.created_at,
updated_at: issue.created_at
}
end
ApplicationRecord.legacy_bulk_insert(:issue_email_participants, rows, on_conflict: :do_nothing) # rubocop:disable Gitlab/BulkInsert
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop:disable Style/Documentation
class RecalculateProjectAuthorizations
def perform(user_ids)
# no-op
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class SyncBlockingIssuesCount
def perform(start_id, end_id)
end
end
end
end
Gitlab::BackgroundMigration::SyncBlockingIssuesCount.prepend_mod_with('Gitlab::BackgroundMigration::SyncBlockingIssuesCount')
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class SyncIssuesStateId
def perform(start_id, end_id)
ActiveRecord::Base.connection.execute <<~SQL
UPDATE issues
SET state_id =
CASE state
WHEN 'opened' THEN 1
WHEN 'closed' THEN 2
END
WHERE state_id IS NULL
AND id BETWEEN #{start_id} AND #{end_id}
SQL
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class SyncMergeRequestsStateId
def perform(start_id, end_id)
ActiveRecord::Base.connection.execute <<~SQL
UPDATE merge_requests
SET state_id =
CASE state
WHEN 'opened' THEN 1
WHEN 'closed' THEN 2
WHEN 'merged' THEN 3
WHEN 'locked' THEN 4
END
WHERE state_id IS NULL
AND id BETWEEN #{start_id} AND #{end_id}
SQL
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class WrongfullyConfirmedEmailUnconfirmer
class UserModel < ActiveRecord::Base
alias_method :reset, :reload
self.table_name = 'users'
scope :active, -> { where(state: 'active', user_type: nil) } # only humans, skip bots
devise :confirmable
end
class EmailModel < ActiveRecord::Base
alias_method :reset, :reload
self.table_name = 'emails'
belongs_to :user
devise :confirmable
def self.wrongfully_confirmed_emails(start_id, stop_id)
joins(:user)
.merge(UserModel.active)
.where(id: (start_id..stop_id))
.where.not('emails.confirmed_at' => nil)
.where('emails.confirmed_at = users.confirmed_at')
.where('emails.email <> users.email')
.where('NOT EXISTS (SELECT 1 FROM user_synced_attributes_metadata WHERE user_id=users.id AND email_synced IS true)')
end
end
def perform(start_id, stop_id)
email_records = EmailModel
.wrongfully_confirmed_emails(start_id, stop_id)
.to_a
user_ids = email_records.map(&:user_id).uniq
ActiveRecord::Base.transaction do
update_email_records(start_id, stop_id)
update_user_records(user_ids)
end
# Refind the records with the "real" Email model so devise will notice that the user / email is unconfirmed
unconfirmed_email_records = ::Email.where(id: email_records.map(&:id))
ActiveRecord::Associations::Preloader.new.preload(unconfirmed_email_records, [:user])
send_emails(unconfirmed_email_records)
end
private
def update_email_records(start_id, stop_id)
EmailModel.connection.execute <<-SQL
WITH md5_strings as #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
#{email_query_for_update(start_id, stop_id).to_sql}
)
UPDATE #{EmailModel.connection.quote_table_name(EmailModel.table_name)}
SET confirmed_at = NULL,
confirmation_token = md5_strings.md5_string,
confirmation_sent_at = NOW()
FROM md5_strings
WHERE id = md5_strings.email_id
SQL
end
def update_user_records(user_ids)
UserModel
.where(id: user_ids)
.update_all("confirmed_at = NULL, confirmation_sent_at = NOW(), unconfirmed_email = NULL, confirmation_token=md5(users.id::varchar || users.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}')")
end
def email_query_for_update(start_id, stop_id)
EmailModel
.wrongfully_confirmed_emails(start_id, stop_id)
.select('emails.id as email_id', "md5(emails.id::varchar || emails.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}') as md5_string")
end
def send_emails(email_records)
user_records = email_records.map(&:user).uniq
user_records.each do |user|
Gitlab::BackgroundMigration::Mailers::UnconfirmMailer.unconfirm_notification_email(user).deliver_later
DeviseMailer.confirmation_instructions(user, user.confirmation_token).deliver_later(wait: 1.minute)
end
email_records.each do |email|
DeviseMailer.confirmation_instructions(email, email.confirmation_token).deliver_later(wait: 1.minute)
end
end
end
end
end
# This template is provided and maintained by Qualys Inc., an official Technology Partner with GitLab.
# See https://about.gitlab.com/partners/technology-partners/#security for more information.
#
# This template shows how to use Qualys IaC Scan with a GitLab CI/CD pipeline.
# Qualys and GitLab users can use this to scan their IaC templates for misconfigurations.
# Documentation about this integration: https://www.qualys.com/documentation/qualys-iac-gitlab-integration.pdf
#
# This template should not need editing to work in your project.
# It is not designed to be included in an existing CI/CD configuration with the "include:" keyword.
#
# The `qualys_iac_sast` job runs for branch (push) pipelines, including scheduled
# and manually run branch pipelines.
#
# The sast-report output complies with GitLab's format. This report displays Qualys IaC Scan's
# results in the Security tab in the pipeline view, if you have that feature enabled (GitLab Ultimate only).
# The Qualys IaC Scan output is available in the Jobs tab in the pipeline view.
#
# Requirements:
# Before you can use this template, add the following CI/CD variables to your
# project CI/CD settings:
#
# - QUALYS_URL: The Qualys guard URL.
# - QUALYS_USERNAME: The Qualys username.
# - QUALYS_PASSWORD: The Qualys password. Make this variable masked.
# - BREAK_ON_ERROR: (optional) If you don't want the pipeline to fail on an error,
# then add this variable and set it to "false". Otherwise set it
# to "true", or omit the variable.
stages:
- build
- test
- qualys_iac_scan
- deploy
qualys_iac_sast:
stage: qualys_iac_scan
image:
name: qualys/qiac_security_cli:latest
entrypoint: [""]
script:
- sh /home/qiac/gitlab.sh
artifacts:
name: "qualys-iac-sast-artifacts"
paths:
- qualys_iac_ci_result.json
reports:
sast: gl-sast-qualys-iac-ci-report.json
@@ -151,6 +151,10 @@
   category: ci_templates
   redis_slot: ci_templates
   aggregation: weekly
+- name: p_ci_templates_qualys_iac_security
+  category: ci_templates
+  redis_slot: ci_templates
+  aggregation: weekly
 - name: p_ci_templates_ios_fastlane
   category: ci_templates
   redis_slot: ci_templates
...
@@ -5,6 +5,8 @@
 import waitForPromises from 'helpers/wait_for_promises';
 import { METRIC_TYPE_SUMMARY } from '~/api/analytics_api';
 import ValueStreamMetrics from '~/cycle_analytics/components/value_stream_metrics.vue';
+import { METRICS_POPOVER_CONTENT } from '~/cycle_analytics/constants';
+import { prepareTimeMetricsData } from '~/cycle_analytics/utils';
 import MetricTile from '~/cycle_analytics/components/metric_tile.vue';
 import createFlash from '~/flash';
 import { group } from './mock_data';
@@ -14,6 +16,7 @@ jest.mock('~/flash');

 describe('ValueStreamMetrics', () => {
   let wrapper;
   let mockGetValueStreamSummaryMetrics;
+  let mockFilterFn;

   const { full_path: requestPath } = group;
   const fakeReqName = 'Mock metrics';
@@ -23,12 +26,13 @@ describe('ValueStreamMetrics', () => {
     name: fakeReqName,
   });

-  const createComponent = ({ requestParams = {} } = {}) => {
+  const createComponent = (props = {}) => {
     return shallowMount(ValueStreamMetrics, {
       propsData: {
         requestPath,
-        requestParams,
+        requestParams: {},
         requests: [metricsRequestFactory()],
+        ...props,
       },
     });
   };
@@ -104,6 +108,35 @@ describe('ValueStreamMetrics', () => {
       expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
     });

+    describe('filterFn', () => {
+      const transferedMetricsData = prepareTimeMetricsData(metricsData, METRICS_POPOVER_CONTENT);
+
+      it('with a filter function, will call the function with the metrics data', async () => {
+        const filteredData = [
+          { identifier: 'issues', value: '3', title: 'New Issues', description: 'foo' },
+        ];
+        mockFilterFn = jest.fn(() => filteredData);
+
+        wrapper = createComponent({
+          filterFn: mockFilterFn,
+        });
+
+        await waitForPromises();
+
+        expect(mockFilterFn).toHaveBeenCalledWith(transferedMetricsData);
+        expect(wrapper.vm.metrics).toEqual(filteredData);
+      });
+
+      it('without a filter function, it will only update the metrics', async () => {
+        wrapper = createComponent();
+
+        await waitForPromises();
+
+        expect(mockFilterFn).not.toHaveBeenCalled();
+        expect(wrapper.vm.metrics).toEqual(transferedMetricsData);
+      });
+    });
+
     describe('with additional params', () => {
       beforeEach(async () => {
         wrapper = createComponent({
...
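Note: as these specs show, the filter receives data only after `prepareTimeMetricsData` has shaped it, so filter functions can rely on fields such as `identifier` being present. An illustrative filter matching the fixture shape used above:

// Keep only the issue-count metric from the prepared data (illustrative).
const keepIssueMetrics = (metrics) => metrics.filter(({ identifier }) => identifier === 'issues');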
-import { GlSegmentedControl } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
 import { nextTick } from 'vue';
+import { GlSegmentedControl } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
 import CiCdAnalyticsAreaChart from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_area_chart.vue';
 import CiCdAnalyticsCharts from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue';
 import { transformedAreaChartData, chartOptions } from '../mock_data';
@@ -29,12 +29,15 @@ const DEFAULT_PROPS = {
 describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', () => {
   let wrapper;

-  const createWrapper = (props = {}) =>
-    shallowMount(CiCdAnalyticsCharts, {
+  const createWrapper = (props = {}, slots = {}) =>
+    shallowMountExtended(CiCdAnalyticsCharts, {
       propsData: {
         ...DEFAULT_PROPS,
         ...props,
       },
+      scopedSlots: {
+        ...slots,
+      },
     });

   afterEach(() => {
@@ -44,20 +47,20 @@ describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', (
     }
   });

-  describe('segmented control', () => {
-    let segmentedControl;
+  const findMetricsSlot = () => wrapper.findByTestId('metrics-slot');
+  const findSegmentedControl = () => wrapper.findComponent(GlSegmentedControl);

+  describe('segmented control', () => {
     beforeEach(() => {
       wrapper = createWrapper();
-      segmentedControl = wrapper.find(GlSegmentedControl);
     });

     it('should default to the first chart', () => {
-      expect(segmentedControl.props('checked')).toBe(0);
+      expect(findSegmentedControl().props('checked')).toBe(0);
     });

     it('should use the title and index as values', () => {
-      const options = segmentedControl.props('options');
+      const options = findSegmentedControl().props('options');
       expect(options).toHaveLength(3);
       expect(options).toEqual([
         {
@@ -76,7 +79,7 @@ describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', (
     });

     it('should select a different chart on change', async () => {
-      segmentedControl.vm.$emit('input', 1);
+      findSegmentedControl().vm.$emit('input', 1);

       const chart = wrapper.find(CiCdAnalyticsAreaChart);
@@ -91,4 +94,24 @@ describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', (
     wrapper = createWrapper({ charts: [] });
     expect(wrapper.find(CiCdAnalyticsAreaChart).exists()).toBe(false);
   });

+  describe('slots', () => {
+    beforeEach(() => {
+      wrapper = createWrapper(
+        {},
+        {
+          metrics: '<div data-testid="metrics-slot">selected chart: {{props.selectedChart}}</div>',
+        },
+      );
+    });
+
+    it('renders a metrics slot', async () => {
+      const selectedChart = 1;
+      findSegmentedControl().vm.$emit('input', selectedChart);
+
+      await nextTick();
+
+      expect(findMetricsSlot().text()).toBe(`selected chart: ${selectedChart}`);
+    });
+  });
 });
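The new slots spec above also documents the consumer contract: the scoped metrics slot receives the zero-based index of the chart picked in the segmented control. A minimal template sketch, assuming a parent component that already has charts and chartOptions in scope:

<!-- Hypothetical parent template embedding the charts component -->
<ci-cd-analytics-charts :charts="charts" :chart-options="chartOptions">
  <template #metrics="{ selectedChart }">
    <!-- selectedChart mirrors the segmented control's value, e.g. 0 for the first chart -->
    <span>Showing chart {{ selectedChart }}</span>
  </template>
</ci-cd-analytics-charts>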
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillLegacyProjectRepositories do
it_behaves_like 'backfill migration for project repositories', :legacy
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillProjectUpdatedAtAfterRepositoryStorageMove, :migration, schema: 20210301200959 do
let(:projects) { table(:projects) }
let(:project_repository_storage_moves) { table(:project_repository_storage_moves) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
subject { described_class.new }
describe '#perform' do
it 'updates project updated_at column if they were moved to a different repository storage' do
freeze_time do
project_1 = projects.create!(id: 1, namespace_id: namespace.id, updated_at: 1.day.ago)
project_2 = projects.create!(id: 2, namespace_id: namespace.id, updated_at: Time.current)
original_project_3_updated_at = 2.minutes.from_now
project_3 = projects.create!(id: 3, namespace_id: namespace.id, updated_at: original_project_3_updated_at)
original_project_4_updated_at = 10.days.ago
project_4 = projects.create!(id: 4, namespace_id: namespace.id, updated_at: original_project_4_updated_at)
repository_storage_move_1 = project_repository_storage_moves.create!(project_id: project_1.id, updated_at: 2.hours.ago, source_storage_name: 'default', destination_storage_name: 'default')
repository_storage_move_2 = project_repository_storage_moves.create!(project_id: project_2.id, updated_at: Time.current, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(project_id: project_3.id, updated_at: Time.current, source_storage_name: 'default', destination_storage_name: 'default')
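# project_3's updated_at is already ahead of its storage move's timestamp, and project_4
# has no storage move at all, so both are expected to keep their original timestamps.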
subject.perform([1, 2, 3, 4, non_existing_record_id])
expect(project_1.reload.updated_at).to eq(repository_storage_move_1.updated_at + 1.second)
expect(project_2.reload.updated_at).to eq(repository_storage_move_2.updated_at + 1.second)
expect(project_3.reload.updated_at).to eq(original_project_3_updated_at)
expect(project_4.reload.updated_at).to eq(original_project_4_updated_at)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20210301200959 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:scanners) { table(:vulnerability_scanners) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:findings) { table(:vulnerability_occurrences) }
let(:vulnerability_feedback) { table(:vulnerability_feedback) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:user) { users.create!(username: 'john.doe', projects_limit: 5) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'baz') }
let(:sast_report) { 0 }
let(:dependency_scanning_report) { 1 }
let(:dast_report) { 3 }
let(:secret_detection_report) { 4 }
let(:project_fingerprint) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
let(:location_fingerprint_1) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
let(:location_fingerprint_2) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
let(:location_fingerprint_3) { Digest::SHA1.hexdigest(SecureRandom.uuid) }
let(:finding_1) { finding_creator.call(sast_report, location_fingerprint_1) }
let(:finding_2) { finding_creator.call(dast_report, location_fingerprint_2) }
let(:finding_3) { finding_creator.call(secret_detection_report, location_fingerprint_3) }
let(:expected_uuid_1) do
Security::VulnerabilityUUID.generate(
report_type: 'sast',
primary_identifier_fingerprint: identifier.fingerprint,
location_fingerprint: location_fingerprint_1,
project_id: project.id
)
end
let(:expected_uuid_2) do
Security::VulnerabilityUUID.generate(
report_type: 'dast',
primary_identifier_fingerprint: identifier.fingerprint,
location_fingerprint: location_fingerprint_2,
project_id: project.id
)
end
let(:expected_uuid_3) do
Security::VulnerabilityUUID.generate(
report_type: 'secret_detection',
primary_identifier_fingerprint: identifier.fingerprint,
location_fingerprint: location_fingerprint_3,
project_id: project.id
)
end
let(:finding_creator) do
-> (report_type, location_fingerprint) do
findings.create!(
project_id: project.id,
primary_identifier_id: identifier.id,
scanner_id: scanner.id,
report_type: report_type,
uuid: SecureRandom.uuid,
name: 'Foo',
location_fingerprint: Gitlab::Database::ShaAttribute.serialize(location_fingerprint),
project_fingerprint: Gitlab::Database::ShaAttribute.serialize(project_fingerprint),
metadata_version: '1',
severity: 0,
confidence: 5,
raw_metadata: '{}'
)
end
end
let(:feedback_creator) do
-> (category, project_fingerprint) do
vulnerability_feedback.create!(
project_id: project.id,
author_id: user.id,
feedback_type: 0,
category: category,
project_fingerprint: project_fingerprint
)
end
end
let!(:feedback_1) { feedback_creator.call(finding_1.report_type, project_fingerprint) }
let!(:feedback_2) { feedback_creator.call(finding_2.report_type, project_fingerprint) }
let!(:feedback_3) { feedback_creator.call(finding_3.report_type, project_fingerprint) }
let!(:feedback_4) { feedback_creator.call(finding_1.report_type, 'foo') }
let!(:feedback_5) { feedback_creator.call(dependency_scanning_report, project_fingerprint) }
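# feedback_4 references an unknown project_fingerprint and feedback_5 has no finding with a
# matching report type (dependency scanning), so neither is expected to receive a finding_uuid.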
subject(:populate_finding_uuids) { described_class.new.perform(feedback_1.id, feedback_5.id) }
before do
allow(Gitlab::BackgroundMigration::Logger).to receive(:info)
end
describe '#perform' do
it 'updates the `finding_uuid` attributes of the feedback records' do
expect { populate_finding_uuids }.to change { feedback_1.reload.finding_uuid }.from(nil).to(expected_uuid_1)
.and change { feedback_2.reload.finding_uuid }.from(nil).to(expected_uuid_2)
.and change { feedback_3.reload.finding_uuid }.from(nil).to(expected_uuid_3)
.and not_change { feedback_4.reload.finding_uuid }
.and not_change { feedback_5.reload.finding_uuid }
expect(Gitlab::BackgroundMigration::Logger).to have_received(:info).once
end
it 'preloads the finding and identifier records to prevent N+1 queries' do
# Load feedback records(1), load findings(2), load identifiers(3) and finally update feedback records one by one(6)
expect { populate_finding_uuids }.not_to exceed_query_limit(6)
end
context 'when setting the `finding_uuid` attribute of a feedback record fails' do
let(:expected_error) { RuntimeError.new }
before do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow_next_found_instance_of(described_class::VulnerabilityFeedback) do |feedback|
allow(feedback).to receive(:update_column).and_raise(expected_error)
end
end
it 'captures the errors and does not crash entirely' do
expect { populate_finding_uuids }.not_to raise_error
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(expected_error).exactly(3).times
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20210301200959 do
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
let!(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
let!(:issue1) { table(:issues).create!(id: 1, project_id: project.id, service_desk_reply_to: "a@gitlab.com") }
let!(:issue2) { table(:issues).create!(id: 2, project_id: project.id, service_desk_reply_to: "b@gitlab.com") }
let(:issue_email_participants) { table(:issue_email_participants) }
describe '#perform' do
it 'migrates email addresses from service desk issues', :aggregate_failures do
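# The two arguments are presumably the inclusive (start_id, stop_id) range of issue IDs to migrate.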
expect { subject.perform(1, 2) }.to change { issue_email_participants.count }.by(2)
expect(issue_email_participants.find_by(issue_id: 1).email).to eq("a@gitlab.com")
expect(issue_email_participants.find_by(issue_id: 2).email).to eq("b@gitlab.com")
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20210301200959 do
let(:users) { table(:users) }
let(:emails) { table(:emails) }
let(:user_synced_attributes_metadata) { table(:user_synced_attributes_metadata) }
let(:confirmed_at_2_days_ago) { 2.days.ago }
let(:confirmed_at_3_days_ago) { 3.days.ago }
let(:one_year_ago) { 1.year.ago }
let!(:user_needs_migration_1) { users.create!(name: 'user1', email: 'test1@test.com', state: 'active', projects_limit: 1, confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
let!(:user_needs_migration_2) { users.create!(name: 'user2', email: 'test2@test.com', unconfirmed_email: 'unconfirmed@test.com', state: 'active', projects_limit: 1, confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
let!(:user_does_not_need_migration) { users.create!(name: 'user3', email: 'test3@test.com', state: 'active', projects_limit: 1) }
let!(:inactive_user) { users.create!(name: 'user4', email: 'test4@test.com', state: 'blocked', projects_limit: 1, confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
let!(:alert_bot_user) { users.create!(name: 'user5', email: 'test5@test.com', state: 'active', user_type: 2, projects_limit: 1, confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
let!(:user_has_synced_email) { users.create!(name: 'user6', email: 'test6@test.com', state: 'active', projects_limit: 1, confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
let!(:synced_attributes_metadata_for_user) { user_synced_attributes_metadata.create!(user_id: user_has_synced_email.id, email_synced: true) }
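# The "bad" emails below were confirmed at the same instant as their user (the wrongful-confirmation
# pattern this migration appears to target); the "good" emails have a different confirmed_at, were
# never confirmed, or belong to users the migration skips (inactive, bot, or synced-email users).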
let!(:bad_email_1) { emails.create!(user_id: user_needs_migration_1.id, email: 'other1@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
let!(:bad_email_2) { emails.create!(user_id: user_needs_migration_2.id, email: 'other2@test.com', confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
let!(:bad_email_3_inactive_user) { emails.create!(user_id: inactive_user.id, email: 'other-inactive@test.com', confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
let!(:bad_email_4_bot_user) { emails.create!(user_id: alert_bot_user.id, email: 'other-bot@test.com', confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
let!(:good_email_1) { emails.create!(user_id: user_needs_migration_2.id, email: 'other3@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
let!(:good_email_2) { emails.create!(user_id: user_needs_migration_2.id, email: 'other4@test.com', confirmed_at: nil) }
let!(:good_email_3) { emails.create!(user_id: user_does_not_need_migration.id, email: 'other5@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
let!(:second_email_for_user_with_synced_email) { emails.create!(user_id: user_has_synced_email.id, email: 'other6@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
subject do
email_ids = [bad_email_1, bad_email_2, good_email_1, good_email_2, good_email_3, second_email_for_user_with_synced_email].map(&:id)
described_class.new.perform(email_ids.min, email_ids.max)
end
it 'does not change irrelevant email records' do
subject
expect(good_email_1.reload.confirmed_at).to be_within(1.second).of(confirmed_at_2_days_ago)
expect(good_email_2.reload.confirmed_at).to be_nil
expect(good_email_3.reload.confirmed_at).to be_within(1.second).of(confirmed_at_2_days_ago)
expect(bad_email_3_inactive_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
expect(bad_email_4_bot_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
expect(good_email_1.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
expect(good_email_2.reload.confirmation_sent_at).to be_nil
expect(good_email_3.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
expect(bad_email_3_inactive_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
expect(bad_email_4_bot_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
end
it 'clears the `unconfirmed_email` field' do
subject
user_needs_migration_2.reload
expect(user_needs_migration_2.unconfirmed_email).to be_nil
end
it 'does not change irrelevant user records' do
subject
expect(user_does_not_need_migration.reload.confirmed_at).to be_nil
expect(inactive_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
expect(alert_bot_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
expect(user_has_synced_email.reload.confirmed_at).to be_within(1.second).of(confirmed_at_2_days_ago)
expect(user_does_not_need_migration.reload.confirmation_sent_at).to be_nil
expect(inactive_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
expect(alert_bot_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
expect(user_has_synced_email.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
end
it 'updates confirmation_sent_at column' do
subject
expect(user_needs_migration_1.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
expect(user_needs_migration_2.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
expect(bad_email_1.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
expect(bad_email_2.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
end
it 'unconfirms bad email records' do
subject
expect(bad_email_1.reload.confirmed_at).to be_nil
expect(bad_email_2.reload.confirmed_at).to be_nil
expect(bad_email_1.reload.confirmation_token).not_to be_nil
expect(bad_email_2.reload.confirmation_token).not_to be_nil
end
it 'unconfirms user records' do
subject
expect(user_needs_migration_1.reload.confirmed_at).to be_nil
expect(user_needs_migration_2.reload.confirmed_at).to be_nil
expect(user_needs_migration_1.reload.confirmation_token).not_to be_nil
expect(user_needs_migration_2.reload.confirmation_token).not_to be_nil
end
context 'enqueued jobs' do
let(:user_1) { User.find(user_needs_migration_1.id) }
let(:user_2) { User.find(user_needs_migration_2.id) }
let(:email_1) { Email.find(bad_email_1.id) }
let(:email_2) { Email.find(bad_email_2.id) }
it 'enqueues the email confirmation and the unconfirm notification mailer jobs' do
allow(DeviseMailer).to receive(:confirmation_instructions).and_call_original
allow(Gitlab::BackgroundMigration::Mailers::UnconfirmMailer).to receive(:unconfirm_notification_email).and_call_original
subject
expect(DeviseMailer).to have_received(:confirmation_instructions).with(email_1, email_1.confirmation_token)
expect(DeviseMailer).to have_received(:confirmation_instructions).with(email_2, email_2.confirmation_token)
expect(Gitlab::BackgroundMigration::Mailers::UnconfirmMailer).to have_received(:unconfirm_notification_email).with(user_1)
expect(DeviseMailer).to have_received(:confirmation_instructions).with(user_1, user_1.confirmation_token)
expect(Gitlab::BackgroundMigration::Mailers::UnconfirmMailer).to have_received(:unconfirm_notification_email).with(user_2)
expect(DeviseMailer).to have_received(:confirmation_instructions).with(user_2, user_2.confirmation_token)
end
end
end
@@ -73,12 +73,10 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
   let(:merge_request) { create(:merge_request, merge_status: :unchecked, source_project: project, target_project: project) }

   describe '#async_execute' do
-    shared_examples_for 'no job is enqueued' do
-      it 'does not enqueue MergeRequestMergeabilityCheckWorker' do
-        expect(MergeRequestMergeabilityCheckWorker).not_to receive(:perform_async)
-
-        described_class.new(merge_request).async_execute
-      end
+    it 'updates merge status to checking' do
+      described_class.new(merge_request).async_execute
+
+      expect(merge_request).to be_checking
     end

     it 'enqueues MergeRequestMergeabilityCheckWorker' do
@@ -92,15 +90,11 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
         allow(Gitlab::Database).to receive(:read_only?) { true }
       end

-      it_behaves_like 'no job is enqueued'
-    end
-
-    context 'when merge_status is already checking' do
-      before do
-        merge_request.mark_as_checking
-      end
-
-      it_behaves_like 'no job is enqueued'
+      it 'does not enqueue MergeRequestMergeabilityCheckWorker' do
+        expect(MergeRequestMergeabilityCheckWorker).not_to receive(:perform_async)
+
+        described_class.new(merge_request).async_execute
+      end
     end
   end
...