Commit a6ca930f authored by Rémy Coutable

Merge branch '331248-remove-old-migrations-in-release-14-4' into 'master'

Resolve "Remove old migrations in release 14 (PART IV)"

See merge request gitlab-org/gitlab!76921
parents a0b1d35e 4fa40777
@@ -346,7 +346,7 @@ rspec fast_spec_helper minimal:
db:rollback:
extends: .db-job-base
script:
-  - scripts/db_tasks db:migrate VERSION=20181228175414
+  - scripts/db_tasks db:migrate VERSION=20210301200959
- scripts/db_tasks db:migrate SKIP_SCHEMA_VERSION_CHECK=true
db:rollback decomposed:
This source diff could not be displayed because it is too large.
# frozen_string_literal: true
class AddBloatEstimateToReindexAction < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :postgres_reindex_actions, :bloat_estimate_bytes_start, :bigint
end
end
# frozen_string_literal: true
class ChangeMrAllowMaintainerToPushDefault < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
change_column_default :merge_requests, :allow_maintainer_to_push, from: nil, to: true
end
end
def down
with_lock_retries do
change_column_default :merge_requests, :allow_maintainer_to_push, from: true, to: nil
end
end
end
# frozen_string_literal: true
class AddHasExternalWikiTrigger < ActiveRecord::Migration[6.0]
include Gitlab::Database::SchemaHelpers
DOWNTIME = false
FUNCTION_NAME = 'set_has_external_wiki'
TRIGGER_ON_INSERT_NAME = 'trigger_has_external_wiki_on_insert'
TRIGGER_ON_UPDATE_NAME = 'trigger_has_external_wiki_on_update'
TRIGGER_ON_DELETE_NAME = 'trigger_has_external_wiki_on_delete'
def up
create_trigger_function(FUNCTION_NAME, replace: true) do
<<~SQL
UPDATE projects SET has_external_wiki = COALESCE(NEW.active, FALSE)
WHERE projects.id = COALESCE(NEW.project_id, OLD.project_id);
RETURN NULL;
SQL
end
execute(<<~SQL)
CREATE TRIGGER #{TRIGGER_ON_INSERT_NAME}
AFTER INSERT ON services
FOR EACH ROW
WHEN (NEW.active = TRUE AND NEW.type = 'ExternalWikiService' AND NEW.project_id IS NOT NULL)
EXECUTE FUNCTION #{FUNCTION_NAME}();
SQL
execute(<<~SQL)
CREATE TRIGGER #{TRIGGER_ON_UPDATE_NAME}
AFTER UPDATE ON services
FOR EACH ROW
WHEN (NEW.type = 'ExternalWikiService' AND OLD.active != NEW.active AND NEW.project_id IS NOT NULL)
EXECUTE FUNCTION #{FUNCTION_NAME}();
SQL
execute(<<~SQL)
CREATE TRIGGER #{TRIGGER_ON_DELETE_NAME}
AFTER DELETE ON services
FOR EACH ROW
WHEN (OLD.type = 'ExternalWikiService' AND OLD.project_id IS NOT NULL)
EXECUTE FUNCTION #{FUNCTION_NAME}();
SQL
end
def down
drop_trigger(:services, TRIGGER_ON_INSERT_NAME)
drop_trigger(:services, TRIGGER_ON_UPDATE_NAME)
drop_trigger(:services, TRIGGER_ON_DELETE_NAME)
drop_function(FUNCTION_NAME)
end
end
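# Illustration (editorial, not part of the original migration): with these
# triggers in place the denormalized projects.has_external_wiki flag stays
# current without any application code. For example, assuming a service row
# of type 'ExternalWikiService' already exists for a project:
#
#   UPDATE services SET active = false WHERE id = 123;  -- example id
#
# fires trigger_has_external_wiki_on_update (OLD.active != NEW.active), and the
# function sets has_external_wiki = COALESCE(NEW.active, FALSE), i.e. false,
# on the owning project.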
# frozen_string_literal: true
class AddExpirationPolicyCompletedAtToContainerRepositories < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
add_column(:container_repositories, :expiration_policy_completed_at, :datetime_with_timezone)
end
def down
remove_column(:container_repositories, :expiration_policy_completed_at)
end
end
# frozen_string_literal: true
class AddContainerRegistryCleanupTagsServiceMaxListSizeToApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column(:application_settings, :container_registry_cleanup_tags_service_max_list_size, :integer, default: 200, null: false)
end
end
# frozen_string_literal: true
class AddAppSettingsContainerRegCleanupTagsServiceMaxListSizeConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
CONSTRAINT_NAME = 'app_settings_container_reg_cleanup_tags_max_list_size_positive'
disable_ddl_transaction!
def up
add_check_constraint :application_settings, 'container_registry_cleanup_tags_service_max_list_size >= 0', CONSTRAINT_NAME
end
def down
remove_check_constraint :application_settings, CONSTRAINT_NAME
end
end
# frozen_string_literal: true
class AddCustomMappingColumnsToHttpIntegrations < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :alert_management_http_integrations, :payload_example, :jsonb, null: false, default: {}
add_column :alert_management_http_integrations, :payload_attribute_mapping, :jsonb, null: false, default: {}
end
end
# frozen_string_literal: true
class AddEpicBoardList < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
unless table_exists?(:boards_epic_lists)
with_lock_retries do
create_table :boards_epic_lists do |t|
t.timestamps_with_timezone
t.references :epic_board, index: true, foreign_key: { to_table: :boards_epic_boards, on_delete: :cascade }, null: false
t.references :label, index: true, foreign_key: { on_delete: :cascade }
t.integer :position
t.integer :list_type, default: 1, limit: 2, null: false
t.index [:epic_board_id, :label_id], unique: true, where: 'list_type = 1', name: 'index_boards_epic_lists_on_epic_board_id_and_label_id'
end
end
end
add_check_constraint :boards_epic_lists, '(list_type <> 1) OR ("position" IS NOT NULL AND "position" >= 0)', 'boards_epic_lists_position_constraint'
end
def down
with_lock_retries do
drop_table :boards_epic_lists
end
end
end
# frozen_string_literal: true
class DeleteMockDeploymentServiceRecords < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
if Rails.env.development?
execute("DELETE FROM services WHERE type = 'MockDeploymentService'")
end
end
def down
# no-op
end
end
# frozen_string_literal: true
class ChangeUniqueIndexOnSecurityFindings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
OLD_INDEX_NAME = 'index_security_findings_on_uuid'
NEW_INDEX_NAME = 'index_security_findings_on_uuid_and_scan_id'
disable_ddl_transaction!
class SecurityFinding < ActiveRecord::Base
include EachBatch
self.table_name = 'security_findings'
end
def up
add_concurrent_index :security_findings, [:uuid, :scan_id], unique: true, name: NEW_INDEX_NAME
remove_concurrent_index_by_name :security_findings, OLD_INDEX_NAME
end
def down
# It is very unlikely that we will roll back this migration, but in case we have to,
# we clear the table: there can be multiple records with the same UUID,
# which would break the creation of the unique index on the `uuid` column.
# We clear the whole table because removing only the duplicated records would
# cause data inconsistencies.
SecurityFinding.each_batch(of: 10000) { |relation| relation.delete_all }
add_concurrent_index :security_findings, :uuid, unique: true, name: OLD_INDEX_NAME
remove_concurrent_index_by_name :security_findings, NEW_INDEX_NAME
end
end
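# Illustration (editorial, not part of the original migration): EachBatch makes
# the delete above run as a series of bounded statements over consecutive
# primary-key ranges of 10,000 rows, roughly
#
#   SecurityFinding.where(id: lower..upper).delete_all
#
# repeated until the table is empty, instead of a single long-running DELETE
# that would hold locks for the whole operation.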
# frozen_string_literal: true
class CreateNamespacePackageSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
create_table :namespace_package_settings, if_not_exists: true, id: false do |t|
t.references :namespace, primary_key: true, index: false, default: nil, foreign_key: { to_table: :namespaces, on_delete: :cascade }, type: :bigint
t.boolean :maven_duplicates_allowed, null: false, default: true
t.text :maven_duplicate_exception_regex, null: false, default: ''
end
end
add_text_limit :namespace_package_settings, :maven_duplicate_exception_regex, 255
end
def down
drop_table :namespace_package_settings
end
end
# frozen_string_literal: true
class AddSquashCommitShaIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = "index_merge_requests_on_target_project_id_and_squash_commit_sha"
disable_ddl_transaction!
def up
add_concurrent_index :merge_requests,
[:target_project_id, :squash_commit_sha],
name: INDEX_NAME
end
def down
remove_concurrent_index :merge_requests,
[:target_project_id, :squash_commit_sha],
name: INDEX_NAME
end
end
# frozen_string_literal: true
class AddDevopsAdoptionSnapshotRangeEnd < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :analytics_devops_adoption_snapshots, :end_time, :datetime_with_timezone
end
end
# frozen_string_literal: true
class AddGroupMergeRequestApprovalSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
create_table :group_merge_request_approval_settings, id: false do |t|
t.timestamps_with_timezone null: false
t.references :group, references: :namespaces, primary_key: true, default: nil, index: false,
foreign_key: { to_table: :namespaces, on_delete: :cascade }
t.boolean :allow_author_approval, null: false, default: false
end
end
end
def down
with_lock_retries do
drop_table :group_merge_request_approval_settings
end
end
end
# frozen_string_literal: true
class ChangePagesDeploymentSizeToBigint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
change_column_type_concurrently :pages_deployments, :size, :bigint
end
def down
undo_change_column_type_concurrently :pages_deployments, :size
end
end
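# Illustration (editorial, assumption about the helper): change_column_type_concurrently
# typically creates a temporary bigint column (named along the lines of
# size_for_type_change), installs triggers so new writes keep it in sync, and
# backfills existing rows in batches; a follow-up cleanup migration is expected
# to swap the columns and drop the old integer one.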
# frozen_string_literal: true
class CreateElasticReindexingSubtasks < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class ReindexingTask < ActiveRecord::Base
self.table_name = 'elastic_reindexing_tasks'
end
class ReindexingSubtask < ActiveRecord::Base
self.table_name = 'elastic_reindexing_subtasks'
end
def up
unless table_exists?(:elastic_reindexing_subtasks)
create_table :elastic_reindexing_subtasks do |t|
t.references :elastic_reindexing_task, foreign_key: { on_delete: :cascade }, null: false
t.text :alias_name, null: false
t.text :index_name_from, null: false
t.text :index_name_to, null: false
t.text :elastic_task, null: false
t.integer :documents_count_target
t.integer :documents_count
t.timestamps_with_timezone null: false
end
end
add_text_limit :elastic_reindexing_subtasks, :index_name_from, 255
add_text_limit :elastic_reindexing_subtasks, :index_name_to, 255
add_text_limit :elastic_reindexing_subtasks, :elastic_task, 255
add_text_limit :elastic_reindexing_subtasks, :alias_name, 255
ReindexingTask.find_each do |task|
next if task.index_name_from.blank? || task.index_name_to.blank? || task.elastic_task.blank?
next if ReindexingSubtask.where(elastic_reindexing_task_id: task.id).exists?
ReindexingSubtask.create(
elastic_reindexing_task_id: task.id,
documents_count_target: task.documents_count_target,
documents_count: task.documents_count,
alias_name: 'gitlab-production',
index_name_from: task.index_name_from,
index_name_to: task.index_name_to,
elastic_task: task.elastic_task
)
end
end
def down
drop_table :elastic_reindexing_subtasks
end
end
# frozen_string_literal: true
class CreateAdminNotes < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
create_table_with_constraints :namespace_admin_notes do |t|
t.timestamps_with_timezone
t.references :namespace, null: false, foreign_key: { on_delete: :cascade }
t.text :note
t.text_limit :note, 1000
end
end
def down
drop_table :namespace_admin_notes
end
end
# frozen_string_literal: true
class AddDevopsSnapshotIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_on_snapshots_segment_id_end_time'
def up
add_concurrent_index :analytics_devops_adoption_snapshots, [:segment_id, :end_time], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :analytics_devops_adoption_snapshots, INDEX_NAME
end
end
# frozen_string_literal: true
class ChangeClustersHelmMajorVersionDefaultTo3 < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
change_column_default(:clusters, :helm_major_version, from: 2, to: 3)
end
end
# frozen_string_literal: true
class AddServiceDeskReplyToIsNotNullIndexOnIssues < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
# no-op, the migration's version number was lowered to be executed earlier than db/post_migrate/20201128210234_schedule_populate_issue_email_participants.rb
#
# The new migration is located here: db/migrate/20201128210000_add_service_desk_reply_to_is_not_null_index_on_issues_fix.rb
end
end
# frozen_string_literal: true
class UpdateTrustedAppsToConfidential < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'tmp_index_oauth_applications_on_id_where_trusted'
disable_ddl_transaction!
def up
add_concurrent_index :oauth_applications, :id, where: 'trusted = true', name: INDEX_NAME
execute('UPDATE oauth_applications SET confidential = true WHERE trusted = true')
end
def down
# We won't be able to tell which trusted applications weren't confidential before the migration,
# and marking all trusted applications as non-confidential would introduce security issues.
remove_concurrent_index_by_name :oauth_applications, INDEX_NAME
end
end
# frozen_string_literal: true
class AddRestrictUserDefinedVariablesToProjectSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :project_ci_cd_settings, :restrict_user_defined_variables, :boolean, default: false, null: false
end
end
def down
with_lock_retries do
remove_column :project_ci_cd_settings, :restrict_user_defined_variables
end
end
end
# frozen_string_literal: true
class MigrateCoverageReportWorker < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
sidekiq_queue_migrate 'ci_pipelines_create_artifact', to: 'ci_pipeline_artifacts_coverage_report' # rubocop:disable Migration/SidekiqQueueMigrate
end
def down
sidekiq_queue_migrate 'ci_pipeline_artifacts_coverage_report', to: 'ci_pipelines_create_artifact' # rubocop:disable Migration/SidekiqQueueMigrate
end
end
# frozen_string_literal: true
class CreateIterationsCadence < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
create_table_with_constraints :iterations_cadences do |t|
t.references :group, null: false, foreign_key: { to_table: :namespaces, on_delete: :cascade }
t.timestamps_with_timezone null: false
t.date :start_date, null: false
t.date :last_run_date
t.integer :duration_in_weeks
t.integer :iterations_in_advance
t.boolean :active, default: true, null: false
t.boolean :automatic, default: true, null: false
t.text :title, null: false
t.text_limit :title, 255
end
end
def down
drop_table :iterations_cadences if table_exists?(:iterations_cadences)
end
end
# frozen_string_literal: true
class AddIterationsCadenceToSprints < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_sprints_iterations_cadence_id'
def up
add_column :sprints, :iterations_cadence_id, :integer unless column_exists?(:sprints, :iterations_cadence_id)
add_concurrent_index :sprints, :iterations_cadence_id, name: INDEX_NAME
add_concurrent_foreign_key :sprints, :iterations_cadences, column: :iterations_cadence_id, on_delete: :cascade
end
def down
remove_column :sprints, :iterations_cadence_id if column_exists?(:sprints, :iterations_cadence_id)
end
end
# frozen_string_literal: true
class AddDismissalReasonIntoVulnerabilityFeedbackTable < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
add_column :vulnerability_feedback, :dismissal_reason, :smallint
end
def down
remove_column :vulnerability_feedback, :dismissal_reason
end
end
# frozen_string_literal: true
class AddInvisibleCaptchaEnabledToSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :invisible_captcha_enabled, :boolean, null: false, default: false
end
end
# frozen_string_literal: true
class AddRateLimitingResponseTextToApplicationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
# rubocop:disable Migration/AddLimitToTextColumns
# limit is added in 20210101110640_set_limit_for_rate_limiting_response_text
def change
add_column :application_settings, :rate_limiting_response_text, :text
end
# rubocop:enable Migration/AddLimitToTextColumns
end
# frozen_string_literal: true
class CreateOnboardingProgress < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
create_table :onboarding_progresses do |t|
t.references :namespace, null: false, index: { unique: true }, foreign_key: { on_delete: :cascade }
t.timestamps_with_timezone null: false
t.datetime_with_timezone :git_pull_at
t.datetime_with_timezone :git_write_at
t.datetime_with_timezone :merge_request_created_at
t.datetime_with_timezone :pipeline_created_at
t.datetime_with_timezone :user_added_at
t.datetime_with_timezone :trial_started_at
t.datetime_with_timezone :subscription_created_at
t.datetime_with_timezone :required_mr_approvals_enabled_at
t.datetime_with_timezone :code_owners_enabled_at
t.datetime_with_timezone :scoped_label_created_at
t.datetime_with_timezone :security_scan_enabled_at
t.datetime_with_timezone :issue_auto_closed_at
t.datetime_with_timezone :repository_imported_at
t.datetime_with_timezone :repository_mirrored_at
end
end
end
def down
with_lock_retries do
drop_table :onboarding_progresses
end
end
end
# frozen_string_literal: true
class SetLimitForRateLimitingResponseText < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :application_settings, :rate_limiting_response_text, 255
end
def down
remove_text_limit :application_settings, :rate_limiting_response_text
end
end
# frozen_string_literal: true
class DropTemporaryIndexOnCiBuilds < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX = 'tmp_build_stage_position_index'
def up
remove_concurrent_index_by_name :ci_builds, INDEX
end
def down
add_concurrent_index :ci_builds, [:stage_id, :stage_idx], where: 'stage_idx IS NOT NULL', name: INDEX
end
end
# frozen_string_literal: true
class AddEpicBoardPositionIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_boards_epic_board_positions_on_scoped_relative_position'
disable_ddl_transaction!
def up
add_concurrent_index :boards_epic_board_positions, [:epic_board_id, :epic_id, :relative_position], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :boards_epic_board_positions, INDEX_NAME
end
end
# frozen_string_literal: true
class RenameAssetProxyWhitelistOnApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers::V2
DOWNTIME = false
disable_ddl_transaction!
def up
rename_column_concurrently :application_settings,
:asset_proxy_whitelist,
:asset_proxy_allowlist
end
def down
undo_rename_column_concurrently :application_settings,
:asset_proxy_whitelist,
:asset_proxy_allowlist
end
end
# frozen_string_literal: true
class AddEntityColumnsToVulnerabilityOccurrences < ActiveRecord::Migration[6.0]
DOWNTIME = false
# rubocop:disable Migration/AddLimitToTextColumns
# limits are added in the AddTextLimitToVulnerabilityOccurrencesEntityColumns migration below
def change
add_column :vulnerability_occurrences, :description, :text
add_column :vulnerability_occurrences, :message, :text
add_column :vulnerability_occurrences, :solution, :text
add_column :vulnerability_occurrences, :cve, :text
add_column :vulnerability_occurrences, :location, :jsonb
end
# rubocop:enable Migration/AddLimitToTextColumns
end
# frozen_string_literal: true
class AddTextLimitToVulnerabilityOccurrencesEntityColumns < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :vulnerability_occurrences, :description, 15000
add_text_limit :vulnerability_occurrences, :message, 3000
add_text_limit :vulnerability_occurrences, :solution, 7000
add_text_limit :vulnerability_occurrences, :cve, 48400
end
def down
remove_text_limit :vulnerability_occurrences, :description
remove_text_limit :vulnerability_occurrences, :message
remove_text_limit :vulnerability_occurrences, :solution
remove_text_limit :vulnerability_occurrences, :cve
end
end
# frozen_string_literal: true
class AddUniqueIndexForGolangPackages < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_packages_on_project_id_name_version_unique_when_golang'
PACKAGE_TYPE_GOLANG = 8
disable_ddl_transaction!
def up
add_concurrent_index :packages_packages, [:project_id, :name, :version], unique: true, where: "package_type = #{PACKAGE_TYPE_GOLANG}", name: INDEX_NAME
end
def down
remove_concurrent_index_by_name(:packages_packages, INDEX_NAME)
end
end
# frozen_string_literal: true
class DropTmpIndexOnEmails < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
EMAIL_INDEX_NAME = 'tmp_index_for_email_unconfirmation_migration'
disable_ddl_transaction!
def up
Gitlab::BackgroundMigration.steal('WrongfullyConfirmedEmailUnconfirmer')
remove_concurrent_index_by_name(:emails, EMAIL_INDEX_NAME)
end
def down
add_concurrent_index(:emails, :id, where: 'confirmed_at IS NOT NULL', name: EMAIL_INDEX_NAME)
end
end
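# Illustration (editorial, not part of the original migration):
# Gitlab::BackgroundMigration.steal runs any still-queued
# WrongfullyConfirmedEmailUnconfirmer background-migration jobs inline, so the
# temporary index those jobs rely on is only dropped once nothing needs it.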
# frozen_string_literal: true
# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddMergeRequestDiffCommitTrailers < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :merge_request_diff_commits, :trailers, :jsonb, default: {}, null: false
end
end
def down
with_lock_retries do
remove_column :merge_request_diff_commits, :trailers
end
end
end
# frozen_string_literal: true
# This migration aligns an existing database schema with what we actually expect
# and fixes inconsistencies with index names and similar issues.
#
# This is intended for GitLab.com, but can be run on any instance.
class RenameIndexesOnGitLabCom < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
rename_index_if_exists :ldap_group_links, 'ldap_groups_pkey', 'ldap_group_links_pkey'
# Removes the unique constraint and adds a unique index instead
replace_unique_constraint_with_index :emails, :email, 'emails_email_key', 'index_emails_on_email'
replace_unique_constraint_with_index :users, :confirmation_token, 'users_confirmation_token_key', 'index_users_on_confirmation_token'
replace_unique_constraint_with_index :users, :reset_password_token, 'users_reset_password_token_key', 'index_users_on_reset_password_token'
replace_unique_constraint_with_index :users, :email, 'users_email_key', 'index_users_on_email'
upgrade_to_primary_key(:schema_migrations, :version, 'schema_migrations_version_key', 'schema_migrations_pkey')
end
def down
# no-op
end
private
def replace_unique_constraint_with_index(table, columns, old_name, new_name)
return unless index_exists_by_name?(table, old_name)
add_concurrent_index table, columns, unique: true, name: new_name
execute "ALTER TABLE #{quote_table_name(table)} DROP CONSTRAINT #{quote_table_name(old_name)}"
end
def rename_index_if_exists(table, old_name, new_name)
return unless index_exists_by_name?(table, old_name)
return if index_exists_by_name?(table, new_name)
with_lock_retries do
rename_index table, old_name, new_name
end
end
def upgrade_to_primary_key(table, column, old_name, new_name)
return unless index_exists_by_name?(table, old_name)
return if index_exists_by_name?(table, new_name)
return if primary_key(table)
execute "ALTER TABLE #{quote_table_name(table)} ADD CONSTRAINT #{new_name} PRIMARY KEY (#{column})"
execute "ALTER TABLE #{quote_table_name(table)} DROP CONSTRAINT #{old_name}"
end
end
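# Illustration (editorial, not part of the original migration): note the ordering
# in replace_unique_constraint_with_index above: the unique index is created
# before the old constraint is dropped, so uniqueness remains enforced at every
# point during the swap; dropping the constraint first would open a window in
# which duplicate rows could be inserted.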
# frozen_string_literal: true
class AddKeepLatestArtifactsToApplicationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
# This is named keep_latest_artifact for consistency with the project-level setting, but
# turning it on keeps all (multiple) artifacts of the latest pipeline per ref.
add_column :application_settings, :keep_latest_artifact, :boolean, default: true, null: false
end
end
# frozen_string_literal: true
class AddDiffTypeToMergeRequestDiffs < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
UNIQUE_INDEX_NAME = 'index_merge_request_diffs_on_unique_merge_request_id'
def up
unless column_exists?(:merge_request_diffs, :diff_type)
with_lock_retries do
add_column :merge_request_diffs, :diff_type, :integer, null: false, limit: 2, default: 1
end
end
add_concurrent_index :merge_request_diffs, :merge_request_id, unique: true, where: 'diff_type = 2', name: UNIQUE_INDEX_NAME
end
def down
remove_concurrent_index_by_name(:merge_request_diffs, UNIQUE_INDEX_NAME)
if column_exists?(:merge_request_diffs, :diff_type)
with_lock_retries do
remove_column :merge_request_diffs, :diff_type
end
end
end
end
# frozen_string_literal: true
# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddMergeRequestContextCommitTrailers < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :merge_request_context_commits, :trailers, :jsonb, default: {}, null: false
end
end
# frozen_string_literal: true
class UpdateMaxImportSizeDefault < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
change_column_default(:application_settings, :max_import_size, from: 50, to: 0)
end
end
# frozen_string_literal: true
class CreateDastProfiles < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Profile used to run a DAST on-demand scan' }
create_table_with_constraints :dast_profiles, comment: table_comment.to_json do |t| # rubocop:disable Migration/AddLimitToTextColumns
t.references :project, null: false, foreign_key: false, index: false
t.references :dast_site_profile, null: false, foreign_key: { on_delete: :cascade }
t.references :dast_scanner_profile, null: false, foreign_key: { on_delete: :cascade }
t.timestamps_with_timezone
# rubocop:disable Migration/AddLimitToTextColumns
t.text :name, null: false
t.text :description, null: false
# rubocop:enable Migration/AddLimitToTextColumns
t.index [:project_id, :name], unique: true
t.text_limit :name, 255
t.text_limit :description, 255
end
end
def down
with_lock_retries do
drop_table :dast_profiles
end
end
end
# frozen_string_literal: true
class AddProjectFkForDastProfile < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :dast_profiles, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :dast_profiles, column: :project_id
end
end
end
# frozen_string_literal: true
class AddTemporaryIndexOnSecurityFindingsScanId < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'tmp_index_on_security_findings_scan_id'
disable_ddl_transaction!
def up
add_concurrent_index :security_findings, :scan_id, where: 'uuid is null', name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :security_findings, INDEX_NAME
end
end
# frozen_string_literal: true
class DropTmpIndexOnEmailsAgain < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
EMAIL_INDEX_NAME = 'tmp_index_for_email_unconfirmation_migration'
disable_ddl_transaction!
def up
remove_concurrent_index_by_name(:emails, EMAIL_INDEX_NAME)
end
def down
add_concurrent_index(:emails, :id, where: 'confirmed_at IS NOT NULL', name: EMAIL_INDEX_NAME)
end
end
# frozen_string_literal: true
class CreateComposerCacheFile < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
# rubocop:disable Migration/AddLimitToTextColumns
create_table_with_constraints :packages_composer_cache_files do |t|
t.timestamps_with_timezone
# record can be deleted after `delete_at`
t.datetime_with_timezone :delete_at
# which namespace it belongs to
t.integer :namespace_id, null: true
# file storage related fields
t.integer :file_store, limit: 2, null: false, default: 1
t.text :file, null: false
t.binary :file_sha256, null: false
t.index [:namespace_id, :file_sha256], name: "index_packages_composer_cache_namespace_and_sha", using: :btree, unique: true
t.foreign_key :namespaces, column: :namespace_id, on_delete: :nullify
t.text_limit :file, 255
end
end
def down
drop_table :packages_composer_cache_files
end
end
# frozen_string_literal: true
class AddPipelineConfigurationFullPathToCompliancePipeline < ActiveRecord::Migration[6.0]
DOWNTIME = false
# rubocop:disable Migration/AddLimitToTextColumns
# limit is added in 20210119162812_add_text_limit_to_compliance_pipeline_configuration_full_path.rb
def up
add_column :compliance_management_frameworks, :pipeline_configuration_full_path, :text
end
# rubocop:enable Migration/AddLimitToTextColumns
def down
remove_column :compliance_management_frameworks, :pipeline_configuration_full_path
end
end
# frozen_string_literal: true
class AddConvertedAtToExperimentSubjects < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :experiment_subjects, :converted_at, :datetime_with_timezone
end
end
# frozen_string_literal: true
class AddContextToExperimentSubjects < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :experiment_subjects, :context, :jsonb, default: {}, null: false
end
end
# frozen_string_literal: true
class RemoveGroupIdTitleIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_labels_on_group_id_and_title_with_null_project_id'
LABELS_TABLE = :labels
def up
remove_concurrent_index_by_name LABELS_TABLE, INDEX_NAME
end
def down
add_concurrent_index LABELS_TABLE, [:group_id, :title], where: 'project_id IS NULL', name: INDEX_NAME
end
end
# frozen_string_literal: true
class AddIndexesToOnboardingProgresses < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
CREATE_TRACK_INDEX_NAME = 'index_onboarding_progresses_for_create_track'
VERIFY_TRACK_INDEX_NAME = 'index_onboarding_progresses_for_verify_track'
TRIAL_TRACK_INDEX_NAME = 'index_onboarding_progresses_for_trial_track'
TEAM_TRACK_INDEX_NAME = 'index_onboarding_progresses_for_team_track'
disable_ddl_transaction!
def up
add_concurrent_index :onboarding_progresses, :created_at, where: 'git_write_at IS NULL', name: CREATE_TRACK_INDEX_NAME
add_concurrent_index :onboarding_progresses, :git_write_at, where: 'git_write_at IS NOT NULL AND pipeline_created_at IS NULL', name: VERIFY_TRACK_INDEX_NAME
add_concurrent_index :onboarding_progresses, 'GREATEST(git_write_at, pipeline_created_at)', where: 'git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NULL', name: TRIAL_TRACK_INDEX_NAME
add_concurrent_index :onboarding_progresses, 'GREATEST(git_write_at, pipeline_created_at, trial_started_at)', where: 'git_write_at IS NOT NULL AND pipeline_created_at IS NOT NULL AND trial_started_at IS NOT NULL AND user_added_at IS NULL', name: TEAM_TRACK_INDEX_NAME
end
def down
remove_concurrent_index_by_name :onboarding_progresses, CREATE_TRACK_INDEX_NAME
remove_concurrent_index_by_name :onboarding_progresses, VERIFY_TRACK_INDEX_NAME
remove_concurrent_index_by_name :onboarding_progresses, TRIAL_TRACK_INDEX_NAME
remove_concurrent_index_by_name :onboarding_progresses, TEAM_TRACK_INDEX_NAME
end
end
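# Illustration (editorial, not part of the original migration): each partial
# index above matches one onboarding "track". For example, the verify-track
# index only contains namespaces that have pushed code but not yet run a
# pipeline, so a query like
#
#   SELECT namespace_id FROM onboarding_progresses
#   WHERE git_write_at IS NOT NULL AND pipeline_created_at IS NULL
#   ORDER BY git_write_at;
#
# (hypothetical, for illustration) can use that small index instead of scanning
# the whole table.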
# frozen_string_literal: true
class CreateGroupRepositoryStorageMove < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
unless table_exists?(:group_repository_storage_moves)
with_lock_retries do
create_table :group_repository_storage_moves do |t|
t.timestamps_with_timezone
t.references :group, references: :namespace, column: :group_id, index: true, null: false
t.integer :state, limit: 2, default: 1, null: false
t.text :source_storage_name, null: false
t.text :destination_storage_name, null: false
t.foreign_key :namespaces, column: :group_id, on_delete: :cascade
end
end
end
add_text_limit(:group_repository_storage_moves, :source_storage_name, 255, constraint_name: 'group_repository_storage_moves_source_storage_name')
add_text_limit(:group_repository_storage_moves, :destination_storage_name, 255, constraint_name: 'group_repository_storage_moves_destination_storage_name')
end
def down
with_lock_retries do
drop_table :group_repository_storage_moves
end
end
end
# frozen_string_literal: true
class AddHasExternalIssueTrackerTrigger < ActiveRecord::Migration[6.0]
include Gitlab::Database::SchemaHelpers
DOWNTIME = false
FUNCTION_NAME = 'set_has_external_issue_tracker'
TRIGGER_ON_INSERT_NAME = 'trigger_has_external_issue_tracker_on_insert'
TRIGGER_ON_UPDATE_NAME = 'trigger_has_external_issue_tracker_on_update'
TRIGGER_ON_DELETE_NAME = 'trigger_has_external_issue_tracker_on_delete'
def up
create_trigger_function(FUNCTION_NAME, replace: true) do
<<~SQL
UPDATE projects SET has_external_issue_tracker = (
EXISTS
(
SELECT 1
FROM services
WHERE project_id = COALESCE(NEW.project_id, OLD.project_id)
AND active = TRUE
AND category = 'issue_tracker'
)
)
WHERE projects.id = COALESCE(NEW.project_id, OLD.project_id);
RETURN NULL;
SQL
end
execute(<<~SQL)
CREATE TRIGGER #{TRIGGER_ON_INSERT_NAME}
AFTER INSERT ON services
FOR EACH ROW
WHEN (NEW.category = 'issue_tracker' AND NEW.active = TRUE AND NEW.project_id IS NOT NULL)
EXECUTE FUNCTION #{FUNCTION_NAME}();
SQL
execute(<<~SQL)
CREATE TRIGGER #{TRIGGER_ON_UPDATE_NAME}
AFTER UPDATE ON services
FOR EACH ROW
WHEN (NEW.category = 'issue_tracker' AND OLD.active != NEW.active AND NEW.project_id IS NOT NULL)
EXECUTE FUNCTION #{FUNCTION_NAME}();
SQL
execute(<<~SQL)
CREATE TRIGGER #{TRIGGER_ON_DELETE_NAME}
AFTER DELETE ON services
FOR EACH ROW
WHEN (OLD.category = 'issue_tracker' AND OLD.active = TRUE AND OLD.project_id IS NOT NULL)
EXECUTE FUNCTION #{FUNCTION_NAME}();
SQL
end
def down
drop_trigger(:services, TRIGGER_ON_INSERT_NAME)
drop_trigger(:services, TRIGGER_ON_UPDATE_NAME)
drop_trigger(:services, TRIGGER_ON_DELETE_NAME)
drop_function(FUNCTION_NAME)
end
end
# frozen_string_literal: true
class AddEnforceSshKeyExpirationToApplicationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :enforce_ssh_key_expiration, :boolean, default: false, null: false
end
end
# frozen_string_literal: true
class AddProxySettingsToJiraTrackerData < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :jira_tracker_data, :encrypted_proxy_address, :text
add_column :jira_tracker_data, :encrypted_proxy_address_iv, :text
add_column :jira_tracker_data, :encrypted_proxy_port, :text
add_column :jira_tracker_data, :encrypted_proxy_port_iv, :text
add_column :jira_tracker_data, :encrypted_proxy_username, :text
add_column :jira_tracker_data, :encrypted_proxy_username_iv, :text
add_column :jira_tracker_data, :encrypted_proxy_password, :text
add_column :jira_tracker_data, :encrypted_proxy_password_iv, :text
end
end
# frozen_string_literal: true
class AddTextLimitToCompliancePipelineConfigurationFullPath < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :compliance_management_frameworks, :pipeline_configuration_full_path, 255
end
def down
remove_text_limit :compliance_management_frameworks, :pipeline_configuration_full_path
end
end
# frozen_string_literal: true
class ExtendIndexOnCiBuildsMetadata < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
OLD_INDEX = :index_ci_builds_metadata_on_build_id_and_interruptible
NEW_INDEX = :index_ci_builds_metadata_on_build_id_and_id_and_interruptible
TABLE = :ci_builds_metadata
def up
create_covering_index(TABLE, NEW_INDEX)
remove_concurrent_index_by_name TABLE, OLD_INDEX
end
def down
add_concurrent_index TABLE, :build_id, where: 'interruptible = true', name: OLD_INDEX
remove_concurrent_index_by_name TABLE, NEW_INDEX
end
private
def create_covering_index(table, name)
return if index_exists_by_name?(table, name)
disable_statement_timeout do
execute <<~SQL
CREATE INDEX CONCURRENTLY #{name}
ON #{table} (build_id) INCLUDE (id)
WHERE interruptible = true
SQL
end
end
end
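# Illustration (editorial, not part of the original migration): because the new
# index stores id via INCLUDE, a lookup such as
#
#   SELECT id FROM ci_builds_metadata
#   WHERE build_id = 42 AND interruptible = true;
#
# (values are hypothetical) can be satisfied by an index-only scan, subject to
# the visibility map, without touching the table heap.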
# frozen_string_literal: true
class DeleteOauthApplicationsTmpIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'tmp_index_oauth_applications_on_id_where_trusted'
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :oauth_applications, INDEX_NAME
end
def down
add_concurrent_index :oauth_applications, :id, where: 'trusted = true', name: INDEX_NAME
end
end
# frozen_string_literal: true
class RemoveRepositoryReadOnlyToGroups < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
if column_exists?(:namespaces, :repository_read_only)
with_lock_retries do
remove_column :namespaces, :repository_read_only # rubocop:disable Migration/RemoveColumn
end
end
end
def down
unless column_exists?(:namespaces, :repository_read_only)
with_lock_retries do
add_column :namespaces, :repository_read_only, :boolean, default: false, null: false # rubocop:disable Migration/AddColumnsToWideTables
end
end
end
end
# frozen_string_literal: true
class AddDevopsAdoptionGroupSegment < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_column :analytics_devops_adoption_segments, :namespace_id, :integer, if_not_exists: true
add_concurrent_index :analytics_devops_adoption_segments, :namespace_id, unique: true
end
def down
remove_column :analytics_devops_adoption_segments, :namespace_id
end
end
# frozen_string_literal: true
class OptionalDevopsAdoptionSegmentName < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_analytics_devops_adoption_segments_on_name'
def up
change_column_null :analytics_devops_adoption_segments, :name, true
remove_concurrent_index_by_name :analytics_devops_adoption_segments, INDEX_NAME
end
def down
transaction do
execute "DELETE FROM analytics_devops_adoption_segments WHERE name IS NULL"
change_column_null :analytics_devops_adoption_segments, :name, false
end
add_concurrent_index :analytics_devops_adoption_segments, :name, unique: true, name: INDEX_NAME
end
end
# frozen_string_literal: true
class AddRepositoryReadOnlyToNamespaceSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :namespace_settings, :repository_read_only, :boolean, default: false, null: false
end
end
def down
with_lock_retries do
remove_column :namespace_settings, :repository_read_only
end
end
end
# frozen_string_literal: true
class AddStateToMergeRequestReviewers < ActiveRecord::Migration[6.0]
DOWNTIME = false
REVIEW_DEFAULT_STATE = 0
def change
add_column :merge_request_reviewers, :state, :smallint, default: REVIEW_DEFAULT_STATE, null: false
end
end
# frozen_string_literal: true
class AddPipelineStepToBulkImportsFailures < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
unless column_exists?(:bulk_import_failures, :pipeline_step, :text)
with_lock_retries do
add_column :bulk_import_failures, :pipeline_step, :text
end
end
add_text_limit :bulk_import_failures, :pipeline_step, 255
end
def down
with_lock_retries do
remove_column :bulk_import_failures, :pipeline_step
end
end
end
# frozen_string_literal: true
class AddDevopsAdoptionSegmentNamespaceFk < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :analytics_devops_adoption_segments, :namespaces, column: :namespace_id
end
def down
remove_foreign_key_if_exists :analytics_devops_adoption_segments, :namespaces, column: :namespace_id
end
end
# frozen_string_literal: true
class AddSecurityDashboardAccessLevelIntoProjectFeatures < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
PRIVATE_ACCESS_LEVEL = 10
def up
with_lock_retries do
add_column :project_features, :security_and_compliance_access_level, :integer, default: PRIVATE_ACCESS_LEVEL, null: false
end
end
def down
with_lock_retries do
remove_column :project_features, :security_and_compliance_access_level
end
end
end
# frozen_string_literal: true
class AddUniqueIndexServicesProjectIdAndType < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_services_on_project_id_and_type_unique'
def up
add_concurrent_index :services, [:project_id, :type], name: INDEX_NAME, unique: true
end
def down
remove_concurrent_index_by_name :services, name: INDEX_NAME
end
end
# frozen_string_literal: true
class RemoveIndexServicesProjectIdAndType < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_services_on_project_id_and_type'
# Replaced by the index added in 20210126091713_add_unique_index_services_project_id_and_type.rb
def up
remove_concurrent_index_by_name :services, name: INDEX_NAME
end
def down
add_concurrent_index :services, [:project_id, :type], name: INDEX_NAME
end
end
# frozen_string_literal: true
class AddRubygemsMaxFileSizeToPlanLimits < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :plan_limits, :rubygems_max_file_size, :bigint, default: 3.gigabytes, null: false
end
end
# frozen_string_literal: true
class AddSubgroupEventsToWebHooks < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :web_hooks, :subgroup_events, :boolean, null: false, default: false
end
end
# frozen_string_literal: true
class AddOldestMergeRequestsIndex < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
# replaced by db/migrate/20210201140434_add_oldest_merge_requests_index_again.rb
end
def down
# replaced by db/migrate/20210201140434_add_oldest_merge_requests_index_again.rb
end
end
# frozen_string_literal: true
class AddIterationsCadenceDateRangeConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
execute <<~SQL
ALTER TABLE sprints
ADD CONSTRAINT iteration_start_and_due_date_iterations_cadence_id_constraint
EXCLUDE USING gist
( iterations_cadence_id WITH =,
daterange(start_date, due_date, '[]') WITH &&
)
WHERE (group_id IS NOT NULL)
SQL
end
end
def down
with_lock_retries do
execute <<~SQL
ALTER TABLE sprints
DROP CONSTRAINT IF EXISTS iteration_start_and_due_date_iterations_cadence_id_constraint
SQL
end
end
end
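# Illustration (editorial, not part of the original migration): the GiST
# exclusion constraint above rejects two iterations of the same cadence whose
# inclusive date ranges overlap. For example, with the constraint in place,
# inserting one sprint for 2021-01-01..2021-01-14 and another for
# 2021-01-10..2021-01-20 with the same iterations_cadence_id (and a non-NULL
# group_id) fails with an exclusion-constraint violation.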
# frozen_string_literal: true
class RemoveIterationGroupDateRangeConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
execute <<~SQL
ALTER TABLE sprints
DROP CONSTRAINT IF EXISTS iteration_start_and_due_daterange_group_id_constraint
SQL
end
end
def down
with_lock_retries do
execute <<~SQL
ALTER TABLE sprints
ADD CONSTRAINT iteration_start_and_due_daterange_group_id_constraint
EXCLUDE USING gist
( group_id WITH =,
daterange(start_date, due_date, '[]') WITH &&
)
WHERE (group_id IS NOT NULL)
SQL
end
end
end
# frozen_string_literal: true
class AddGitTwoFactorSessionExpiryToApplicationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :git_two_factor_session_expiry, :integer, default: 15, null: false
end
end
# frozen_string_literal: true
class AddPreventMergeWithoutJiraIssueToProjectSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :project_settings, :prevent_merge_without_jira_issue, :boolean, null: false, default: false
end
end
def down
with_lock_retries do
remove_column :project_settings, :prevent_merge_without_jira_issue
end
end
end
# frozen_string_literal: true
class AddAutoDeleteAtToEnvironments < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :environments, :auto_delete_at, :datetime_with_timezone
end
end
def down
with_lock_retries do
remove_column :environments, :auto_delete_at
end
end
end
# frozen_string_literal: true
class AddContentTypeToDependencyProxyManifests < ActiveRecord::Migration[6.0]
DOWNTIME = false
# rubocop:disable Migration/AddLimitToTextColumns
# limit is added in 20210128140232_add_text_limit_to_dependency_proxy_manifests_content_type.rb
def change
add_column :dependency_proxy_manifests, :content_type, :text
end
# rubocop:enable Migration/AddLimitToTextColumns
end
# frozen_string_literal: true
class AddTextLimitToDependencyProxyManifestsContentType < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :dependency_proxy_manifests, :content_type, 255
end
def down
remove_text_limit :dependency_proxy_manifests, :content_type
end
end
# frozen_string_literal: true
class CreateCiNamespaceMonthlyUsage < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
create_table :ci_namespace_monthly_usages, if_not_exists: true do |t|
t.references :namespace, index: false, null: false
t.date :date, null: false
t.integer :additional_amount_available, null: false, default: 0
t.decimal :amount_used, null: false, default: 0.0, precision: 18, scale: 2
t.index [:namespace_id, :date], unique: true
end
end
add_check_constraint :ci_namespace_monthly_usages, "(date = date_trunc('month', date))", 'ci_namespace_monthly_usages_year_month_constraint'
end
def down
with_lock_retries do
drop_table :ci_namespace_monthly_usages
end
end
end
# frozen_string_literal: true
class CreateBackgroundMigrationTrackingTables < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table_with_constraints :batched_background_migrations do |t|
t.timestamps_with_timezone
t.bigint :min_value, null: false, default: 1
t.bigint :max_value, null: false
t.integer :batch_size, null: false
t.integer :sub_batch_size, null: false
t.integer :interval, limit: 2, null: false
t.integer :status, limit: 2, null: false, default: 0
t.text :job_class_name, null: false
t.text :batch_class_name, null: false,
default: 'Gitlab::Database::BackgroundMigration::PrimaryKeyBatchingStrategy'
t.text :table_name, null: false
t.text :column_name, null: false
t.jsonb :job_arguments, null: false, default: '[]'
t.text_limit :job_class_name, 100
t.text_limit :batch_class_name, 100
t.text_limit :table_name, 63
t.text_limit :column_name, 63
t.check_constraint :check_positive_min_value, 'min_value > 0'
t.check_constraint :check_max_value_in_range, 'max_value >= min_value'
t.check_constraint :check_positive_sub_batch_size, 'sub_batch_size > 0'
t.check_constraint :check_batch_size_in_range, 'batch_size >= sub_batch_size'
t.index %i[job_class_name table_name column_name], name: :index_batched_migrations_on_job_table_and_column_name
end
create_table :batched_background_migration_jobs do |t|
t.timestamps_with_timezone
t.datetime_with_timezone :started_at
t.datetime_with_timezone :finished_at
t.references :batched_background_migration, null: false, index: false, foreign_key: { on_delete: :cascade }
t.bigint :min_value, null: false
t.bigint :max_value, null: false
t.integer :batch_size, null: false
t.integer :sub_batch_size, null: false
t.integer :status, limit: 2, null: false, default: 0
t.integer :attempts, limit: 2, null: false, default: 0
t.index [:batched_background_migration_id, :id], name: :index_batched_jobs_by_batched_migration_id_and_id
end
end
def down
drop_table :batched_background_migration_jobs
drop_table :batched_background_migrations
end
end
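# Illustration (editorial, assumption about the runner): a row in
# batched_background_migrations describes work over one table and column between
# min_value and max_value; the runner carves that range into batches of
# batch_size rows, records each batch as a row in
# batched_background_migration_jobs, processes it sub_batch_size rows at a time,
# and waits roughly `interval` between batches. The check constraints above
# encode those invariants, e.g. batch_size >= sub_batch_size.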
# frozen_string_literal: true
class AddIndexToOncallShftsOnStartsAtAndEndsAt < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
NEW_NAME = 'index_oncall_shifts_on_rotation_id_and_starts_at_and_ends_at'
OLD_NAME = 'index_incident_management_oncall_shifts_on_rotation_id'
def up
add_concurrent_index :incident_management_oncall_shifts, %i[rotation_id starts_at ends_at], name: NEW_NAME
remove_concurrent_index_by_name :incident_management_oncall_shifts, OLD_NAME
end
def down
add_concurrent_index :incident_management_oncall_shifts, :rotation_id, name: OLD_NAME
remove_concurrent_index_by_name :incident_management_oncall_shifts, NEW_NAME
end
end
# frozen_string_literal: true
class AddActivePeriodsToOnCallRotations < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :incident_management_oncall_rotations, :active_period_start, :time, null: true
add_column :incident_management_oncall_rotations, :active_period_end, :time, null: true
end
end
# frozen_string_literal: true
class AddOldestMergeRequestsIndexAgain < ActiveRecord::Migration[6.0]
include Gitlab::Database::SchemaHelpers
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
INDEX = 'index_on_merge_requests_for_latest_diffs'
def up
execute "DROP INDEX CONCURRENTLY #{INDEX}" if invalid_index?
return if index_exists_by_name?('merge_requests', INDEX)
begin
disable_statement_timeout do
execute "CREATE INDEX CONCURRENTLY #{INDEX} ON merge_requests " \
'USING btree (target_project_id) INCLUDE (id, latest_merge_request_diff_id)'
end
rescue ActiveRecord::StatementInvalid => ex
# Due to https://github.com/lfittl/pg_query/issues/184, if the CREATE
# INDEX statement fails, we trigger a separate error due to the Gem not
# supporting the INCLUDE syntax.
#
# To work around this, we raise a custom error instead, as these won't
# have a query context injected.
raise "The index #{INDEX} couldn't be added: #{ex.message}"
end
create_comment(
'INDEX',
INDEX,
'Index used to efficiently obtain the oldest merge request for a commit SHA'
)
end
def down
return unless index_exists_by_name?('merge_requests', INDEX)
disable_statement_timeout do
execute "DROP INDEX CONCURRENTLY #{INDEX}"
end
end
def invalid_index?
result = execute(<<~SQL)
SELECT pg_class.relname
FROM pg_class, pg_index
WHERE pg_index.indisvalid = false
AND pg_index.indexrelid = pg_class.oid
AND pg_class.relname = '#{INDEX}';
SQL
result.values.any?
end
end
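# Illustration (editorial, not part of the original migration): a CREATE INDEX
# CONCURRENTLY that fails partway leaves an INVALID index behind in PostgreSQL
# instead of rolling it back; pg_index.indisvalid = false is how invalid_index?
# detects such leftovers, and dropping them first lets the index creation above
# be retried safely.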
# frozen_string_literal: true
class DropBackupLabelIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'backup_labels_project_id_title_idx'
def up
remove_concurrent_index_by_name(:backup_labels, name: INDEX_NAME)
end
def down
add_concurrent_index :backup_labels, [:project_id, :title], name: INDEX_NAME, unique: true, where: 'group_id = NULL::integer'
end
end
# frozen_string_literal: true
class RemoveHasExternalWikiConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
# This reverts the following migration: add_not_null_constraint :projects, :has_external_wiki, validate: false
if check_not_null_constraint_exists?(:projects, :has_external_wiki)
remove_not_null_constraint :projects, :has_external_wiki
end
end
def down
# no-op
end
end
# frozen_string_literal: true
class RestoreHasExternalWikiDefaultValue < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class TmpProject < ActiveRecord::Base
self.table_name = 'projects'
end
# This reverts the following migration: change_column_default(:projects, :has_external_wiki, from: nil, to: false)
# We only change the column when the current default value is false
def up
# Find out the current default value
column = TmpProject.columns.find { |c| c.name == 'has_external_wiki' }
return unless column
if column.default == 'false'
with_lock_retries do
change_column_default(:projects, :has_external_wiki, from: false, to: nil)
end
end
end
def down
# no-op
end
end
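# Illustration (editorial, not part of the original migration): TmpProject.columns
# reports column defaults as strings from the PostgreSQL adapter, which is why
# the check above compares column.default against 'false' rather than the
# boolean false.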
# frozen_string_literal: true
class CreatePackagesRubygemsMetadata < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
create_table_with_constraints :packages_rubygems_metadata, id: false do |t|
t.timestamps_with_timezone
t.references :package, primary_key: true, index: false, default: nil, null: false, foreign_key: { to_table: :packages_packages, on_delete: :cascade }, type: :bigint
t.text :authors
t.text :files
t.text :summary
t.text :description
t.text :email
t.text :homepage
t.text :licenses
t.text :metadata
t.text :author
t.text :bindir
t.text :cert_chain
t.text :executables
t.text :extensions
t.text :extra_rdoc_files
t.text :platform
t.text :post_install_message
t.text :rdoc_options
t.text :require_paths
t.text :required_ruby_version
t.text :required_rubygems_version
t.text :requirements
t.text :rubygems_version
t.text :signing_key
t.text_limit :authors, 255
t.text_limit :files, 255
t.text_limit :summary, 1024
t.text_limit :description, 1024
t.text_limit :email, 255
t.text_limit :homepage, 255
t.text_limit :licenses, 255
t.text_limit :metadata, 255
t.text_limit :author, 255
t.text_limit :bindir, 255
t.text_limit :cert_chain, 255
t.text_limit :executables, 255
t.text_limit :extensions, 255
t.text_limit :extra_rdoc_files, 255
t.text_limit :platform, 255
t.text_limit :post_install_message, 255
t.text_limit :rdoc_options, 255
t.text_limit :require_paths, 255
t.text_limit :required_ruby_version, 255
t.text_limit :required_rubygems_version, 255
t.text_limit :requirements, 255
t.text_limit :rubygems_version, 255
t.text_limit :signing_key, 255
end
end
def down
drop_table :packages_rubygems_metadata
end
end
# frozen_string_literal: true
class AddExpiredIndexToComposerCacheFiles < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'composer_cache_files_index_on_deleted_at'
def up
add_concurrent_index :packages_composer_cache_files, [:delete_at, :id], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :packages_composer_cache_files, INDEX_NAME
end
end
# frozen_string_literal: true
class AddOrphanIndexToComposerCacheFiles < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_composer_cache_files_where_namespace_id_is_null'
def up
add_concurrent_index :packages_composer_cache_files, :id, name: INDEX_NAME, where: 'namespace_id IS NULL'
end
def down
remove_concurrent_index_by_name :packages_composer_cache_files, INDEX_NAME
end
end
# frozen_string_literal: true
class AddStatusToPackagesPackages < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :packages_packages, :status, :smallint, default: 0, null: false
end
end
# frozen_string_literal: true
class AddGroupIdToCiDailyBuildGroupReportResults < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column(:ci_daily_build_group_report_results, :group_id, :bigint)
end
end
# frozen_string_literal: true
class CreateCiProjectMonthlyUsage < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
create_table :ci_project_monthly_usages, if_not_exists: true do |t|
t.references :project, foreign_key: { on_delete: :cascade }, index: false, null: false
t.date :date, null: false
t.decimal :amount_used, null: false, default: 0.0, precision: 18, scale: 2
t.index [:project_id, :date], unique: true
end
end
add_check_constraint :ci_project_monthly_usages, "(date = date_trunc('month', date))", 'ci_project_monthly_usages_year_month_constraint'
end
def down
with_lock_retries do
drop_table :ci_project_monthly_usages
end
end
end
# frozen_string_literal: true
class AddCreatorIdToCustomEmoji < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
# Custom Emoji is at the moment behind a default-disabled feature flag. It
# is unlikely there are any records in this table, but to be able to
# enforce a not-null constraint, delete any existing rows.
# Roll-out issue: https://gitlab.com/gitlab-org/gitlab/-/issues/231317
execute 'DELETE FROM custom_emoji'
add_reference :custom_emoji, # rubocop:disable Migration/AddReference
:creator,
index: true,
null: false, # rubocop:disable Rails/NotNullColumn
foreign_key: false # FK is added in 20210219100137
end
def down
remove_reference :custom_emoji, :creator
end
end
# frozen_string_literal: true
class RemoveNamespaceIdForeignKeyOnNamespaceOnboardingActions < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
remove_foreign_key :namespace_onboarding_actions, :namespaces
end
end
def down
with_lock_retries do
add_foreign_key :namespace_onboarding_actions, :namespaces, on_delete: :cascade
end
end
end
# frozen_string_literal: true
class RemoveForeignKeysFromAlertsServiceData < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
remove_foreign_key_if_exists :alerts_service_data, column: :service_id
end
end
def down
with_lock_retries do
add_foreign_key :alerts_service_data, :services, column: :service_id, on_delete: :cascade
end
end
end
# frozen_string_literal: true
class AddIssueCreatedAtToOnboardingProgress < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :onboarding_progresses, :issue_created_at, :datetime_with_timezone
end
end
# frozen_string_literal: true
class AddStatusExpiresAtToUserStatuses < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column(:user_statuses, :clear_status_at, :datetime_with_timezone, null: true)
end
end
def down
with_lock_retries do
remove_column(:user_statuses, :clear_status_at)
end
end
end
# frozen_string_literal: true
class AddIndexOnUserStatusesStatusExpiresAt < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_user_statuses_on_clear_status_at_not_null'
disable_ddl_transaction!
def up
add_concurrent_index(:user_statuses, :clear_status_at, name: INDEX_NAME, where: 'clear_status_at IS NOT NULL')
end
def down
remove_concurrent_index_by_name(:user_statuses, INDEX_NAME)
end
end
# frozen_string_literal: true
class AddIndexGroupIdToCiDailyBuildGroupReportResults < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_ci_daily_build_group_report_results_on_group_id'
disable_ddl_transaction!
def up
add_concurrent_index(:ci_daily_build_group_report_results, :group_id, name: INDEX_NAME)
add_concurrent_foreign_key(:ci_daily_build_group_report_results, :namespaces, column: :group_id)
end
def down
remove_foreign_key_if_exists(:ci_daily_build_group_report_results, column: :group_id)
remove_concurrent_index_by_name(:ci_daily_build_group_report_results, INDEX_NAME)
end
end
# frozen_string_literal: true
class AddNotesCreateLimitToApplicationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :notes_create_limit, :integer, default: 300, null: false
end
end
# frozen_string_literal: true
class AddEndsAtToOncallRotations < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :incident_management_oncall_rotations, :ends_at, :datetime_with_timezone
end
end
# frozen_string_literal: true
class CreateExternalApprovalRules < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
create_table_with_constraints :external_approval_rules, if_not_exists: true do |t|
t.references :project, foreign_key: { on_delete: :cascade }, null: false, index: false
t.timestamps_with_timezone
t.text :external_url, null: false
t.text_limit :external_url, 255
t.text :name, null: false
t.text_limit :name, 255
t.index([:project_id, :name],
unique: true,
name: 'idx_on_external_approval_rules_project_id_name')
t.index([:project_id, :external_url],
unique: true,
name: 'idx_on_external_approval_rules_project_id_external_url')
end
create_table :external_approval_rules_protected_branches do |t|
t.bigint :external_approval_rule_id, null: false, index: { name: 'idx_eaprpb_external_approval_rule_id' }
t.bigint :protected_branch_id, null: false
t.index([:protected_branch_id, :external_approval_rule_id],
unique: true,
name: 'idx_protected_branch_id_external_approval_rule_id')
end
end
def down
with_lock_retries do
drop_table :external_approval_rules_protected_branches, force: :cascade, if_exists: true
end
with_lock_retries do
drop_table :external_approval_rules, force: :cascade, if_exists: true
end
end
end
# frozen_string_literal: true
class CreateSecurityOrchestrationPolicyConfigurations < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_PREFIX = 'index_sop_configs_'
def up
table_comment = { owner: 'group::container security', description: 'Configuration used to store relationship between project and security policy repository' }
create_table_with_constraints :security_orchestration_policy_configurations, comment: table_comment.to_json do |t|
t.references :project, null: false, foreign_key: { to_table: :projects, on_delete: :cascade }, index: { name: INDEX_PREFIX + 'on_project_id', unique: true }
t.references :security_policy_management_project, null: false, foreign_key: { to_table: :projects, on_delete: :restrict }, index: { name: INDEX_PREFIX + 'on_security_policy_management_project_id', unique: true }
t.timestamps_with_timezone
end
end
def down
with_lock_retries do
drop_table :security_orchestration_policy_configurations, force: :cascade
end
end
end
# frozen_string_literal: true
class AddStatusIndexToPackagesPackages < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_NAME = 'index_packages_packages_on_project_id_and_status'
def up
add_concurrent_index :packages_packages, [:project_id, :status], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :packages_packages, name: INDEX_NAME
end
end
# frozen_string_literal: true
class AddMarkdownSurroundSelectionToUserPreferences < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
add_column :user_preferences, :markdown_surround_selection, :boolean, default: true, null: false
end
end
def down
with_lock_retries do
remove_column :user_preferences, :markdown_surround_selection, :boolean
end
end
end
# frozen_string_literal: true
class AddNotesCreateLimitAllowlistToApplicationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :notes_create_limit_allowlist, :text, array: true, default: [], null: false
end
end
# frozen_string_literal: true
class AddCreatedByUserForClusterAgentToken < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_cluster_agent_tokens_on_created_by_user_id'
disable_ddl_transaction!
def up
unless column_exists?(:cluster_agent_tokens, :created_by_user_id)
add_column :cluster_agent_tokens, :created_by_user_id, :bigint
end
add_concurrent_index :cluster_agent_tokens, :created_by_user_id, name: INDEX_NAME
add_concurrent_foreign_key :cluster_agent_tokens, :users, column: :created_by_user_id, on_delete: :nullify
end
def down
with_lock_retries do
remove_foreign_key_if_exists :cluster_agent_tokens, :users, column: :created_by_user_id
end
remove_concurrent_index_by_name :cluster_agent_tokens, INDEX_NAME
remove_column :cluster_agent_tokens, :created_by_user_id
end
end
# frozen_string_literal: true
class MakeTheGeoOauthApplicationTrustedByDefault < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
execute(<<-SQL.squish)
UPDATE oauth_applications
SET confidential = true, trusted = true
WHERE id IN (SELECT oauth_application_id FROM geo_nodes);
SQL
end
def down
# We won't be able to tell which trusted applications weren't
# confidential before the migration, and setting all trusted
# applications to non-confidential would introduce security
# issues.
end
end
# frozen_string_literal: true
# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddMergeWhenPipelineSucceedsToNotificationSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :notification_settings, :merge_when_pipeline_succeeds, :boolean, default: false, null: false
end
end
# frozen_string_literal: true
class AddDelayedProjectRemovalToNamespaceSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :namespace_settings, :delayed_project_removal, :boolean, default: false, null: false
end
end
# frozen_string_literal: true
class AddIndexToNamespacesDelayedProjectRemoval < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'tmp_idx_on_namespaces_delayed_project_removal'
disable_ddl_transaction!
def up
add_concurrent_index :namespaces, :id, name: INDEX_NAME, where: 'delayed_project_removal = TRUE'
end
def down
remove_concurrent_index_by_name :namespaces, INDEX_NAME
end
end
# frozen_string_literal: true
class RemoveArtifactExpiryTempIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
INDEX_NAME = 'expired_artifacts_temp_index'
INDEX_CONDITION = "expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date"
def up
remove_concurrent_index_by_name :ci_job_artifacts, INDEX_NAME
end
def down
add_concurrent_index(:ci_job_artifacts, %i(id created_at), where: INDEX_CONDITION, name: INDEX_NAME)
end
end
# frozen_string_literal: true
class AddInProductMarketingEmailsEnabledSetting < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :application_settings, :in_product_marketing_emails_enabled, :boolean, null: false, default: true
end
end
# frozen_string_literal: true
class AddCreatedByToClusterAgent < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_cluster_agents_on_created_by_user_id'
disable_ddl_transaction!
def up
unless column_exists?(:cluster_agents, :created_by_user_id)
with_lock_retries do
add_column :cluster_agents, :created_by_user_id, :bigint
end
end
add_concurrent_index :cluster_agents, :created_by_user_id, name: INDEX_NAME
add_concurrent_foreign_key :cluster_agents, :users, column: :created_by_user_id, on_delete: :nullify
end
def down
with_lock_retries do
remove_column :cluster_agents, :created_by_user_id
end
end
end
# frozen_string_literal: true
class AddDescriptionToClusterToken < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
unless column_exists?(:cluster_agent_tokens, :description)
add_column :cluster_agent_tokens, :description, :text
end
add_text_limit :cluster_agent_tokens, :description, 1024
end
def down
remove_column :cluster_agent_tokens, :description
end
end
# frozen_string_literal: true
class RemoveIndexOnIssuesWhereServiceDeskReplyToIsNotNull < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_TABLE = :issues
INDEX_NAME = 'idx_on_issues_where_service_desk_reply_to_is_not_null'
def up
Gitlab::BackgroundMigration.steal('PopulateIssueEmailParticipants')
remove_concurrent_index_by_name INDEX_TABLE, INDEX_NAME
end
def down
add_concurrent_index(INDEX_TABLE, [:id], name: INDEX_NAME, where: 'service_desk_reply_to IS NOT NULL')
end
end
# frozen_string_literal: true
class CreateEpicListUserPreferences < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
create_table :boards_epic_list_user_preferences do |t|
t.bigint :user_id, null: false
t.bigint :epic_list_id, index: true, null: false
t.timestamps_with_timezone null: false
t.boolean :collapsed, null: false, default: false
end
add_index :boards_epic_list_user_preferences, [:user_id, :epic_list_id], unique: true, name: 'index_epic_board_list_preferences_on_user_and_list'
end
def down
drop_table :boards_epic_list_user_preferences
end
end
# frozen_string_literal: true
class AddEnvironmentScopeToGroupVariables < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
OLD_INDEX = 'index_ci_group_variables_on_group_id_and_key'
NEW_INDEX = 'index_ci_group_variables_on_group_id_and_key_and_environment'
disable_ddl_transaction!
def up
unless column_exists?(:ci_group_variables, :environment_scope)
# rubocop:disable Migration/AddLimitToTextColumns
# Added in 20210305013509_add_text_limit_to_group_ci_variables_environment_scope
add_column :ci_group_variables, :environment_scope, :text, null: false, default: '*'
# rubocop:enable Migration/AddLimitToTextColumns
end
add_concurrent_index :ci_group_variables, [:group_id, :key, :environment_scope], unique: true, name: NEW_INDEX
remove_concurrent_index_by_name :ci_group_variables, OLD_INDEX
end
def down
remove_duplicates!
add_concurrent_index :ci_group_variables, [:group_id, :key], unique: true, name: OLD_INDEX
remove_concurrent_index_by_name :ci_group_variables, NEW_INDEX
remove_column :ci_group_variables, :environment_scope
end
private
def remove_duplicates!
execute <<-SQL
DELETE FROM ci_group_variables
WHERE id NOT IN (
SELECT MIN(id)
FROM ci_group_variables
GROUP BY group_id, key
)
SQL
end
end
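# Illustrative sketch, not part of the migration above: its remove_duplicates! step
# keeps the row with the lowest id for each (group_id, key) pair before the narrower
# unique index is restored. The same selection in plain Ruby over hypothetical rows:
rows = [
  { id: 1, group_id: 10, key: 'TOKEN' },
  { id: 2, group_id: 10, key: 'TOKEN' },
  { id: 3, group_id: 11, key: 'TOKEN' }
]

kept = rows
  .group_by { |row| [row[:group_id], row[:key]] }
  .map { |_, dupes| dupes.min_by { |row| row[:id] } }
# kept contains rows 1 and 3; row 2 is the duplicate the DELETE statement removes.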
# frozen_string_literal: true
class ChangeFindingFingerprintEnum < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
change_column :vulnerability_finding_fingerprints, :algorithm_type, :integer, limit: 2
end
def down
change_column :vulnerability_finding_fingerprints, :algorithm_type, :integer
end
end
# frozen_string_literal: true
class AddSprintsStartDateNotNullCheckConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_not_null_constraint(:sprints, :start_date, validate: false)
end
def down
remove_not_null_constraint(:sprints, :start_date)
end
end
# frozen_string_literal: true
class AddSprintsDueDateNotNullCheckConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_not_null_constraint(:sprints, :due_date, validate: false)
end
def down
remove_not_null_constraint(:sprints, :due_date)
end
end
# frozen_string_literal: true
class AddCreatorForeignKeyToCustomEmoji < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
FK_NAME = 'fk_custom_emoji_creator_id'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :custom_emoji, :users,
on_delete: :cascade,
column: :creator_id,
name: FK_NAME
end
def down
with_lock_retries do
remove_foreign_key :custom_emoji, name: FK_NAME
end
end
end
# frozen_string_literal: true
class AddEpicIssueCompositeIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_epic_issues_on_epic_id_and_issue_id'
disable_ddl_transaction!
def up
add_concurrent_index :epic_issues, [:epic_id, :issue_id], name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :epic_issues, INDEX_NAME
end
end
# frozen_string_literal: true
class AddVersionUsageDataIdToRawUsageData < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :raw_usage_data, :version_usage_data_id_value, :bigint
end
end
# frozen_string_literal: true
class AddIsRemovedToOncallParticipant < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :incident_management_oncall_participants, :is_removed, :boolean, default: false, null: false
end
end
def down
with_lock_retries do
remove_column :incident_management_oncall_participants, :is_removed
end
end
end
# frozen_string_literal: true
class AddIsRemovedIndexToOncallParticipant < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
EXISTING_INDEX_NAME = 'index_inc_mgmnt_oncall_participants_on_oncall_rotation_id'
NEW_INDEX_NAME = 'index_inc_mgmnt_oncall_pcpnt_on_oncall_rotation_id_is_removed'
def up
add_concurrent_index :incident_management_oncall_participants, [:oncall_rotation_id, :is_removed], name: NEW_INDEX_NAME
remove_concurrent_index_by_name(:incident_management_oncall_participants, EXISTING_INDEX_NAME)
end
def down
add_concurrent_index :incident_management_oncall_participants, :oncall_rotation_id, name: EXISTING_INDEX_NAME
remove_concurrent_index_by_name(:incident_management_oncall_participants, NEW_INDEX_NAME)
end
end
# frozen_string_literal: true
class AddStorageSizeToNamespaceStatistics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :namespace_statistics, :storage_size, :bigint, default: 0, null: false
end
end
def down
with_lock_retries do
remove_column :namespace_statistics, :storage_size
end
end
end
# frozen_string_literal: true
class AddWikiSizeToNamespaceStatistics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :namespace_statistics, :wiki_size, :bigint, default: 0, null: false
end
end
def down
with_lock_retries do
remove_column :namespace_statistics, :wiki_size
end
end
end
# frozen_string_literal: true
class AddEpicBoardUserPreferenceUserFk < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :boards_epic_list_user_preferences, :users, column: :user_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key_if_exists :boards_epic_list_user_preferences, :users
end
end
end
# frozen_string_literal: true
class AddEpicBoardUserPreferenceEpicListFk < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :boards_epic_list_user_preferences, :boards_epic_lists, column: :epic_list_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key_if_exists :boards_epic_list_user_preferences, :boards_epic_lists
end
end
end
# frozen_string_literal: true
class AddContainerRegistryAccessLevel < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column(
:project_features,
:container_registry_access_level,
:integer,
default: 0, # ProjectFeature::DISABLED value
null: false
)
end
end
def down
with_lock_retries do
remove_column :project_features, :container_registry_access_level
end
end
end
# frozen_string_literal: true
class AddBranchNameToDastProfile < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
add_column :dast_profiles, :branch_name, :text
end
add_text_limit :dast_profiles, :branch_name, 255
end
def down
with_lock_retries do
remove_column :dast_profiles, :branch_name
end
end
end
# frozen_string_literal: true
class AddForeignKeyToExternalApprovalRules < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :external_approval_rules_protected_branches, :external_approval_rules, column: :external_approval_rule_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :external_approval_rules_protected_branches, column: :external_approval_rule_id
end
end
end
# frozen_string_literal: true
class AddForeignKeyToExternalApprovalRulesProtectedBranches < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :external_approval_rules_protected_branches, :protected_branches, column: :protected_branch_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :external_approval_rules_protected_branches, column: :protected_branch_id
end
end
end
# frozen_string_literal: true
class UpdateRubygemsMetadataMetadata < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_text_limit :packages_rubygems_metadata, :metadata
add_text_limit :packages_rubygems_metadata, :metadata, 30000
end
def down
remove_text_limit :packages_rubygems_metadata, :metadata
add_text_limit :packages_rubygems_metadata, :metadata, 255, validate: false
end
end
# frozen_string_literal: true
class AddNullConstraintToTerraformStateName < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
change_column_null :terraform_states, :name, false
end
end
# frozen_string_literal: true
class AddNameFieldToClusterAgentToken < ActiveRecord::Migration[6.0]
DOWNTIME = false
# rubocop:disable Migration/AddLimitToTextColumns
# limit is added in LimitClusterTokenSize
def change
add_column :cluster_agent_tokens, :name, :text
end
# rubocop:enable Migration/AddLimitToTextColumns
end
# frozen_string_literal: true
class AddJiraIssueTransitionAutomaticToJiraTrackerData < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :jira_tracker_data, :jira_issue_transition_automatic, :boolean, null: false, default: false
end
end
# frozen_string_literal: true
class CreateDoraDailyMetrics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
create_table :dora_daily_metrics, if_not_exists: true do |t|
t.references :environment, null: false, foreign_key: { on_delete: :cascade }, index: false
t.date :date, null: false
t.integer :deployment_frequency
t.integer :lead_time_for_changes_in_seconds
t.index [:environment_id, :date], unique: true
end
end
add_check_constraint :dora_daily_metrics, "deployment_frequency >= 0", 'dora_daily_metrics_deployment_frequency_positive'
add_check_constraint :dora_daily_metrics, "lead_time_for_changes_in_seconds >= 0", 'dora_daily_metrics_lead_time_for_changes_in_seconds_positive'
end
def down
with_lock_retries do
drop_table :dora_daily_metrics
end
end
end
# frozen_string_literal: true
class LimitClusterTokenSize < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_text_limit :cluster_agent_tokens, :name, 255
end
def down
remove_text_limit :cluster_agent_tokens, :name
end
end
# frozen_string_literal: true
class AddAllowForcePushToProtectedBranches < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :protected_branches, :allow_force_push, :boolean, default: false, null: false
end
end
def down
with_lock_retries do
remove_column :protected_branches, :allow_force_push
end
end
end
# frozen_string_literal: true
class AddTierToEnvironments < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :environments, :tier, :smallint
end
end
def down
with_lock_retries do
remove_column :environments, :tier
end
end
end
# frozen_string_literal: true
class AddOptionalToCiBuildNeeds < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :ci_build_needs, :optional, :boolean, default: false, null: false
end
end
def down
with_lock_retries do
remove_column :ci_build_needs, :optional
end
end
end
# frozen_string_literal: true
class RenameAssetProxyAllowlistOnApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers::V2
DOWNTIME = false
disable_ddl_transaction!
def up
cleanup_concurrent_column_rename :application_settings,
:asset_proxy_whitelist,
:asset_proxy_allowlist
rename_column_concurrently :application_settings,
:asset_proxy_allowlist,
:asset_proxy_whitelist
end
def down
undo_rename_column_concurrently :application_settings,
:asset_proxy_allowlist,
:asset_proxy_whitelist
undo_cleanup_concurrent_column_rename :application_settings,
:asset_proxy_whitelist,
:asset_proxy_allowlist
end
end
# frozen_string_literal: true
class SchedulePopulateFindingUuidForVulnerabilityFeedback < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION_CLASS = 'PopulateFindingUuidForVulnerabilityFeedback'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
disable_ddl_transaction!
def up
queue_background_migration_jobs_by_range_at_intervals(
Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback::VulnerabilityFeedback,
MIGRATION_CLASS,
DELAY_INTERVAL,
batch_size: BATCH_SIZE
)
end
def down
# no-op
end
end
# frozen_string_literal: true
class AddDevopsAdoptionSnapshotNotNull < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
execute(
<<~SQL
LOCK TABLE analytics_devops_adoption_snapshots IN ACCESS EXCLUSIVE MODE;
UPDATE analytics_devops_adoption_snapshots SET end_time = date_trunc('month', recorded_at) - interval '1 millisecond';
ALTER TABLE analytics_devops_adoption_snapshots ALTER COLUMN end_time SET NOT NULL;
SQL
)
end
end
def down
with_lock_retries do
execute(<<~SQL)
ALTER TABLE analytics_devops_adoption_snapshots ALTER COLUMN end_time DROP NOT NULL;
SQL
end
end
end
# frozen_string_literal: true
class ChangePagesDeploymentSizeToBigintCleanup < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
cleanup_concurrent_column_type_change :pages_deployments, :size
end
def down
undo_cleanup_concurrent_column_type_change :pages_deployments, :size, :integer, limit: 4
end
end
# frozen_string_literal: true
class ReindexCiPipelinesOnScheduleIdAndId < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
OLD_INDEX_NAME = 'index_ci_pipelines_on_pipeline_schedule_id'
NEW_INDEX_NAME = 'index_ci_pipelines_on_pipeline_schedule_id_and_id'
disable_ddl_transaction!
def up
add_concurrent_index :ci_pipelines, [:pipeline_schedule_id, :id], name: NEW_INDEX_NAME
remove_concurrent_index_by_name :ci_pipelines, OLD_INDEX_NAME
end
def down
add_concurrent_index :ci_pipelines, :pipeline_schedule_id, name: OLD_INDEX_NAME
remove_concurrent_index_by_name :ci_pipelines, NEW_INDEX_NAME
end
end
# frozen_string_literal: true
class ScheduleSetDefaultIterationCadences < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 1_000
DELAY_INTERVAL = 2.minutes.to_i
MIGRATION_CLASS = 'SetDefaultIterationCadences'
class Iteration < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'sprints'
end
disable_ddl_transaction!
def up
# Do nothing; rescheduled by 20210219102900_reschedule_set_default_iteration_cadences.rb
end
def down
# Not needed
end
end
# frozen_string_literal: true
class CleanupProjectsWithBadHasExternalWikiData < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
TMP_INDEX_NAME = 'tmp_index_projects_on_id_where_has_external_wiki_is_true'
BATCH_SIZE = 100
disable_ddl_transaction!
class Service < ActiveRecord::Base
include EachBatch
belongs_to :project
self.table_name = 'services'
self.inheritance_column = :_type_disabled
end
class Project < ActiveRecord::Base
include EachBatch
self.table_name = 'projects'
end
def up
update_projects_with_active_external_wikis
update_projects_without_active_external_wikis
end
def down
# no-op: can't go back to incorrect data
end
private
def update_projects_with_active_external_wikis
# 11 projects are scoped in this query on GitLab.com.
scope = Service.where(active: true, type: 'ExternalWikiService').where.not(project_id: nil)
scope.each_batch(of: BATCH_SIZE) do |relation|
scope_with_projects = relation
.joins(:project)
.select('project_id')
.merge(Project.where(has_external_wiki: false).where(pending_delete: false).where(archived: false))
execute(<<~SQL)
WITH project_ids_to_update (id) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
#{scope_with_projects.to_sql}
)
UPDATE projects SET has_external_wiki = true WHERE id IN (SELECT id FROM project_ids_to_update)
SQL
end
end
def update_projects_without_active_external_wikis
# Add a temporary index to speed up the scoping of projects.
index_where = <<~SQL
(
"projects"."has_external_wiki" = TRUE
)
AND "projects"."pending_delete" = FALSE
AND "projects"."archived" = FALSE
SQL
add_concurrent_index(:projects, :id, where: index_where, name: TMP_INDEX_NAME)
services_sub_query = Service
.select('1')
.where('services.project_id = projects.id')
.where(type: 'ExternalWikiService')
.where(active: true)
# 322 projects are scoped in this query on GitLab.com.
Project.where(index_where).each_batch(of: BATCH_SIZE) do |relation|
relation_with_exists_query = relation.where('NOT EXISTS (?)', services_sub_query)
execute(<<~SQL)
WITH project_ids_to_update (id) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
#{relation_with_exists_query.select(:id).to_sql}
)
UPDATE projects SET has_external_wiki = false WHERE id IN (SELECT id FROM project_ids_to_update)
SQL
end
# Drop the temporary index.
remove_concurrent_index_by_name(:projects, TMP_INDEX_NAME)
end
end
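# Illustrative sketch, not part of the migration above: its two update passes converge
# projects.has_external_wiki on a single rule. That rule in plain Ruby, assuming a
# list of the project's active service types:
def expected_has_external_wiki?(active_service_types)
  # A project should be flagged only while it has an active ExternalWikiService.
  active_service_types.include?('ExternalWikiService')
end

expected_has_external_wiki?(['ExternalWikiService', 'JiraService']) # => true
expected_has_external_wiki?(['JiraService'])                        # => false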
# frozen_string_literal: true
class CleanUpAssetProxyWhitelistRenameOnApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers::V2
DOWNTIME = false
disable_ddl_transaction!
def up
# This migration has been made a no-op in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56352
# because to revert the rename in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/55419 we need
# to cleanup the triggers on the `asset_proxy_allowlist` column. As such, this migration would do nothing.
end
def down
# no-op
end
end
# frozen_string_literal: true
class DeleteColumnGroupIdOnComplianceFramework < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
remove_column :compliance_management_frameworks, :group_id, :bigint
end
end
# frozen_string_literal: true
class RemoveAlertsServiceRecords < ActiveRecord::Migration[6.0]
DOWNTIME = false
disable_ddl_transaction!
class Service < ActiveRecord::Base
self.table_name = 'services'
end
def up
Service.delete_by(type: 'AlertsService')
end
def down
# no-op
end
end
# frozen_string_literal: true
class ScheduleUuidPopulationForSecurityFindings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION_CLASS = 'PopulateUuidsForSecurityFindings'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 25
disable_ddl_transaction!
def up
# no-op, replaced by 20210111075206_schedule_uuid_population_for_security_findings2.rb
end
def down
# no-op
end
end
# frozen_string_literal: true
# This replaces the previous post-deployment migration 20210111075105_schedule_uuid_population_for_security_findings.rb;
# we have to run it again due to a bug in how we were receiving the arguments in the background migration.
class ScheduleUuidPopulationForSecurityFindings2 < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION_CLASS = 'PopulateUuidsForSecurityFindings'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 25
disable_ddl_transaction!
def up
::Gitlab::BackgroundMigration.steal(MIGRATION_CLASS) do |job|
job.delete
false
end
Gitlab::BackgroundMigration::PopulateUuidsForSecurityFindings.security_findings.each_batch(column: :scan_id, of: BATCH_SIZE) do |batch, index|
migrate_in(
DELAY_INTERVAL * index,
MIGRATION_CLASS,
batch.pluck(:scan_id)
)
end
end
def down
# no-op
end
end
# frozen_string_literal: true
# This replaces the previous post-deployment migration 20201207165956_remove_duplicate_services_spec.rb;
# we have to run it again due to a bug in how we were receiving the arguments in the background migration.
class RemoveDuplicateServices2 < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INTERVAL = 2.minutes
BATCH_SIZE = 5_000
MIGRATION = 'RemoveDuplicateServices'
disable_ddl_transaction!
def up
project_ids_with_duplicates = Gitlab::BackgroundMigration::RemoveDuplicateServices::Service.project_ids_with_duplicates
project_ids_with_duplicates.each_batch(of: BATCH_SIZE, column: :project_id) do |batch, index|
migrate_in(
INTERVAL * index,
MIGRATION,
batch.pluck(:project_id)
)
end
end
def down
end
end
# frozen_string_literal: true
class CancelArtifactExpiryBackfill < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'BackfillArtifactExpiryDate'
disable_ddl_transaction!
def up
Gitlab::BackgroundMigration.steal(MIGRATION) do |job|
job.delete
false
end
end
def down
# no-op
end
end
# frozen_string_literal: true
class ScheduleArtifactExpiryBackfill < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'BackfillArtifactExpiryDate'
SWITCH_DATE = Date.new(2020, 06, 22).freeze
INDEX_NAME = 'expired_artifacts_temp_index'
OLD_INDEX_CONDITION = "expire_at IS NULL AND created_at < '#{SWITCH_DATE}'"
INDEX_CONDITION = "expire_at IS NULL AND date(created_at AT TIME ZONE 'UTC') < '2020-06-22'::date"
disable_ddl_transaction!
class JobArtifact < ActiveRecord::Base
include EachBatch
self.table_name = 'ci_job_artifacts'
scope :without_expiry_date, -> { where(expire_at: nil) }
scope :before_switch, -> { where("date(created_at AT TIME ZONE 'UTC') < ?::date", SWITCH_DATE) }
end
def up
# Create temporary index for expired artifacts
# Needs to be removed in a later migration
remove_concurrent_index_by_name :ci_job_artifacts, INDEX_NAME
add_concurrent_index(:ci_job_artifacts, %i(id created_at), where: INDEX_CONDITION, name: INDEX_NAME)
queue_background_migration_jobs_by_range_at_intervals(
JobArtifact.without_expiry_date.before_switch,
MIGRATION,
2.minutes,
batch_size: 200_000
)
end
def down
remove_concurrent_index_by_name :ci_job_artifacts, INDEX_NAME
add_concurrent_index(:ci_job_artifacts, %i(id created_at), where: OLD_INDEX_CONDITION, name: INDEX_NAME)
Gitlab::BackgroundMigration.steal(MIGRATION) do |job|
job.delete
false
end
end
end
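# Illustrative sketch, not part of the migration above: its scope chain selects
# artifacts with no expiry date that were created before the switch date. The same
# filter in plain Ruby over hypothetical artifact hashes:
require 'date'

switch_date = Date.new(2020, 6, 22)
artifacts = [
  { id: 1, expire_at: nil, created_at: Date.new(2020, 5, 1) },
  { id: 2, expire_at: nil, created_at: Date.new(2020, 7, 1) },
  { id: 3, expire_at: Date.new(2021, 1, 1), created_at: Date.new(2020, 5, 1) }
]

eligible = artifacts.select { |a| a[:expire_at].nil? && a[:created_at] < switch_date }
# => only artifact 1 would be queued for the BackfillArtifactExpiryDate migration.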
# frozen_string_literal: true
class AlterVsaIssueFirstMentionedInCommitValue < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS = 2
ISSUE_FIRST_MENTIONED_IN_COMMIT_EE = 6
class GroupStage < ActiveRecord::Base
self.table_name = 'analytics_cycle_analytics_group_stages'
include EachBatch
end
def up
GroupStage.each_batch(of: 100) do |relation|
relation
.where(start_event_identifier: ISSUE_FIRST_MENTIONED_IN_COMMIT_EE)
.update_all(start_event_identifier: ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS)
relation
.where(end_event_identifier: ISSUE_FIRST_MENTIONED_IN_COMMIT_EE)
.update_all(end_event_identifier: ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS)
end
end
def down
# rollback is not needed; identifier "6" is the same as identifier "2" at the application level
end
end
# frozen_string_literal: true
# Data migration to migrate multi-selection segments into separate segments.
# Both tables involved are low-traffic, and the number
# of records in the DB cannot exceed 400.
class MigrateExistingDevopsSegmentsToGroups < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
Gitlab::BackgroundMigration::MigrateDevopsSegmentsToGroups.new.perform
end
def down
end
end
# frozen_string_literal: true
class AddNewPostEoaPlans < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
execute "INSERT INTO plans (name, title, created_at, updated_at) VALUES ('premium', 'Premium (Formerly Silver)', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
execute "INSERT INTO plans (name, title, created_at, updated_at) VALUES ('ultimate', 'Ultimate (Formerly Gold)', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
end
def down
execute "DELETE FROM plans WHERE name IN ('premium', 'ultimate')"
end
end
# frozen_string_literal: true
class RemoveNamespaceOnboardingActionsTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
drop_table :namespace_onboarding_actions
end
end
def down
with_lock_retries do
create_table :namespace_onboarding_actions do |t|
t.references :namespace, index: true, null: false
t.datetime_with_timezone :created_at, null: false
t.integer :action, limit: 2, null: false
end
end
end
end
# frozen_string_literal: true
class RemoveBadDependencyProxyManifests < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
# We run destroy on each record because we need the callback to remove
# the underlying files
DependencyProxy::Manifest.where.not(content_type: nil).destroy_all # rubocop:disable Cop/DestroyAll
end
def down
# no op
end
end
# frozen_string_literal: true
class DropAlertsServiceData < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
drop_table :alerts_service_data
end
end
# rubocop:disable Migration/PreventStrings
def down
with_lock_retries do
create_table :alerts_service_data do |t|
t.bigint :service_id, null: false
t.timestamps_with_timezone
t.string :encrypted_token, limit: 255
t.string :encrypted_token_iv, limit: 255
end
end
end
# rubocop:enable Migration/PreventStrings
end
# frozen_string_literal: true
class RemoveAlertsServiceRecordsAgain < ActiveRecord::Migration[6.0]
DOWNTIME = false
disable_ddl_transaction!
class Service < ActiveRecord::Base
self.table_name = 'services'
end
def up
Service.delete_by(type: 'AlertsService')
end
def down
# no-op
end
end
# frozen_string_literal: true
class BackfillUpdatedAtAfterRepositoryStorageMove < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 10_000
INTERVAL = 2.minutes
MIGRATION_CLASS = 'BackfillProjectUpdatedAtAfterRepositoryStorageMove'
disable_ddl_transaction!
class RepositoryStorageMove < ActiveRecord::Base
include EachBatch
self.table_name = 'project_repository_storage_moves'
end
def up
RepositoryStorageMove.reset_column_information
RepositoryStorageMove.select(:project_id).distinct.each_batch(of: BATCH_SIZE, column: :project_id) do |batch, index|
migrate_in(
INTERVAL * index,
MIGRATION_CLASS,
batch.pluck(:project_id)
)
end
end
def down
# No-op
end
end
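# Illustrative sketch, not part of the migration above: migrate_in(INTERVAL * index, ...)
# staggers the background jobs so each batch starts two minutes after the previous
# one (each_batch yields indexes starting at 1). The resulting delay series in plain Ruby:
interval = 120 # seconds, mirrors INTERVAL = 2.minutes
delays = (1..4).map { |index| interval * index }
# => [120, 240, 360, 480] - batch 1 runs after 2 minutes, batch 2 after 4, and so on.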
# frozen_string_literal: true
class CleanupProjectsWithBadHasExternalIssueTrackerData < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
TMP_INDEX_NAME = 'tmp_idx_projects_on_id_where_has_external_issue_tracker_is_true'
BATCH_SIZE = 100
disable_ddl_transaction!
class Service < ActiveRecord::Base
include EachBatch
belongs_to :project
self.table_name = 'services'
self.inheritance_column = :_type_disabled
end
class Project < ActiveRecord::Base
include EachBatch
self.table_name = 'projects'
end
def up
update_projects_with_active_external_issue_trackers
update_projects_without_active_external_issue_trackers
end
def down
# no-op: can't go back to incorrect data
end
private
def update_projects_with_active_external_issue_trackers
scope = Service.where(active: true, category: 'issue_tracker').where.not(project_id: nil).distinct(:project_id)
scope.each_batch(of: BATCH_SIZE) do |relation|
scope_with_projects = relation
.joins(:project)
.select('project_id')
.merge(Project.where(has_external_issue_tracker: false).where(pending_delete: false))
execute(<<~SQL)
WITH project_ids_to_update (id) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
#{scope_with_projects.to_sql}
)
UPDATE projects SET has_external_issue_tracker = true WHERE id IN (SELECT id FROM project_ids_to_update)
SQL
end
end
def update_projects_without_active_external_issue_trackers
# Add a temporary index to speed up the scoping of projects.
index_where = <<~SQL
"projects"."has_external_issue_tracker" = TRUE
AND "projects"."pending_delete" = FALSE
SQL
add_concurrent_index(:projects, :id, where: index_where, name: TMP_INDEX_NAME)
services_sub_query = Service
.select('1')
.where('services.project_id = projects.id')
.where(category: 'issue_tracker')
.where(active: true)
# 322 projects are scoped in this query on GitLab.com.
Project.where(index_where).each_batch(of: BATCH_SIZE) do |relation|
relation_with_exists_query = relation.where('NOT EXISTS (?)', services_sub_query)
execute(<<~SQL)
WITH project_ids_to_update (id) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
#{relation_with_exists_query.select(:id).to_sql}
)
UPDATE projects SET has_external_issue_tracker = false WHERE id IN (SELECT id FROM project_ids_to_update)
SQL
end
# Drop the temporary index.
remove_concurrent_index_by_name(:projects, TMP_INDEX_NAME)
end
end
# frozen_string_literal: true
class MigrateDelayedProjectRemovalFromNamespacesToNamespaceSettings < ActiveRecord::Migration[6.0]
DOWNTIME = false
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
include ::EachBatch
end
def up
Namespace.select(:id).where(delayed_project_removal: true).each_batch do |batch|
values = batch.map { |record| "(#{record.id}, TRUE, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)" }
execute <<-EOF.strip_heredoc
INSERT INTO namespace_settings (namespace_id, delayed_project_removal, created_at, updated_at)
VALUES #{values.join(', ')}
ON CONFLICT (namespace_id) DO UPDATE
SET delayed_project_removal = TRUE
EOF
end
end
def down
# no-op
end
end
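# Illustrative sketch, not part of the migration above: its up method turns each
# qualifying namespace id into a VALUES tuple and upserts it into namespace_settings.
# The tuple construction in plain Ruby, for hypothetical ids:
namespace_ids = [42, 43]
values = namespace_ids.map { |id| "(#{id}, TRUE, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)" }
values.join(', ')
# => "(42, TRUE, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP), (43, TRUE, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"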
# frozen_string_literal: true
class MigrateUsageTrendsSidekiqQueue < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
sidekiq_queue_migrate 'cronjob:analytics_instance_statistics_count_job_trigger', to: 'cronjob:analytics_usage_trends_count_job_trigger'
sidekiq_queue_migrate 'analytics_instance_statistics_counter_job', to: 'analytics_usage_trends_counter_job'
end
def down
sidekiq_queue_migrate 'cronjob:analytics_usage_trends_count_job_trigger', to: 'cronjob:analytics_instance_statistics_count_job_trigger'
sidekiq_queue_migrate 'analytics_usage_trends_counter_job', to: 'analytics_instance_statistics_counter_job'
end
end
# frozen_string_literal: true
class MoveCreateReleaseEvidenceQueueOutOfCronjobNamespace < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def up
sidekiq_queue_migrate 'cronjob:releases_create_evidence', to: 'releases_create_evidence'
end
def down
sidekiq_queue_migrate 'releases_create_evidence', to: 'cronjob:releases_create_evidence'
end
end
# frozen_string_literal: true
class RemoveDeprecatedCiBuildsColumns < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
remove_column :ci_builds, :artifacts_file
remove_column :ci_builds, :artifacts_file_store
remove_column :ci_builds, :artifacts_metadata
remove_column :ci_builds, :artifacts_metadata_store
remove_column :ci_builds, :artifacts_size
remove_column :ci_builds, :commands
end
end
def down
# rubocop:disable Migration/AddColumnsToWideTables
with_lock_retries do
add_column :ci_builds, :artifacts_file, :text
add_column :ci_builds, :artifacts_file_store, :integer
add_column :ci_builds, :artifacts_metadata, :text
add_column :ci_builds, :artifacts_metadata_store, :integer
add_column :ci_builds, :artifacts_size, :bigint
add_column :ci_builds, :commands, :text
end
# rubocop:enable Migration/AddColumnsToWideTables
add_concurrent_index :ci_builds, :artifacts_expire_at, where: "artifacts_file <> ''::text", name: 'index_ci_builds_on_artifacts_expire_at'
end
end
# frozen_string_literal: true
class RemoveDeprecatedCiRunnerColumn < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
remove_column :ci_runners, :is_shared
end
end
def down
add_column :ci_runners, :is_shared, :boolean, default: false unless column_exists?(:ci_runners, :is_shared)
add_concurrent_index :ci_runners, :is_shared
end
end
# frozen_string_literal: true
class RescheduleSetDefaultIterationCadences < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 1_000
DELAY_INTERVAL = 2.minutes.to_i
MIGRATION_CLASS = 'SetDefaultIterationCadences'
class Iteration < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'sprints'
end
disable_ddl_transaction!
def up
Iteration.select(:group_id).distinct.each_batch(of: BATCH_SIZE, column: :group_id) do |batch, index|
group_ids = batch.pluck(:group_id)
migrate_in(index * DELAY_INTERVAL, MIGRATION_CLASS, group_ids)
end
end
def down
# Not needed
end
end
# frozen_string_literal: true
class RemoveBackupLabelsForeignKeys < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
remove_foreign_key_if_exists(:backup_labels, :projects)
remove_foreign_key_if_exists(:backup_labels, :namespaces)
end
end
def down
add_concurrent_foreign_key(:backup_labels, :projects, column: :project_id, on_delete: :cascade)
add_concurrent_foreign_key(:backup_labels, :namespaces, column: :group_id, on_delete: :cascade)
end
end
# frozen_string_literal: true
class RemoveBackupLabelsTable < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
drop_table :backup_labels
end
def down
create_table :backup_labels, id: false do |t|
t.integer :id, null: false
t.string :title
t.string :color
t.integer :project_id
t.timestamps null: true # rubocop:disable Migration/Timestamps
t.boolean :template, default: false
t.string :description
t.text :description_html
t.string :type
t.integer :group_id
t.integer :cached_markdown_version
t.integer :restore_action
t.string :new_title
end
execute 'ALTER TABLE backup_labels ADD PRIMARY KEY (id)'
add_index :backup_labels, [:group_id, :project_id, :title], name: 'backup_labels_group_id_project_id_title_idx', unique: true
add_index :backup_labels, [:group_id, :title], where: 'project_id = NULL::integer', name: 'backup_labels_group_id_title_idx'
add_index :backup_labels, :project_id, name: 'backup_labels_project_id_idx'
add_index :backup_labels, :template, name: 'backup_labels_template_idx', where: 'template'
add_index :backup_labels, :title, name: 'backup_labels_title_idx'
add_index :backup_labels, [:type, :project_id], name: 'backup_labels_type_project_id_idx'
end
end
# frozen_string_literal: true
class RescheduleArtifactExpiryBackfill < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'BackfillArtifactExpiryDate'
SWITCH_DATE = Date.new(2020, 06, 22).freeze
disable_ddl_transaction!
class JobArtifact < ActiveRecord::Base
include EachBatch
self.inheritance_column = :_type_disabled
self.table_name = 'ci_job_artifacts'
scope :without_expiry_date, -> { where(expire_at: nil) }
scope :before_switch, -> { where("date(created_at AT TIME ZONE 'UTC') < ?::date", SWITCH_DATE) }
end
def up
Gitlab::BackgroundMigration.steal(MIGRATION) do |job|
job.delete
false
end
queue_background_migration_jobs_by_range_at_intervals(
JobArtifact.without_expiry_date.before_switch,
MIGRATION,
2.minutes,
batch_size: 200_000
)
end
def down
Gitlab::BackgroundMigration.steal(MIGRATION) do |job|
job.delete
false
end
end
end
# frozen_string_literal: true
class MoveContainerRegistryEnabledToProjectFeatures < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 50_000
MIGRATION = 'MoveContainerRegistryEnabledToProjectFeature'
disable_ddl_transaction!
class Project < ActiveRecord::Base
include EachBatch
self.table_name = 'projects'
end
def up
# no-op
# Superseded by db/post_migrate/20210401131948_move_container_registry_enabled_to_project_features2.rb
# queue_background_migration_jobs_by_range_at_intervals(Project, MIGRATION, 2.minutes, batch_size: BATCH_SIZE)
end
def down
# no-op
end
end
# frozen_string_literal: true
class DedupIssueMetrics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
TMP_INDEX_NAME = 'tmp_unique_issue_metrics_by_issue_id'
OLD_INDEX_NAME = 'index_issue_metrics'
INDEX_NAME = 'index_unique_issue_metrics_issue_id'
BATCH_SIZE = 1_000
disable_ddl_transaction!
class IssueMetrics < ActiveRecord::Base
self.table_name = 'issue_metrics'
include EachBatch
end
def up
IssueMetrics.reset_column_information
last_metrics_record_id = IssueMetrics.maximum(:id) || 0
# This index will disallow further duplicates while we're deduplicating the data.
add_concurrent_index(:issue_metrics, :issue_id, where: "id > #{Integer(last_metrics_record_id)}", unique: true, name: TMP_INDEX_NAME)
IssueMetrics.each_batch(of: BATCH_SIZE) do |relation|
duplicated_issue_ids = IssueMetrics
.where(issue_id: relation.select(:issue_id))
.select(:issue_id)
.group(:issue_id)
.having('COUNT(issue_metrics.issue_id) > 1')
.pluck(:issue_id)
duplicated_issue_ids.each do |issue_id|
deduplicate_item(issue_id)
end
end
add_concurrent_index(:issue_metrics, :issue_id, unique: true, name: INDEX_NAME)
remove_concurrent_index_by_name(:issue_metrics, TMP_INDEX_NAME)
remove_concurrent_index_by_name(:issue_metrics, OLD_INDEX_NAME)
end
def down
add_concurrent_index(:issue_metrics, :issue_id, name: OLD_INDEX_NAME)
remove_concurrent_index_by_name(:issue_metrics, TMP_INDEX_NAME)
remove_concurrent_index_by_name(:issue_metrics, INDEX_NAME)
end
private
def deduplicate_item(issue_id)
issue_metrics_records = IssueMetrics.where(issue_id: issue_id).order(updated_at: :asc).to_a
attributes = {}
issue_metrics_records.each do |issue_metrics_record|
params = issue_metrics_record.attributes.except('id')
attributes.merge!(params.compact)
end
ActiveRecord::Base.transaction do
record_to_keep = issue_metrics_records.pop
records_to_delete = issue_metrics_records
IssueMetrics.where(id: records_to_delete.map(&:id)).delete_all
record_to_keep.update!(attributes)
end
end
end
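# Illustrative sketch, not part of the migration above: deduplicate_item folds the
# non-nil attributes of all duplicate rows (oldest update first) onto the most
# recently updated row, then deletes the rest. The merge step in plain Ruby, for
# hypothetical attribute hashes ordered by updated_at ascending:
records = [
  { 'id' => 1, 'first_mentioned_in_commit_at' => '2021-01-01', 'first_added_to_board_at' => nil },
  { 'id' => 2, 'first_mentioned_in_commit_at' => nil, 'first_added_to_board_at' => '2021-02-01' }
]

attributes = {}
records.each do |record|
  attributes.merge!(record.reject { |key, _| key == 'id' }.compact)
end
# attributes now holds both timestamps and would be written to the kept (newest) record.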
# frozen_string_literal: true
class CleanUpAssetProxyAllowlistRenameOnApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers::V2
DOWNTIME = false
disable_ddl_transaction!
def up
cleanup_concurrent_column_rename :application_settings,
:asset_proxy_allowlist,
:asset_proxy_whitelist
end
def down
undo_cleanup_concurrent_column_rename :application_settings,
:asset_proxy_allowlist,
:asset_proxy_whitelist
end
end
cdf55e9f2b1b9c375920198a438d29fe3c9ab7147f3c670b0d66b11d499573d9
\ No newline at end of file
d9cfb7515805e642c562b8be58b6cd482c24e62e76245db35a7d91b25c327d8d
\ No newline at end of file
483d1b4a24086fa57efe7f3b3fa872cf793352f80aba5c25614f07eafa2d30c5
\ No newline at end of file
c7c2936062f4a7c764938453fb28dc2f461a06f0a21cc74b1750edbde9398fa1
\ No newline at end of file
caec7f6c66a0277561f650ae513fedaba581ab35bb238351eccccfef1132d118
\ No newline at end of file
6ca08c885fddccd3c82fc8651d20140655b65019e56f9c6136e92140401386d1
\ No newline at end of file
039962e291f9adde52379cac8f8278aa1b339d973fb33ae40ea7d8dc3e113fb6
\ No newline at end of file
b9200d6c754f7c450ba0c718171806e8f4f9720d870e532f4800640ca707f24f
\ No newline at end of file
3a7fb1b7959f09b9ba464253a72d52bcb744e7f78aac4f44e1d9201fa3c8387d
\ No newline at end of file
845636d8a0c6e9b6b39194cb44ffeceea3464023c22fadb2a4da44fed5dd973f
\ No newline at end of file
818fcf0f0fec9d2833b091ef380005a2d485486522fb63e2a7b2fd01dbf1ff79
\ No newline at end of file
e7a0121e8e21acd356daa882d8fe83242f4db180915dd0f3c25835c6c664ce0b
\ No newline at end of file
a24354264df3c12411af040903d26faf298f06a7334ef118f87b3e88b683b326
\ No newline at end of file
601d67a2911c461881064ec18a2246ef9e5b2835eb0fdf40e701c9360e19eca4
\ No newline at end of file
5c661c453922181b350b8551d9a8f9b097e568459a2c2d128e41d9aefb026ab5
\ No newline at end of file
484751de711e873e0f0f22d5951e36bf60d4826ebc95afa45e4f6cdaa0e3c024
\ No newline at end of file
961c147e9c8e35eac5b8dd33f879582e173b7f6e31659b2d00989bc38afc6f5a
\ No newline at end of file
e1bd58eeaf63caf473680a8c4b7269cc63e7c0d6e8d4e71636608e10c9731c85
\ No newline at end of file
1ff1256d2deac0a1545ef7db30d8ba7969265d6c2df62f6bd20f9f1721a482cb
\ No newline at end of file
484338ddc83bfb44523d08da92ac7f5b9d13e1a66ad1c9c3f7590f91fc9305c0
\ No newline at end of file
233a976aab340f16ed1c896963580fb66f4c9b4dee6a34f9536a62a4f7688792
\ No newline at end of file
4c6061f6ab1cb9e070a3ae87d44caf12d2c110a6033f0b33898b4b7ea7e37055
\ No newline at end of file
8c1da1c7edba16993da93d9075ad2a3624b8c12ccf73a241e1a166014a99e254
\ No newline at end of file
7678d97de752e7a9a571d80febc74eb44c699c7b1967690d9a2391036caea5d2
\ No newline at end of file
25820a3d060826a082565f12a3ac96deafbbde750f5756d71e34d14801ec6148
\ No newline at end of file
2965d990ec9cf2edd610b063686f9a1d9de4c38bcba3c8439a18159812d529d4
\ No newline at end of file
f72f0a31bca545d2528030019695b03e0858d7ae9a0fb32d407c25580731fa6b
\ No newline at end of file
0aa6f7385cf13c2b0ee9b7d2a51b63dd208feccffecee8f08ea3d183ebb5ffb4
\ No newline at end of file
8c676b4142db828b1d2d5dc6bd891eb929d12ab13e9073693ab7d830bcea599a
\ No newline at end of file
3587ba61d003385ea63ce900c1dd1c2bd1f2386abd921615b50421f1b798f553
\ No newline at end of file
52bf190bdb219366c790a5b7c081bfb383543498780cc95a25eafcecea036426
\ No newline at end of file
281ea05a95785b7f1d2d805bf8fe071c0fa59425eb01b46eeb69ad21f5650e29
\ No newline at end of file
909aee5ed0ad447fec425f7252fc6dbec827a66ff720620bae1bf3a32536cb96
\ No newline at end of file
6fe34be82f9aee6cbdb729a67d1d4ac0702c8d9774a038bfd4fd9d9cb28b1a2b
\ No newline at end of file
b5ff5aeb9cef243165d9c40db7211f61a632edb6a189c09112ef069d267bf64d
\ No newline at end of file
7631c82f9762e643a4da9c03f3870ab1c97fae93da4975219d9ab7cd629720ec
\ No newline at end of file
063c97800eec5bfc7f6879dbe559fa39952295d5ba947cf44ad03ac23212e924
\ No newline at end of file
545747e86481c74832a6df55764ab97ecfefc4446df9cc2366a8ce9d9c400ea4
\ No newline at end of file
91969bfc791cd7bc78b940aa6fed345b13a3186db0b89828428b798aa4f7949e
\ No newline at end of file
be2ddc15e16d7d59bd050a60faaa0b6941d94ba7c47a88be473bcf3a17bb2763
\ No newline at end of file
42e06332b279aaac7044243df0a8bd5525025db7d8c22bc474c0874e85f525f5
\ No newline at end of file
546802f93f64e346b066438e78ace5d2dc54de8a5f6234c2d01296a239cfe74c
\ No newline at end of file
b58f2853d7a2d9a821198f69c5913d290404a4961410dd66d256eefc7ecf1026
\ No newline at end of file
44b0e2b3e32e45fb557a5c9c41f69de94a5d41bad43267f00090b9c527b2b1e1
\ No newline at end of file
88bbd8cbc4398c3c05a834c4d8d36ee55aca9d698da3cf3c1df0afe916bc051f
\ No newline at end of file
2aa618cdbb6e55c3af1fa144ae3f3bd0d9d5400706db337301d2d165ba789ef0
\ No newline at end of file
e13856c6f65d86dae9a3268f91189bb90af236c555d2f645c6ba4c600c996cba
\ No newline at end of file
858cd59ea324e3653801055c7f3fae2152b04ac175945a59faa00d67ae7fa223
\ No newline at end of file
9e6f99ed0c3d4d76a8c290308805cabf84aa7e5fb6dc2b06d973d9d8726fc4d8
\ No newline at end of file
4da0131929bf59dd4a72bf05a889063df7978f8b5782e5f3b27d466302dc44b6
\ No newline at end of file
0bccf1ff356a4b9c08d472e8b63070b497f331c2dfaded1bdb2cf01860df8903
\ No newline at end of file
b2508d46edbfbba24df65731f6e285886acbb6352a900dd1c6a985a686252ef0
\ No newline at end of file
1266bf92f23a42d96778bf546534882f03d2388f22640e4cfaa2a9a1aad19093
\ No newline at end of file
99ee6773319af0fa7a1dfef92f67cc95141c892bf7adcf500d46adc1ebd4c70f
\ No newline at end of file
991041c8d3092175165834a988eb32141e49d7785cda756c8a78170b4af6db64
\ No newline at end of file
18d64af208338baec9d56a6ac9d7fc35aaeb79d3f8036d3cf5bcc72879827299
\ No newline at end of file
ac0f71b427be1fb583474e352ce9468a1270c6e67fa40f9071751a0485f21160
\ No newline at end of file
964e9f88018b2ab72fd370f6a8dccde218cfd4ffa3ccedf4f142ab341f5e858f
\ No newline at end of file
cc9f56a872cf5e9084e863adc599545754594fb03f30f18433923e0429986e39
\ No newline at end of file
328e095123eb0b8822342b0d4a338d42265ca8eafbcadcc7e15628e9d02c863d
\ No newline at end of file
65f27401a76856d6cb284078204bb1b80797fa344e1a4ef3d9638c296f2d72d7
\ No newline at end of file
526174bd42327e0212b15ffbad99541887de1dec35cc4c592d4f02065026b3ca
\ No newline at end of file
ec071087de45291ae8fc0d6d6e778d16a7411a934e4a301f62890061abcaed4c
\ No newline at end of file
ced4e314f2653ff56a946d334b4cb12085825b8d21ceea42cb686bef688b714c
\ No newline at end of file
400dd521f5c462afdcb3c556815f840e916df7576a6d6dd301fe5a49a1fe6011
\ No newline at end of file
6c52ab55814241b37014949976c4f3a0c63bea0a4f9a301735cc9f4c509f433d
\ No newline at end of file
2929e4796e85fa6cf8b5950fb57295ae87c48c914d0a71123a29d579d797d636
\ No newline at end of file
21ae7ea7cbf1d34c7b9dc300a641eaf975ed1e33f5bc519494cd37c4a661bec8
\ No newline at end of file
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MigrateDevopsSegmentsToGroups, schema: 20210128114526 do
+RSpec.describe Gitlab::BackgroundMigration::MigrateDevopsSegmentsToGroups, schema: 20210301200959 do
let(:segments_table) { table(:analytics_devops_adoption_segments) }
let(:selections_table) { table(:analytics_devops_adoption_segment_selections) }
let(:namespaces_table) { table(:namespaces) }
......
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe CreateElasticReindexingSubtasks do
let(:migration) { described_class.new }
let(:reindexing_tasks) { table(:elastic_reindexing_tasks) }
let(:reindexing_subtasks) { table(:elastic_reindexing_subtasks) }
let(:fields_to_migrate) { %w(documents_count documents_count_target index_name_from index_name_to elastic_task) }
describe "#up" do
it 'migrates old reindexing tasks' do
# these tasks should not be migrated
reindexing_tasks.create!(in_progress: false, state: 10)
reindexing_tasks.create!(in_progress: false, state: 10, index_name_from: 'index_name')
reindexing_tasks.create!(in_progress: false, state: 10, index_name_to: 'index_name')
reindexing_tasks.create!(in_progress: false, state: 10, elastic_task: 'TASK')
# these tasks should be migrated
task1 = reindexing_tasks.create!(in_progress: false, documents_count: 100, state: 10, index_name_from: 'index1', index_name_to: 'index2', elastic_task: 'TASK_ID', documents_count_target: 100)
task2 = reindexing_tasks.create!(in_progress: false, documents_count: 50, state: 11, index_name_from: 'index3', index_name_to: 'index4', elastic_task: 'TASK_ID2', documents_count_target: 99)
migrate!
expect(reindexing_subtasks.count).to eq(2)
[task1, task2].each do |task|
subtask = reindexing_subtasks.find_by(elastic_reindexing_task_id: task.id)
expect(task.attributes.slice(*fields_to_migrate)).to match(subtask.attributes.slice(*fields_to_migrate))
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe ScheduleUuidPopulationForSecurityFindings2 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:ci_pipelines) { table(:ci_pipelines) }
let(:ci_builds) { table(:ci_builds) }
let(:scanners) { table(:vulnerability_scanners) }
let(:security_scans) { table(:security_scans) }
let(:security_findings) { table(:security_findings) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:ci_pipeline) { ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
let(:ci_build) { ci_builds.create!(commit_id: ci_pipeline.id, retried: false, type: 'Ci::Build') }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'bandit', name: 'Bandit') }
let(:security_scan_1) { security_scans.create!(build_id: ci_build.id, scan_type: 0) }
let(:security_scan_2) { security_scans.create!(build_id: ci_build.id, scan_type: 1) }
around do |example|
freeze_time { Sidekiq::Testing.fake! { example.run } }
end
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
3.times do
security_findings.create!(scan_id: security_scan_1.id, scanner_id: scanner.id, severity: 0, confidence: 0, project_fingerprint: SecureRandom.uuid)
end
security_findings.create!(scan_id: security_scan_2.id, scanner_id: scanner.id, severity: 0, confidence: 0, project_fingerprint: SecureRandom.uuid)
end
it 'schedules the background jobs', :aggregate_failures do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to be(2)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(2.minutes, security_scan_1.id)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(4.minutes, security_scan_2.id)
end
end
# frozen_string_literal: true
# rubocop:disable Metrics/ClassLength
module Gitlab
module BackgroundMigration
##
# The class to migrate job artifacts from `ci_builds` to `ci_job_artifacts`
class MigrateLegacyArtifacts
FILE_LOCAL_STORE = 1 # equal to ObjectStorage::Store::LOCAL
ARCHIVE_FILE_TYPE = 1 # equal to Ci::JobArtifact.file_types['archive']
METADATA_FILE_TYPE = 2 # equal to Ci::JobArtifact.file_types['metadata']
LEGACY_PATH_FILE_LOCATION = 1 # equal to Ci::JobArtifact.file_location['legacy_path']
def perform(start_id, stop_id)
ActiveRecord::Base.transaction do
insert_archives(start_id, stop_id)
insert_metadatas(start_id, stop_id)
delete_legacy_artifacts(start_id, stop_id)
end
end
private
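# Copies legacy archive data from `ci_builds` columns into `ci_job_artifacts`.
# The NOT EXISTS guard skips builds that already have an archive artifact row,
# so re-running a batch does not create duplicates.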
def insert_archives(start_id, stop_id)
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO
ci_job_artifacts (
project_id,
job_id,
expire_at,
file_location,
created_at,
updated_at,
file,
size,
file_store,
file_type
)
SELECT
project_id,
id,
artifacts_expire_at #{add_missing_db_timezone},
#{LEGACY_PATH_FILE_LOCATION},
created_at #{add_missing_db_timezone},
created_at #{add_missing_db_timezone},
artifacts_file,
artifacts_size,
COALESCE(artifacts_file_store, #{FILE_LOCAL_STORE}),
#{ARCHIVE_FILE_TYPE}
FROM
ci_builds
WHERE
id BETWEEN #{start_id.to_i} AND #{stop_id.to_i}
AND artifacts_file <> ''
AND NOT EXISTS (
SELECT
1
FROM
ci_job_artifacts
WHERE
ci_builds.id = ci_job_artifacts.job_id
AND ci_job_artifacts.file_type = #{ARCHIVE_FILE_TYPE})
SQL
end
def insert_metadatas(start_id, stop_id)
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO
ci_job_artifacts (
project_id,
job_id,
expire_at,
file_location,
created_at,
updated_at,
file,
size,
file_store,
file_type
)
SELECT
project_id,
id,
artifacts_expire_at #{add_missing_db_timezone},
#{LEGACY_PATH_FILE_LOCATION},
created_at #{add_missing_db_timezone},
created_at #{add_missing_db_timezone},
artifacts_metadata,
NULL,
COALESCE(artifacts_metadata_store, #{FILE_LOCAL_STORE}),
#{METADATA_FILE_TYPE}
FROM
ci_builds
WHERE
id BETWEEN #{start_id.to_i} AND #{stop_id.to_i}
AND artifacts_file <> ''
AND artifacts_metadata <> ''
AND NOT EXISTS (
SELECT
1
FROM
ci_job_artifacts
WHERE
ci_builds.id = ci_job_artifacts.job_id
AND ci_job_artifacts.file_type = #{METADATA_FILE_TYPE})
SQL
end
def delete_legacy_artifacts(start_id, stop_id)
ActiveRecord::Base.connection.execute <<~SQL
UPDATE
ci_builds
SET
artifacts_file = NULL,
artifacts_file_store = NULL,
artifacts_size = NULL,
artifacts_metadata = NULL,
artifacts_metadata_store = NULL
WHERE
id BETWEEN #{start_id.to_i} AND #{stop_id.to_i}
AND artifacts_file <> ''
SQL
end
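# The legacy `ci_builds` timestamps are appended with `AT TIME ZONE 'UTC'` in
# the queries above so they are interpreted as UTC when copied over.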
def add_missing_db_timezone
'at time zone \'UTC\''
end
end
end
end
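# A minimal usage sketch (assumption: a Rails console with the class above
# loaded); the ID range is hypothetical and only illustrates the
# perform(start_id, stop_id) contract defined above.
Gitlab::BackgroundMigration::MigrateLegacyArtifacts.new.perform(1, 10_000)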
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Remove duplicated service records with the same project and type.
# These were created in the past for unknown reasons, and should be blocked
# now by the uniqueness validation in the Service model.
class RemoveDuplicateServices
# See app/models/service
class Service < ActiveRecord::Base
include EachBatch
self.table_name = 'services'
self.inheritance_column = :_type_disabled
scope :project_ids_with_duplicates, -> do
select(:project_id)
.distinct
.where.not(project_id: nil)
.group(:project_id, :type)
.having('count(*) > 1')
end
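# Returns the [project_id, type] pairs that occur more than once within the given projects.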
scope :types_with_duplicates, -> (project_ids) do
select(:project_id, :type)
.where(project_id: project_ids)
.group(:project_id, :type)
.having('count(*) > 1')
end
end
def perform(*project_ids)
types_with_duplicates = Service.types_with_duplicates(project_ids).pluck(:project_id, :type)
types_with_duplicates.each do |project_id, type|
remove_duplicates(project_id, type)
end
end
private
def remove_duplicates(project_id, type)
scope = Service.where(project_id: project_id, type: type)
# Build a subquery to determine which service record is actually in use,
# by querying for it without specifying an order.
#
# This should match the record returned by `Project#find_service`,
# and the `has_one` service associations on `Project`.
correct_service = scope.select(:id).limit(1)
# Delete all other services with the same `project_id` and `type`
duplicate_services = scope.where.not(id: correct_service)
duplicate_services.delete_all
end
end
end
end
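# A minimal usage sketch (assumption: a Rails console); the project IDs are
# hypothetical and mirror how a scheduling migration hands a batch of project
# IDs to perform.
Gitlab::BackgroundMigration::RemoveDuplicateServices.new.perform(42, 43)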
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20210301200959 do
subject(:perform) { migration.perform(1, 99) }
let(:migration) { described_class.new }
......
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20210301200959 do
let_it_be(:jira_integration_temp) { described_class::JiraServiceTemp }
let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
let_it_be(:atlassian_host) { 'https://api.atlassian.net' }
......
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectUpdatedAtAfterRepositoryStorageMove, :migration, schema: 20210210093901 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectUpdatedAtAfterRepositoryStorageMove, :migration, schema: 20210301200959 do
let(:projects) { table(:projects) }
let(:project_repository_storage_moves) { table(:project_repository_storage_moves) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
......
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210301200959 do
let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let_it_be(:users) { table(:users) }
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, schema: 20210210093901 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
let(:jobs) { table(:ci_builds) }
let(:job_artifacts) { table(:ci_job_artifacts) }
subject { described_class.new.perform(*range) }
context 'when a pipeline exists' do
let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let!(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
let!(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
context 'when legacy artifacts exist' do
let(:artifacts_expire_at) { 1.day.since.to_s }
let(:file_store) { ::ObjectStorage::Store::REMOTE }
let!(:job) do
jobs.create!(
commit_id: pipeline.id,
project_id: project.id,
status: :success,
**artifacts_archive_attributes,
**artifacts_metadata_attributes)
end
let(:artifacts_archive_attributes) do
{
artifacts_file: 'archive.zip',
artifacts_file_store: file_store,
artifacts_size: 123,
artifacts_expire_at: artifacts_expire_at
}
end
let(:artifacts_metadata_attributes) do
{
artifacts_metadata: 'metadata.gz',
artifacts_metadata_store: file_store
}
end
it 'has legacy artifacts' do
expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([artifacts_archive_attributes.values])
expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([artifacts_metadata_attributes.values])
end
it 'does not have new artifacts yet' do
expect(job_artifacts.count).to be_zero
end
context 'when the record exists inside of the range of a background migration' do
let(:range) { [job.id, job.id] }
it 'migrates a legacy artifact to ci_job_artifacts table' do
expect { subject }.to change { job_artifacts.count }.by(2)
expect(job_artifacts.order(:id).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
.to eq([[project.id,
job.id,
described_class::ARCHIVE_FILE_TYPE,
file_store,
artifacts_archive_attributes[:artifacts_size],
artifacts_expire_at,
'archive.zip',
nil,
described_class::LEGACY_PATH_FILE_LOCATION],
[project.id,
job.id,
described_class::METADATA_FILE_TYPE,
file_store,
nil,
artifacts_expire_at,
'metadata.gz',
nil,
described_class::LEGACY_PATH_FILE_LOCATION]])
expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, artifacts_expire_at]])
expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
end
context 'when file_store is nil' do
let(:file_store) { nil }
it 'has nullified file_store in all legacy artifacts' do
expect(jobs.pluck('artifacts_file_store, artifacts_metadata_store')).to eq([[nil, nil]])
end
it 'fills file_store by the value of local file store' do
subject
expect(job_artifacts.pluck('file_store')).to all(eq(::ObjectStorage::Store::LOCAL))
end
end
context 'when new artifacts already exist' do
context 'when only archive.zip existed' do
before do
job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
end
it 'had archive.zip already' do
expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE)).to be_truthy
end
it 'migrates metadata' do
expect { subject }.to change { job_artifacts.count }.by(1)
expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::METADATA_FILE_TYPE)).to be_truthy
end
end
context 'when both archive and metadata existed' do
before do
job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::METADATA_FILE_TYPE, size: 999, file: 'metadata.zip')
end
it 'does not migrate' do
expect { subject }.not_to change { job_artifacts.count }
end
end
end
end
context 'when the record exists outside of the range of a background migration' do
let(:range) { [job.id + 1, job.id + 1] }
it 'does not migrate' do
expect { subject }.not_to change { job_artifacts.count }
end
end
end
context 'when the job does not have legacy artifacts' do
let!(:job) { jobs.create!(commit_id: pipeline.id, project_id: project.id, status: :success) }
it 'does not have the legacy artifacts in database' do
expect(jobs.count).to eq(1)
expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, nil]])
expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
end
context 'when the record exists inside of the range of a background migration' do
let(:range) { [job.id, job.id] }
it 'does not migrate' do
expect { subject }.not_to change { job_artifacts.count }
end
end
end
end
end
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'webauthn/u2f_migrator'
-RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210301200959 do
let(:users) { table(:users) }
let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
......
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 2021_02_26_120851 do
+RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210301200959 do
let(:enabled) { 20 }
let(:disabled) { 0 }
......
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20210301200959 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
......
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20210301200959 do
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
let!(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
let!(:issue1) { table(:issues).create!(id: 1, project_id: project.id, service_desk_reply_to: "a@gitlab.com") }
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateServices, :migration, schema: 20181228175414 do
let_it_be(:users) { table(:users) }
let_it_be(:namespaces) { table(:namespaces) }
let_it_be(:projects) { table(:projects) }
let_it_be(:services) { table(:services) }
let_it_be(:alerts_service_data) { table(:alerts_service_data) }
let_it_be(:chat_names) { table(:chat_names) }
let_it_be(:issue_tracker_data) { table(:issue_tracker_data) }
let_it_be(:jira_tracker_data) { table(:jira_tracker_data) }
let_it_be(:open_project_tracker_data) { table(:open_project_tracker_data) }
let_it_be(:slack_integrations) { table(:slack_integrations) }
let_it_be(:web_hooks) { table(:web_hooks) }
let_it_be(:data_tables) do
[alerts_service_data, chat_names, issue_tracker_data, jira_tracker_data, open_project_tracker_data, slack_integrations, web_hooks]
end
let!(:user) { users.create!(id: 1, projects_limit: 100) }
let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
# project without duplicate services
let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id) }
let!(:service1) { services.create!(id: 1, project_id: project1.id, type: 'AsanaService') }
let!(:service2) { services.create!(id: 2, project_id: project1.id, type: 'JiraService') }
let!(:service3) { services.create!(id: 3, project_id: project1.id, type: 'SlackService') }
# project with duplicate services
let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id) }
let!(:service4) { services.create!(id: 4, project_id: project2.id, type: 'AsanaService') }
let!(:service5) { services.create!(id: 5, project_id: project2.id, type: 'JiraService') }
let!(:service6) { services.create!(id: 6, project_id: project2.id, type: 'JiraService') }
let!(:service7) { services.create!(id: 7, project_id: project2.id, type: 'SlackService') }
let!(:service8) { services.create!(id: 8, project_id: project2.id, type: 'SlackService') }
let!(:service9) { services.create!(id: 9, project_id: project2.id, type: 'SlackService') }
# project with duplicate services and dependent records
let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id) }
let!(:service10) { services.create!(id: 10, project_id: project3.id, type: 'AlertsService') }
let!(:service11) { services.create!(id: 11, project_id: project3.id, type: 'AlertsService') }
let!(:service12) { services.create!(id: 12, project_id: project3.id, type: 'SlashCommandsService') }
let!(:service13) { services.create!(id: 13, project_id: project3.id, type: 'SlashCommandsService') }
let!(:service14) { services.create!(id: 14, project_id: project3.id, type: 'IssueTrackerService') }
let!(:service15) { services.create!(id: 15, project_id: project3.id, type: 'IssueTrackerService') }
let!(:service16) { services.create!(id: 16, project_id: project3.id, type: 'JiraService') }
let!(:service17) { services.create!(id: 17, project_id: project3.id, type: 'JiraService') }
let!(:service18) { services.create!(id: 18, project_id: project3.id, type: 'OpenProjectService') }
let!(:service19) { services.create!(id: 19, project_id: project3.id, type: 'OpenProjectService') }
let!(:service20) { services.create!(id: 20, project_id: project3.id, type: 'SlackService') }
let!(:service21) { services.create!(id: 21, project_id: project3.id, type: 'SlackService') }
let!(:dependant_records) do
alerts_service_data.create!(id: 1, service_id: service10.id)
alerts_service_data.create!(id: 2, service_id: service11.id)
chat_names.create!(id: 1, service_id: service12.id, user_id: user.id, team_id: 'team1', chat_id: 'chat1')
chat_names.create!(id: 2, service_id: service13.id, user_id: user.id, team_id: 'team2', chat_id: 'chat2')
issue_tracker_data.create!(id: 1, service_id: service14.id)
issue_tracker_data.create!(id: 2, service_id: service15.id)
jira_tracker_data.create!(id: 1, service_id: service16.id)
jira_tracker_data.create!(id: 2, service_id: service17.id)
open_project_tracker_data.create!(id: 1, service_id: service18.id)
open_project_tracker_data.create!(id: 2, service_id: service19.id)
slack_integrations.create!(id: 1, service_id: service20.id, user_id: user.id, team_id: 'team1', team_name: 'team1', alias: 'alias1')
slack_integrations.create!(id: 2, service_id: service21.id, user_id: user.id, team_id: 'team2', team_name: 'team2', alias: 'alias2')
web_hooks.create!(id: 1, service_id: service20.id)
web_hooks.create!(id: 2, service_id: service21.id)
end
# project without services
let!(:project4) { projects.create!(id: 4, namespace_id: namespace.id) }
it 'removes duplicate services and dependent records' do
# Determine which services we expect to keep
expected_services = projects.pluck(:id).each_with_object({}) do |project_id, map|
project_services = services.where(project_id: project_id)
types = project_services.distinct.pluck(:type)
map[project_id] = types.map { |type| project_services.where(type: type).take!.id }
end
expect do
subject.perform(project2.id, project3.id)
end.to change { services.count }.from(21).to(12)
services1 = services.where(project_id: project1.id)
expect(services1.count).to be(3)
expect(services1.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
expect(services1.pluck(:id)).to contain_exactly(*expected_services[project1.id])
services2 = services.where(project_id: project2.id)
expect(services2.count).to be(3)
expect(services2.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
expect(services2.pluck(:id)).to contain_exactly(*expected_services[project2.id])
services3 = services.where(project_id: project3.id)
expect(services3.count).to be(6)
expect(services3.pluck(:type)).to contain_exactly('AlertsService', 'SlashCommandsService', 'IssueTrackerService', 'JiraService', 'OpenProjectService', 'SlackService')
expect(services3.pluck(:id)).to contain_exactly(*expected_services[project3.id])
kept_services = expected_services.values.flatten
data_tables.each do |table|
expect(table.count).to be(1)
expect(kept_services).to include(table.pluck(:service_id).first)
end
end
it 'does not delete services without duplicates' do
expect do
subject.perform(project1.id, project4.id)
end.not_to change { services.count }
end
it 'only deletes duplicate services for the current batch' do
expect do
subject.perform(project2.id)
end.to change { services.count }.by(-3)
end
end
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20210301200959 do
let(:users) { table(:users) }
let(:emails) { table(:emails) }
let(:user_synced_attributes_metadata) { table(:user_synced_attributes_metadata) }
......
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe RemoveDuplicateServices2 do
let_it_be(:namespaces) { table(:namespaces) }
let_it_be(:projects) { table(:projects) }
let_it_be(:services) { table(:services) }
describe '#up' do
before do
stub_const("#{described_class}::BATCH_SIZE", 2)
namespaces.create!(id: 1, name: 'group', path: 'group')
projects.create!(id: 1, namespace_id: 1) # duplicate services
projects.create!(id: 2, namespace_id: 1) # normal services
projects.create!(id: 3, namespace_id: 1) # no services
projects.create!(id: 4, namespace_id: 1) # duplicate services
projects.create!(id: 5, namespace_id: 1) # duplicate services
services.create!(id: 1, project_id: 1, type: 'JiraService')
services.create!(id: 2, project_id: 1, type: 'JiraService')
services.create!(id: 3, project_id: 2, type: 'JiraService')
services.create!(id: 4, project_id: 4, type: 'AsanaService')
services.create!(id: 5, project_id: 4, type: 'AsanaService')
services.create!(id: 6, project_id: 4, type: 'JiraService')
services.create!(id: 7, project_id: 4, type: 'JiraService')
services.create!(id: 8, project_id: 4, type: 'SlackService')
services.create!(id: 9, project_id: 4, type: 'SlackService')
services.create!(id: 10, project_id: 5, type: 'JiraService')
services.create!(id: 11, project_id: 5, type: 'JiraService')
# Services without a project_id should be ignored
services.create!(id: 12, type: 'JiraService')
services.create!(id: 13, type: 'JiraService')
end
it 'schedules background jobs for all projects with duplicate services' do
Sidekiq::Testing.fake! do
freeze_time do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, 1, 4)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 5)
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AlterVsaIssueFirstMentionedInCommitValue, schema: 20210114033715 do
let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
let(:value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
let(:namespaces) { table(:namespaces) }
let(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
let(:value_stream) { value_streams.create!(name: 'test', group_id: namespace.id) }
let!(:stage_1) { group_stages.create!(group_value_stream_id: value_stream.id, group_id: namespace.id, name: 'stage 1', start_event_identifier: described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_EE, end_event_identifier: 1) }
let!(:stage_2) { group_stages.create!(group_value_stream_id: value_stream.id, group_id: namespace.id, name: 'stage 2', start_event_identifier: 2, end_event_identifier: described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_EE) }
let!(:stage_3) { group_stages.create!(group_value_stream_id: value_stream.id, group_id: namespace.id, name: 'stage 3', start_event_identifier: described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS, end_event_identifier: 3) }
describe '#up' do
it 'changes the EE specific identifier values to the FOSS version' do
migrate!
expect(stage_1.reload.start_event_identifier).to eq(described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS)
expect(stage_2.reload.end_event_identifier).to eq(described_class::ISSUE_FIRST_MENTIONED_IN_COMMIT_FOSS)
end
it 'does not change irrelevant records' do
expect { migrate! }.not_to change { stage_3.reload }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe RemoveBadDependencyProxyManifests, schema: 20210128140157 do
let_it_be(:namespaces) { table(:namespaces) }
let_it_be(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) }
let_it_be(:group) { namespaces.create!(type: 'Group', name: 'test', path: 'test') }
let_it_be(:dependency_proxy_manifest_with_content_type) do
dependency_proxy_manifests.create!(group_id: group.id, file: 'foo', file_name: 'foo', digest: 'asdf1234', content_type: 'content-type')
end
let_it_be(:dependency_proxy_manifest_without_content_type) do
dependency_proxy_manifests.create!(group_id: group.id, file: 'bar', file_name: 'bar', digest: 'fdsa6789')
end
it 'removes the dependency_proxy_manifests with a content_type', :aggregate_failures do
expect(dependency_proxy_manifest_with_content_type).to be_present
expect(dependency_proxy_manifest_without_content_type).to be_present
expect { migrate! }.to change { dependency_proxy_manifests.count }.from(2).to(1)
expect(dependency_proxy_manifests.where.not(content_type: nil)).to be_empty
expect(dependency_proxy_manifest_without_content_type.reload).to be_present
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe BackfillUpdatedAtAfterRepositoryStorageMove, :sidekiq do
let_it_be(:projects) { table(:projects) }
let_it_be(:project_repository_storage_moves) { table(:project_repository_storage_moves) }
let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
describe '#up' do
it 'schedules background jobs for all distinct projects in batches' do
stub_const("#{described_class}::BATCH_SIZE", 3)
project_1 = projects.create!(id: 1, namespace_id: namespace.id)
project_2 = projects.create!(id: 2, namespace_id: namespace.id)
project_3 = projects.create!(id: 3, namespace_id: namespace.id)
project_4 = projects.create!(id: 4, namespace_id: namespace.id)
project_5 = projects.create!(id: 5, namespace_id: namespace.id)
project_6 = projects.create!(id: 6, namespace_id: namespace.id)
project_7 = projects.create!(id: 7, namespace_id: namespace.id)
projects.create!(id: 8, namespace_id: namespace.id)
project_repository_storage_moves.create!(id: 1, project_id: project_1.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 2, project_id: project_1.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 3, project_id: project_2.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 4, project_id: project_3.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 5, project_id: project_3.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 6, project_id: project_4.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 7, project_id: project_4.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 8, project_id: project_5.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 9, project_id: project_6.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 10, project_id: project_7.id, source_storage_name: 'default', destination_storage_name: 'default')
Sidekiq::Testing.fake! do
freeze_time do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(3)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(2.minutes, 1, 2, 3)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(4.minutes, 4, 5, 6)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(6.minutes, 7)
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AddEnvironmentScopeToGroupVariables do
let(:migration) { described_class.new }
let(:ci_group_variables) { table(:ci_group_variables) }
let(:group) { table(:namespaces).create!(name: 'group', path: 'group') }
def create_variable!(group, key:, environment_scope: '*')
table(:ci_group_variables).create!(
group_id: group.id,
key: key,
environment_scope: environment_scope
)
end
describe '#down' do
context 'group has variables with duplicate keys' do
it 'deletes all but the first record' do
migration.up
remaining_variable = create_variable!(group, key: 'key')
create_variable!(group, key: 'key', environment_scope: 'staging')
create_variable!(group, key: 'key', environment_scope: 'production')
migration.down
expect(ci_group_variables.pluck(:id)).to eq [remaining_variable.id]
end
end
context 'group does not have variables with duplicate keys' do
it 'does not delete any records' do
migration.up
create_variable!(group, key: 'key')
create_variable!(group, key: 'staging')
create_variable!(group, key: 'production')
expect { migration.down }.not_to change { ci_group_variables.count }
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe DedupIssueMetrics, :migration, schema: 20210205104425 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:issues) { table(:issues) }
let(:metrics) { table(:issue_metrics) }
let(:issue_params) { { title: 'title', project_id: project.id } }
let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
let!(:project) { projects.create!(namespace_id: namespace.id) }
let!(:issue_1) { issues.create!(issue_params) }
let!(:issue_2) { issues.create!(issue_params) }
let!(:issue_3) { issues.create!(issue_params) }
let!(:duplicated_metrics_1) { metrics.create!(issue_id: issue_1.id, first_mentioned_in_commit_at: 1.day.ago, first_added_to_board_at: 5.days.ago, updated_at: 2.months.ago) }
let!(:duplicated_metrics_2) { metrics.create!(issue_id: issue_1.id, first_mentioned_in_commit_at: Time.now, first_associated_with_milestone_at: Time.now, updated_at: 1.month.ago) }
let!(:duplicated_metrics_3) { metrics.create!(issue_id: issue_3.id, first_mentioned_in_commit_at: 1.day.ago, updated_at: 2.months.ago) }
let!(:duplicated_metrics_4) { metrics.create!(issue_id: issue_3.id, first_added_to_board_at: 1.day.ago, updated_at: 1.month.ago) }
let!(:non_duplicated_metrics) { metrics.create!(issue_id: issue_2.id, first_added_to_board_at: 2.days.ago) }
it 'deduplicates issue_metrics table' do
expect { migrate! }.to change { metrics.count }.from(5).to(3)
end
it 'merges `duplicated_metrics_1` with `duplicated_metrics_2`' do
migrate!
expect(metrics.where(id: duplicated_metrics_1.id)).not_to exist
merged_metrics = metrics.find_by(id: duplicated_metrics_2.id)
expect(merged_metrics).to be_present
expect(merged_metrics.first_mentioned_in_commit_at).to be_like_time(duplicated_metrics_2.first_mentioned_in_commit_at)
expect(merged_metrics.first_added_to_board_at).to be_like_time(duplicated_metrics_1.first_added_to_board_at)
end
it 'merges `duplicated_metrics_3` with `duplicated_metrics_4`' do
migrate!
expect(metrics.where(id: duplicated_metrics_3.id)).not_to exist
merged_metrics = metrics.find_by(id: duplicated_metrics_4.id)
expect(merged_metrics).to be_present
expect(merged_metrics.first_mentioned_in_commit_at).to be_like_time(duplicated_metrics_3.first_mentioned_in_commit_at)
expect(merged_metrics.first_added_to_board_at).to be_like_time(duplicated_metrics_4.first_added_to_board_at)
end
it 'does not change non duplicated records' do
expect { migrate! }.not_to change { non_duplicated_metrics.reload.attributes }
end
it 'does nothing when there are no metrics' do
metrics.delete_all
migrate!
expect(metrics.count).to eq(0)
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AddHasExternalIssueTrackerTrigger do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
before do
@namespace = namespaces.create!(name: 'foo', path: 'foo')
@project = projects.create!(namespace_id: @namespace.id)
end
describe '#up' do
before do
migrate!
end
describe 'INSERT trigger' do
it 'sets `has_external_issue_tracker` to true when active `issue_tracker` is inserted' do
expect do
services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
end.to change { @project.reload.has_external_issue_tracker }.to(true)
end
it 'does not set `has_external_issue_tracker` to true when service is for a different project' do
different_project = projects.create!(namespace_id: @namespace.id)
expect do
services.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
end.not_to change { @project.reload.has_external_issue_tracker }
end
it 'does not set `has_external_issue_tracker` to true when inactive `issue_tracker` is inserted' do
expect do
services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
end.not_to change { @project.reload.has_external_issue_tracker }
end
it 'does not set `has_external_issue_tracker` to true when a non-`issue tracker` active service is inserted' do
expect do
services.create!(category: 'my_type', active: true, project_id: @project.id)
end.not_to change { @project.reload.has_external_issue_tracker }
end
end
describe 'UPDATE trigger' do
it 'sets `has_external_issue_tracker` to true when `issue_tracker` is made active' do
service = services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
expect do
service.update!(active: true)
end.to change { @project.reload.has_external_issue_tracker }.to(true)
end
it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive' do
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
expect do
service.update!(active: false)
end.to change { @project.reload.has_external_issue_tracker }.to(false)
end
it 'sets `has_external_issue_tracker` to false when `issue_tracker` is made inactive, and an inactive `issue_tracker` exists' do
services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
expect do
service.update!(active: false)
end.to change { @project.reload.has_external_issue_tracker }.to(false)
end
it 'does not change `has_external_issue_tracker` when `issue_tracker` is made inactive, if an active `issue_tracker` exists' do
services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
expect do
service.update!(active: false)
end.not_to change { @project.reload.has_external_issue_tracker }
end
it 'does not change `has_external_issue_tracker` when service is for a different project' do
different_project = projects.create!(namespace_id: @namespace.id)
service = services.create!(category: 'issue_tracker', active: false, project_id: different_project.id)
expect do
service.update!(active: true)
end.not_to change { @project.reload.has_external_issue_tracker }
end
end
describe 'DELETE trigger' do
it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted' do
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
expect do
service.delete
end.to change { @project.reload.has_external_issue_tracker }.to(false)
end
it 'sets `has_external_issue_tracker` to false when `issue_tracker` is deleted, if an inactive `issue_tracker` still exists' do
services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
expect do
service.delete
end.to change { @project.reload.has_external_issue_tracker }.to(false)
end
it 'does not change `has_external_issue_tracker` when `issue_tracker` is deleted, if an active `issue_tracker` still exists' do
services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
expect do
service.delete
end.not_to change { @project.reload.has_external_issue_tracker }
end
it 'does not change `has_external_issue_tracker` when service is for a different project' do
different_project = projects.create!(namespace_id: @namespace.id)
service = services.create!(category: 'issue_tracker', active: true, project_id: different_project.id)
expect do
service.delete
end.not_to change { @project.reload.has_external_issue_tracker }
end
end
end
describe '#down' do
before do
migration.up
migration.down
end
it 'drops the INSERT trigger' do
expect do
services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
end.not_to change { @project.reload.has_external_issue_tracker }
end
it 'drops the UPDATE trigger' do
service = services.create!(category: 'issue_tracker', active: false, project_id: @project.id)
@project.update!(has_external_issue_tracker: false)
expect do
service.update!(active: true)
end.not_to change { @project.reload.has_external_issue_tracker }
end
it 'drops the DELETE trigger' do
service = services.create!(category: 'issue_tracker', active: true, project_id: @project.id)
@project.update!(has_external_issue_tracker: true)
expect do
service.delete
end.not_to change { @project.reload.has_external_issue_tracker }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AddHasExternalWikiTrigger do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
before do
@namespace = namespaces.create!(name: 'foo', path: 'foo')
@project = projects.create!(namespace_id: @namespace.id)
end
describe '#up' do
before do
migrate!
end
describe 'INSERT trigger' do
it 'sets `has_external_wiki` to true when active `ExternalWikiService` is inserted' do
expect do
services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
end.to change { @project.reload.has_external_wiki }.to(true)
end
it 'does not set `has_external_wiki` to true when service is for a different project' do
different_project = projects.create!(namespace_id: @namespace.id)
expect do
services.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
end.not_to change { @project.reload.has_external_wiki }
end
it 'does not set `has_external_wiki` to true when inactive `ExternalWikiService` is inserted' do
expect do
services.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
end.not_to change { @project.reload.has_external_wiki }
end
it 'does not set `has_external_wiki` to true when active other service is inserted' do
expect do
services.create!(type: 'MyService', active: true, project_id: @project.id)
end.not_to change { @project.reload.has_external_wiki }
end
end
describe 'UPDATE trigger' do
it 'sets `has_external_wiki` to true when `ExternalWikiService` is made active' do
service = services.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
expect do
service.update!(active: true)
end.to change { @project.reload.has_external_wiki }.to(true)
end
it 'sets `has_external_wiki` to false when `ExternalWikiService` is made inactive' do
service = services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
expect do
service.update!(active: false)
end.to change { @project.reload.has_external_wiki }.to(false)
end
it 'does not change `has_external_wiki` when service is for a different project' do
different_project = projects.create!(namespace_id: @namespace.id)
service = services.create!(type: 'ExternalWikiService', active: false, project_id: different_project.id)
expect do
service.update!(active: true)
end.not_to change { @project.reload.has_external_wiki }
end
end
describe 'DELETE trigger' do
it 'sets `has_external_wiki` to false when `ExternalWikiService` is deleted' do
service = services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
expect do
service.delete
end.to change { @project.reload.has_external_wiki }.to(false)
end
it 'does not change `has_external_wiki` when service is for a different project' do
different_project = projects.create!(namespace_id: @namespace.id)
service = services.create!(type: 'ExternalWikiService', active: true, project_id: different_project.id)
expect do
service.delete
end.not_to change { @project.reload.has_external_wiki }
end
end
end
describe '#down' do
before do
migration.up
migration.down
end
it 'drops the INSERT trigger' do
expect do
services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
end.not_to change { @project.reload.has_external_wiki }
end
it 'drops the UPDATE trigger' do
service = services.create!(type: 'ExternalWikiService', active: false, project_id: @project.id)
@project.update!(has_external_wiki: false)
expect do
service.update!(active: true)
end.not_to change { @project.reload.has_external_wiki }
end
it 'drops the DELETE trigger' do
service = services.create!(type: 'ExternalWikiService', active: true, project_id: @project.id)
@project.update!(has_external_wiki: true)
expect do
service.delete
end.not_to change { @project.reload.has_external_wiki }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe AddNewPostEoaPlans do
let(:plans) { table(:plans) }
subject(:migration) { described_class.new }
describe '#up' do
it 'creates the two new records' do
expect { migration.up }.to change { plans.count }.by(2)
new_plans = plans.last(2)
expect(new_plans.map(&:name)).to contain_exactly('premium', 'ultimate')
end
end
describe '#down' do
it 'removes these two new records' do
plans.create!(name: 'premium', title: 'Premium (Formerly Silver)')
plans.create!(name: 'ultimate', title: 'Ultimate (Formerly Gold)')
expect { migration.down }.to change { plans.count }.by(-2)
expect(plans.find_by(name: 'premium')).to be_nil
expect(plans.find_by(name: 'ultimate')).to be_nil
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe CleanupProjectsWithBadHasExternalIssueTrackerData, :migration do
let(:namespace) { table(:namespaces).create!(name: 'foo', path: 'bar') }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
def create_projects!(num)
Array.new(num) do
projects.create!(namespace_id: namespace.id)
end
end
def create_active_external_issue_tracker_integrations!(*projects)
projects.each do |project|
services.create!(category: 'issue_tracker', project_id: project.id, active: true)
end
end
def create_disabled_external_issue_tracker_integrations!(*projects)
projects.each do |project|
services.create!(category: 'issue_tracker', project_id: project.id, active: false)
end
end
def create_active_other_integrations!(*projects)
projects.each do |project|
services.create!(category: 'not_an_issue_tracker', project_id: project.id, active: true)
end
end
it 'sets `projects.has_external_issue_tracker` correctly' do
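# GitLab migration helpers guard against running inside an open transaction,
# and specs wrap each example in one, so the check is stubbed out here.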
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
project_with_an_external_issue_tracker_1,
project_with_an_external_issue_tracker_2,
project_with_only_a_disabled_external_issue_tracker_1,
project_with_only_a_disabled_external_issue_tracker_2,
project_without_any_external_issue_trackers_1,
project_without_any_external_issue_trackers_2 = create_projects!(6)
create_active_external_issue_tracker_integrations!(
project_with_an_external_issue_tracker_1,
project_with_an_external_issue_tracker_2
)
create_disabled_external_issue_tracker_integrations!(
project_with_an_external_issue_tracker_1,
project_with_an_external_issue_tracker_2,
project_with_only_a_disabled_external_issue_tracker_1,
project_with_only_a_disabled_external_issue_tracker_2
)
create_active_other_integrations!(
project_with_an_external_issue_tracker_1,
project_with_an_external_issue_tracker_2,
project_without_any_external_issue_trackers_1,
project_without_any_external_issue_trackers_2
)
# PG triggers on the services table, added in
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51852, set each
# project's `has_external_issue_tracker` column to the correct value when the
# services records were created above.
#
# Below we manually set `has_external_issue_tracker` to incorrect values to
# emulate projects in the state they were in before the PG triggers existed.
project_with_an_external_issue_tracker_2.update!(has_external_issue_tracker: false)
project_with_only_a_disabled_external_issue_tracker_2.update!(has_external_issue_tracker: true)
project_without_any_external_issue_trackers_2.update!(has_external_issue_tracker: true)
migrate!
expected_true = [
project_with_an_external_issue_tracker_1,
project_with_an_external_issue_tracker_2
].each(&:reload).map(&:has_external_issue_tracker)
expected_not_true = [
project_without_any_external_issue_trackers_1,
project_without_any_external_issue_trackers_2,
project_with_only_a_disabled_external_issue_tracker_1,
project_with_only_a_disabled_external_issue_tracker_2
].each(&:reload).map(&:has_external_issue_tracker)
expect(expected_true).to all(eq(true))
expect(expected_not_true).to all(be_falsey)
end
end
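# Illustrative sketch, not the code under test: the cleanup exercised above recomputes
# `projects.has_external_issue_tracker` from the `services` table for rows written
# before the PG triggers existed. It is shown as a single, unbatched statement purely
# for illustration; the table and column names come from the spec, while the class
# name and SQL shape are assumptions.
class CleanupProjectsWithBadHasExternalIssueTrackerDataSketch < ActiveRecord::Migration[6.0]
  def up
    # A project should be flagged if and only if it has at least one active
    # integration in the 'issue_tracker' category.
    execute(<<~SQL)
      UPDATE projects
      SET has_external_issue_tracker = EXISTS (
        SELECT 1
        FROM services
        WHERE services.project_id = projects.id
          AND services.category = 'issue_tracker'
          AND services.active = TRUE
      )
    SQL
  end

  def down
    # Data cleanup only; nothing meaningful to restore.
  end
end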
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe CleanupProjectsWithBadHasExternalWikiData, :migration do
let(:namespace) { table(:namespaces).create!(name: 'foo', path: 'bar') }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
def create_projects!(num)
Array.new(num) do
projects.create!(namespace_id: namespace.id)
end
end
def create_active_external_wiki_integrations!(*projects)
projects.each do |project|
services.create!(type: 'ExternalWikiService', project_id: project.id, active: true)
end
end
def create_disabled_external_wiki_integrations!(*projects)
projects.each do |project|
services.create!(type: 'ExternalWikiService', project_id: project.id, active: false)
end
end
def create_active_other_integrations!(*projects)
projects.each do |project|
services.create!(type: 'NotAnExternalWikiService', project_id: project.id, active: true)
end
end
it 'sets `projects.has_external_wiki` correctly' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
project_with_external_wiki_1,
project_with_external_wiki_2,
project_with_disabled_external_wiki_1,
project_with_disabled_external_wiki_2,
project_without_external_wiki_1,
project_without_external_wiki_2 = create_projects!(6)
create_active_external_wiki_integrations!(
project_with_external_wiki_1,
project_with_external_wiki_2
)
create_disabled_external_wiki_integrations!(
project_with_disabled_external_wiki_1,
project_with_disabled_external_wiki_2
)
create_active_other_integrations!(
project_without_external_wiki_1,
project_without_external_wiki_2
)
# PG triggers on the services table added in a previous migration
# will have set the `has_external_wiki` columns to correct data when
# the services records were created above.
#
# We set the `has_external_wiki` columns for projects to incorrect
# data manually below to emulate projects in a state before the PG
# triggers were added.
project_with_external_wiki_2.update!(has_external_wiki: false)
project_with_disabled_external_wiki_2.update!(has_external_wiki: true)
project_without_external_wiki_2.update!(has_external_wiki: true)
migrate!
expected_true = [
project_with_external_wiki_1,
project_with_external_wiki_2
].each(&:reload).map(&:has_external_wiki)
expected_not_true = [
project_without_external_wiki_1,
project_without_external_wiki_2,
project_with_disabled_external_wiki_1,
project_with_disabled_external_wiki_2
].each(&:reload).map(&:has_external_wiki)
expect(expected_true).to all(eq(true))
expect(expected_not_true).to all(be_falsey)
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe DropAlertsServiceData do
let_it_be(:alerts_service_data) { table(:alerts_service_data) }
it 'correctly migrates up and down' do
reversible_migration do |migration|
migration.before -> {
expect(alerts_service_data.create!(service_id: 1)).to be_a alerts_service_data
}
migration.after -> {
expect { alerts_service_data.create!(service_id: 1) }
.to raise_error(ActiveRecord::StatementInvalid, /UndefinedTable/)
}
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe MigrateDelayedProjectRemovalFromNamespacesToNamespaceSettings, :migration do
let(:namespaces) { table(:namespaces) }
let(:namespace_settings) { table(:namespace_settings) }
let!(:namespace_wo_settings) { namespaces.create!(name: generate(:name), path: generate(:name), delayed_project_removal: true) }
let!(:namespace_wo_settings_delay_false) { namespaces.create!(name: generate(:name), path: generate(:name), delayed_project_removal: false) }
let!(:namespace_w_settings_delay_true) { namespaces.create!(name: generate(:name), path: generate(:name), delayed_project_removal: true) }
let!(:namespace_w_settings_delay_false) { namespaces.create!(name: generate(:name), path: generate(:name), delayed_project_removal: false) }
let!(:namespace_settings_delay_true) { namespace_settings.create!(namespace_id: namespace_w_settings_delay_true.id, delayed_project_removal: false, created_at: DateTime.now, updated_at: DateTime.now) }
let!(:namespace_settings_delay_false) { namespace_settings.create!(namespace_id: namespace_w_settings_delay_false.id, delayed_project_removal: false, created_at: DateTime.now, updated_at: DateTime.now) }
it 'migrates delayed_project_removal to namespace_settings' do
disable_migrations_output { migrate! }
expect(namespace_settings.count).to eq(3)
expect(namespace_settings.find_by(namespace_id: namespace_wo_settings.id).delayed_project_removal).to eq(true)
expect(namespace_settings.find_by(namespace_id: namespace_wo_settings_delay_false.id)).to be_nil
expect(namespace_settings_delay_true.reload.delayed_project_removal).to eq(true)
expect(namespace_settings_delay_false.reload.delayed_project_removal).to eq(false)
end
end
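# Illustrative sketch, not the migration under test: the behaviour pinned above is
# that every namespace with `delayed_project_removal = TRUE` ends up with a
# namespace_settings row whose flag is also TRUE, whether or not a settings row
# already existed. Assuming `namespace_id` is unique in namespace_settings, an upsert
# captures that; the class name and exact SQL are assumptions.
class MigrateDelayedProjectRemovalSketch < ActiveRecord::Migration[6.0]
  def up
    execute(<<~SQL)
      INSERT INTO namespace_settings (namespace_id, delayed_project_removal, created_at, updated_at)
      SELECT namespaces.id, TRUE, now(), now()
      FROM namespaces
      WHERE namespaces.delayed_project_removal = TRUE
      ON CONFLICT (namespace_id)
      DO UPDATE SET delayed_project_removal = TRUE
    SQL
  end

  def down
    # The source column on namespaces is untouched, so there is nothing to restore.
  end
end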
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe RemoveAlertsServiceRecordsAgain do
let(:services) { table(:services) }
before do
5.times { services.create!(type: 'AlertsService') }
services.create!(type: 'SomeOtherType')
end
it 'removes services records of type AlertsService and corresponding data', :aggregate_failures do
expect(services.count).to eq(6)
migrate!
expect(services.count).to eq(1)
expect(services.first.type).to eq('SomeOtherType')
expect(services.where(type: 'AlertsService')).to be_empty
end
end
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe RemoveAlertsServiceRecords do
let(:services) { table(:services) }
let(:alerts_service_data) { table(:alerts_service_data) }
before do
5.times do
service = services.create!(type: 'AlertsService')
alerts_service_data.create!(service_id: service.id)
end
services.create!(type: 'SomeOtherType')
end
it 'removes services records of type AlertsService and corresponding data', :aggregate_failures do
expect(services.count).to eq(6)
expect(alerts_service_data.count).to eq(5)
migrate!
expect(services.count).to eq(1)
expect(services.first.type).to eq('SomeOtherType')
expect(services.where(type: 'AlertsService')).to be_empty
expect(alerts_service_data.all).to be_empty
end
end
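# Illustrative sketch, not the migration under test: the spec above checks that both
# the AlertsService rows and their dependent alerts_service_data rows are gone after
# the migration runs. Deleting the dependent table first keeps any foreign key
# satisfied; the class name and SQL below are assumptions based only on that
# observable behaviour.
class RemoveAlertsServiceRecordsSketch < ActiveRecord::Migration[6.0]
  def up
    execute(<<~SQL)
      DELETE FROM alerts_service_data
      USING services
      WHERE alerts_service_data.service_id = services.id
        AND services.type = 'AlertsService'
    SQL

    execute("DELETE FROM services WHERE type = 'AlertsService'")
  end

  def down
    # Irreversible data removal; no-op.
  end
end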
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe RescheduleArtifactExpiryBackfill, :migration do
let(:migration_class) { Gitlab::BackgroundMigration::BackfillArtifactExpiryDate }
let(:migration_name) { migration_class.to_s.demodulize }
before do
table(:namespaces).create!(id: 123, name: 'test_namespace', path: 'test_namespace')
table(:projects).create!(id: 123, name: 'sample_project', path: 'sample_project', namespace_id: 123)
end
it 'correctly schedules background migrations' do
first_artifact = create_artifact(job_id: 0, expire_at: nil, created_at: Date.new(2020, 06, 21))
second_artifact = create_artifact(job_id: 1, expire_at: nil, created_at: Date.new(2020, 06, 21))
create_artifact(job_id: 2, expire_at: Date.yesterday, created_at: Date.new(2020, 06, 21))
create_artifact(job_id: 3, expire_at: nil, created_at: Date.new(2020, 06, 23))
Sidekiq::Testing.fake! do
freeze_time do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(1)
expect(migration_name).to be_scheduled_migration_with_multiple_args(first_artifact.id, second_artifact.id)
end
end
end
private
def create_artifact(params)
table(:ci_builds).create!(id: params[:job_id], project_id: 123)
table(:ci_job_artifacts).create!(project_id: 123, file_type: 1, **params)
end
end
......@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
RSpec.describe ScheduleMigratePagesToZipStorage, :sidekiq_might_not_need_inline, schema: 20201231133921 do
RSpec.describe ScheduleMigratePagesToZipStorage, :sidekiq_might_not_need_inline, schema: 20210301200959 do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
......
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe SchedulePopulateFindingUuidForVulnerabilityFeedback do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:vulnerability_feedback) { table(:vulnerability_feedback) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:user) { users.create!(username: 'john.doe', projects_limit: 1) }
let(:common_feedback_params) { { feedback_type: 0, category: 0, project_id: project.id, author_id: user.id } }
let!(:feedback_1) { vulnerability_feedback.create!(**common_feedback_params, project_fingerprint: 'foo') }
let!(:feedback_2) { vulnerability_feedback.create!(**common_feedback_params, project_fingerprint: 'bar') }
let!(:feedback_3) { vulnerability_feedback.create!(**common_feedback_params, project_fingerprint: 'zoo', finding_uuid: SecureRandom.uuid) }
around do |example|
freeze_time { Sidekiq::Testing.fake! { example.run } }
end
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
end
it 'schedules the background jobs', :aggregate_failures do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to be(3)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(2.minutes, feedback_1.id, feedback_1.id)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(4.minutes, feedback_2.id, feedback_2.id)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(6.minutes, feedback_3.id, feedback_3.id)
end
end
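# Illustrative sketch, not the migration under test: the expectations above (one
# delayed job per BATCH_SIZE rows, spaced 2 minutes apart and keyed by id ranges)
# match the pattern produced by queue_background_migration_jobs_by_range_at_intervals
# from Gitlab::Database::MigrationHelpers. A hypothetical post-deployment migration
# using that helper could look roughly like this; the MIGRATION_CLASS constant name
# comes from the spec, while its value, the class name, and the batch size are
# assumptions.
class SchedulePopulateFindingUuidForVulnerabilityFeedbackSketch < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  MIGRATION_CLASS = 'PopulateFindingUuidForVulnerabilityFeedback'
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1_000

  disable_ddl_transaction!

  class VulnerabilityFeedback < ActiveRecord::Base
    include EachBatch

    self.table_name = 'vulnerability_feedback'
  end

  def up
    queue_background_migration_jobs_by_range_at_intervals(
      VulnerabilityFeedback,
      MIGRATION_CLASS,
      DELAY_INTERVAL,
      batch_size: BATCH_SIZE
    )
  end

  def down
    # Scheduled background jobs are not unscheduled on rollback.
  end
end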