Commit 003f4531 authored by Tetiana Chupryna

Merge branch '332613-fix-bugs-with-split-jobs' into 'master'

Fix bugs when splitting batched migration jobs

See merge request gitlab-org/gitlab!65838
parents 58713a68 de47cb58
# frozen_string_literal: true

class IndexBatchedMigrationJobsByMaxValue < ActiveRecord::Migration[6.1]
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  INDEX_NAME = 'index_migration_jobs_on_migration_id_and_max_value'

  def up
    add_concurrent_index :batched_background_migration_jobs, %i(batched_background_migration_id max_value), name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :batched_background_migration_jobs, INDEX_NAME
  end
end
4216604d14b4ccc652ba423a95ee9bd15646b3553903dc4b79497871f5384492
\ No newline at end of file
@@ -24100,6 +24100,8 @@ CREATE INDEX index_metrics_users_starred_dashboards_on_project_id ON metrics_use
CREATE INDEX index_migration_jobs_on_migration_id_and_finished_at ON batched_background_migration_jobs USING btree (batched_background_migration_id, finished_at);
+ CREATE INDEX index_migration_jobs_on_migration_id_and_max_value ON batched_background_migration_jobs USING btree (batched_background_migration_id, max_value);
CREATE INDEX index_milestone_releases_on_release_id ON milestone_releases USING btree (release_id);
CREATE INDEX index_milestones_on_description_trigram ON milestones USING gin (description gin_trgm_ops);
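The composite index added above presumably exists to support the `last_job` lookup that this merge request changes below, which now orders a migration's jobs by `max_value` instead of `id`. A minimal sketch of the query shape the (batched_background_migration_id, max_value) index can serve; the class and association names are taken from the diffs below, while the exact call site is not part of this commit:

    # Hedged sketch only: scoped to one migration and ordered by max_value,
    # matching the column order of the new index.
    Gitlab::Database::BackgroundMigration::BatchedJob
      .where(batched_background_migration_id: migration.id)
      .order(max_value: :desc)
      .first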
@@ -69,7 +69,7 @@ module Gitlab
# In this case, we just lower the batch size so that future calls to this
# method could eventually split the job if it continues to fail.
if midpoint >= max_value
- update!(batch_size: new_batch_size, status: :pending)
+ update!(batch_size: new_batch_size, attempts: 0)
else
old_max_value = max_value
@@ -77,7 +77,6 @@ module Gitlab
batch_size: new_batch_size,
max_value: midpoint,
attempts: 0,
- status: :pending,
started_at: nil,
finished_at: nil,
metrics: {}
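The change to `#split_and_retry!` above means a failed job is no longer flipped back to `pending` when it is split or shrunk; it keeps its `failed` status and only has its `attempts` counter reset, so it can be picked up again through the `retriable` scope exercised in the spec further down (presumably because the retry path selects failed jobs rather than pending ones). A minimal sketch of that flow, assuming a `retriable` scope along these lines; only the scope's name appears in this commit, and MAX_ATTEMPTS is a placeholder for the retry limit:

    # Assumed shape of the scope referenced by the new spec; not part of this diff.
    scope :retriable, -> { failed.where('attempts < ?', MAX_ATTEMPTS) }

    job.split_and_retry!   # halves the range (or just lowers batch_size when it cannot split)
    job.reload.status      # => "failed"  -- no longer reset to "pending"
    job.reload.attempts    # => 0         -- eligible to be retried again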
@@ -10,7 +10,7 @@ module Gitlab
self.table_name = :batched_background_migrations
has_many :batched_jobs, foreign_key: :batched_background_migration_id
- has_one :last_job, -> { order(id: :desc) },
+ has_one :last_job, -> { order(max_value: :desc) },
class_name: 'Gitlab::Database::BackgroundMigration::BatchedJob',
foreign_key: :batched_background_migration_id
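Ordering `last_job` by `max_value` instead of `id` matters once jobs start being split: a split inserts a new row with the highest `id` but a `max_value` below the migration's real high-water mark, so "last by id" can understate how far the migration has progressed. A hedged illustration with made-up ranges and a hypothetical helper to show why the ordering matters; the real call site that consumes `last_job` is not part of this diff:

    # Illustrative only (not production code).
    #   job 1: range    1..1000   -- fails
    #   job 2: range 1001..2000   -- created afterwards, so progress is really 2000
    #   job 3: range  501..1000   -- inserted later when job 1 is split and retried
    # Ordering by id would return job 3 (max_value 1000); ordering by max_value
    # returns job 2 (max_value 2000), the true upper bound handled so far.
    def hypothetical_next_min_value(migration)
      last = migration.last_job               # now the job with the highest max_value
      last ? last.max_value + 1 : migration.min_value
    end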
@@ -126,40 +126,50 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
describe '#split_and_retry!' do
- let!(:job) { create(:batched_background_migration_job, batch_size: 10, min_value: 6, max_value: 15, status: :failed) }
- it 'splits the job into two and marks them as pending' do
- allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
- end
- expect { job.split_and_retry! }.to change { described_class.count }.by(1)
- expect(job).to have_attributes(
- min_value: 6,
- max_value: 10,
- batch_size: 5,
- status: 'pending',
- attempts: 0,
- started_at: nil,
- finished_at: nil,
- metrics: {}
- )
- new_job = described_class.last
- expect(new_job).to have_attributes(
- batched_background_migration_id: job.batched_background_migration_id,
- min_value: 11,
- max_value: 15,
- batch_size: 5,
- status: 'pending',
- attempts: 0,
- started_at: nil,
- finished_at: nil,
- metrics: {}
- )
- expect(new_job.created_at).not_to eq(job.created_at)
+ let!(:job) { create(:batched_background_migration_job, batch_size: 10, min_value: 6, max_value: 15, status: :failed, attempts: 3) }
+ context 'when job can be split' do
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ end
+ end
+ it 'sets the correct attributes' do
+ expect { job.split_and_retry! }.to change { described_class.count }.by(1)
+ expect(job).to have_attributes(
+ min_value: 6,
+ max_value: 10,
+ batch_size: 5,
+ status: 'failed',
+ attempts: 0,
+ started_at: nil,
+ finished_at: nil,
+ metrics: {}
+ )
+ new_job = described_class.last
+ expect(new_job).to have_attributes(
+ batched_background_migration_id: job.batched_background_migration_id,
+ min_value: 11,
+ max_value: 15,
+ batch_size: 5,
+ status: 'failed',
+ attempts: 0,
+ started_at: nil,
+ finished_at: nil,
+ metrics: {}
+ )
+ expect(new_job.created_at).not_to eq(job.created_at)
+ end
+ it 'splits the jobs into retriable jobs' do
+ migration = job.batched_migration
+ expect { job.split_and_retry! }.to change { migration.batched_jobs.retriable.count }.from(0).to(2)
+ end
end
context 'when job is not failed' do
@@ -185,11 +195,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
- it 'lowers the batch size and marks the job as pending' do
+ it 'lowers the batch size and resets the number of attempts' do
expect { job.split_and_retry! }.not_to change { described_class.count }
expect(job.batch_size).to eq(5)
- expect(job.status).to eq('pending')
+ expect(job.attempts).to eq(0)
+ expect(job.status).to eq('failed')
end
end
end
@@ -10,11 +10,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#last_job' do
let!(:batched_migration) { create(:batched_background_migration) }
- let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration) }
- let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration) }
+ let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration, max_value: 1000) }
+ let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration, max_value: 500) }
- it 'returns the most recent (in order of id) batched job' do
- expect(batched_migration.last_job).to eq(batched_job2)
+ it 'returns the batched job with highest max_value' do
+ expect(batched_migration.last_job).to eq(batched_job1)
end
end
end