Commit 70968d11 authored by Michael Kozono

Merge branch '342603-remove-geo_upload_replication-flag-as-well-as-legacy-code' into 'master'

Geo: Remove legacy code for upload sync

See merge request gitlab-org/gitlab!72714
parents 58437b93 3fc77c2b
......@@ -176,7 +176,6 @@ Rails/TimeZone:
- 'ee/spec/lib/gitlab/geo/log_cursor/events/repository_renamed_event_spec.rb'
- 'ee/spec/lib/gitlab/geo/log_cursor/events/repository_updated_event_spec.rb'
- 'ee/spec/lib/gitlab/geo/log_cursor/events/reset_checksum_event_spec.rb'
- 'ee/spec/lib/gitlab/geo/log_cursor/events/upload_deleted_event_spec.rb'
- 'ee/spec/lib/gitlab/geo/log_cursor/logger_spec.rb'
- 'ee/spec/lib/gitlab/git_access_spec.rb'
- 'ee/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb'
......
# frozen_string_literal: true
class RemoveGeoUploadDeprecatedFields < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
with_lock_retries do
remove_column :geo_event_log, :upload_deleted_event_id, :bigint
end
end
def down
with_lock_retries do
add_column(:geo_event_log, :upload_deleted_event_id, :bigint) unless column_exists?(:geo_event_log, :upload_deleted_event_id)
end
add_concurrent_foreign_key :geo_event_log, :geo_upload_deleted_events,
column: :upload_deleted_event_id,
name: 'fk_c1f241c70d',
on_delete: :cascade
add_concurrent_index :geo_event_log,
:upload_deleted_event_id,
name: 'index_geo_event_log_on_upload_deleted_event_id',
where: "(upload_deleted_event_id IS NOT NULL)"
end
end
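A quick way to sanity-check this migration locally, as a purely illustrative sketch that is not part of the merge request (it uses plain ActiveRecord introspection and the table/column names from the migration above):
# Illustrative sketch only: confirm the column and its partial index are gone after `up`.
conn = ActiveRecord::Base.connection
conn.column_exists?(:geo_event_log, :upload_deleted_event_id)
# => expected false after the migration runs
conn.index_exists?(:geo_event_log, :upload_deleted_event_id,
                   name: 'index_geo_event_log_on_upload_deleted_event_id')
# => expected false, the index is dropped together with the column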
# frozen_string_literal: true
class DropGeoUploadDeletedEventsTable < Gitlab::Database::Migration[1.0]
def up
drop_table :geo_upload_deleted_events
end
def down
create_table :geo_upload_deleted_events, id: :bigserial do |t|
t.integer :upload_id, null: false, index: true
t.string :file_path, null: false
t.integer :model_id, null: false
t.string :model_type, null: false
t.string :uploader, null: false
end
end
end
# frozen_string_literal: true
class RemoveOutdatedFieldsFromGeoNodeStatus < Gitlab::Database::Migration[1.0]
enable_lock_retries!
def change
remove_column :geo_node_statuses, :attachments_count, :integer
remove_column :geo_node_statuses, :attachments_synced_count, :integer
remove_column :geo_node_statuses, :attachments_failed_count, :integer
remove_column :geo_node_statuses, :attachments_synced_missing_on_primary_count, :integer
end
end
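Passing the column type as the third argument keeps this `change` method reversible. Roughly, this is what a rollback would perform for the block above (an illustrative sketch only, not part of the merge request):
# Sketch of the implicit down direction generated by the reversible change block:
add_column :geo_node_statuses, :attachments_count, :integer
add_column :geo_node_statuses, :attachments_synced_count, :integer
add_column :geo_node_statuses, :attachments_failed_count, :integer
add_column :geo_node_statuses, :attachments_synced_missing_on_primary_count, :integer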
bc7974917509bfbda47375299009295bc5a55970b92443dd5d7134075b161279
\ No newline at end of file
483e4cbe2a0be2afbda511f2298e3715abaca29afafeeae26449fc862f49a08f
\ No newline at end of file
c474870a626c909da772a1c9f459f369d50658ce8f585a35e7cc3c7ef64af657
\ No newline at end of file
......@@ -14170,7 +14170,6 @@ CREATE TABLE geo_event_log (
hashed_storage_migrated_event_id bigint,
lfs_object_deleted_event_id bigint,
hashed_storage_attachments_event_id bigint,
upload_deleted_event_id bigint,
job_artifact_deleted_event_id bigint,
reset_checksum_event_id bigint,
cache_invalidation_event_id bigint,
......@@ -14300,9 +14299,6 @@ CREATE TABLE geo_node_statuses (
lfs_objects_count integer,
lfs_objects_synced_count integer,
lfs_objects_failed_count integer,
attachments_count integer,
attachments_synced_count integer,
attachments_failed_count integer,
last_event_id integer,
last_event_date timestamp without time zone,
cursor_last_event_id integer,
......@@ -14327,7 +14323,6 @@ CREATE TABLE geo_node_statuses (
wikis_verification_failed_count integer,
lfs_objects_synced_missing_on_primary_count integer,
job_artifacts_synced_missing_on_primary_count integer,
attachments_synced_missing_on_primary_count integer,
repositories_checksummed_count integer,
repositories_checksum_failed_count integer,
repositories_checksum_mismatch_count integer,
......@@ -14496,24 +14491,6 @@ CREATE SEQUENCE geo_reset_checksum_events_id_seq
ALTER SEQUENCE geo_reset_checksum_events_id_seq OWNED BY geo_reset_checksum_events.id;
CREATE TABLE geo_upload_deleted_events (
id bigint NOT NULL,
upload_id integer NOT NULL,
file_path character varying NOT NULL,
model_id integer NOT NULL,
model_type character varying NOT NULL,
uploader character varying NOT NULL
);
CREATE SEQUENCE geo_upload_deleted_events_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE geo_upload_deleted_events_id_seq OWNED BY geo_upload_deleted_events.id;
CREATE TABLE gitlab_subscription_histories (
id bigint NOT NULL,
gitlab_subscription_created_at timestamp with time zone,
......@@ -21510,8 +21487,6 @@ ALTER TABLE ONLY geo_repository_updated_events ALTER COLUMN id SET DEFAULT nextv
ALTER TABLE ONLY geo_reset_checksum_events ALTER COLUMN id SET DEFAULT nextval('geo_reset_checksum_events_id_seq'::regclass);
ALTER TABLE ONLY geo_upload_deleted_events ALTER COLUMN id SET DEFAULT nextval('geo_upload_deleted_events_id_seq'::regclass);
ALTER TABLE ONLY gitlab_subscription_histories ALTER COLUMN id SET DEFAULT nextval('gitlab_subscription_histories_id_seq'::regclass);
ALTER TABLE ONLY gitlab_subscriptions ALTER COLUMN id SET DEFAULT nextval('gitlab_subscriptions_id_seq'::regclass);
......@@ -23129,9 +23104,6 @@ ALTER TABLE ONLY geo_repository_updated_events
ALTER TABLE ONLY geo_reset_checksum_events
ADD CONSTRAINT geo_reset_checksum_events_pkey PRIMARY KEY (id);
ALTER TABLE ONLY geo_upload_deleted_events
ADD CONSTRAINT geo_upload_deleted_events_pkey PRIMARY KEY (id);
ALTER TABLE ONLY gitlab_subscription_histories
ADD CONSTRAINT gitlab_subscription_histories_pkey PRIMARY KEY (id);
......@@ -25950,8 +25922,6 @@ CREATE INDEX index_geo_event_log_on_repository_updated_event_id ON geo_event_log
CREATE INDEX index_geo_event_log_on_reset_checksum_event_id ON geo_event_log USING btree (reset_checksum_event_id) WHERE (reset_checksum_event_id IS NOT NULL);
CREATE INDEX index_geo_event_log_on_upload_deleted_event_id ON geo_event_log USING btree (upload_deleted_event_id) WHERE (upload_deleted_event_id IS NOT NULL);
CREATE INDEX index_geo_hashed_storage_attachments_events_on_project_id ON geo_hashed_storage_attachments_events USING btree (project_id);
CREATE INDEX index_geo_hashed_storage_migrated_events_on_project_id ON geo_hashed_storage_migrated_events USING btree (project_id);
......@@ -25988,8 +25958,6 @@ CREATE INDEX index_geo_repository_updated_events_on_source ON geo_repository_upd
CREATE INDEX index_geo_reset_checksum_events_on_project_id ON geo_reset_checksum_events USING btree (project_id);
CREATE INDEX index_geo_upload_deleted_events_on_upload_id ON geo_upload_deleted_events USING btree (upload_id);
CREATE INDEX index_gin_ci_pending_builds_on_namespace_traversal_ids ON ci_pending_builds USING gin (namespace_traversal_ids);
CREATE INDEX index_gitlab_subscription_histories_on_gitlab_subscription_id ON gitlab_subscription_histories USING btree (gitlab_subscription_id);
......@@ -29352,9 +29320,6 @@ ALTER TABLE ONLY design_management_versions
ALTER TABLE ONLY packages_packages
ADD CONSTRAINT fk_c188f0dba4 FOREIGN KEY (creator_id) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY geo_event_log
ADD CONSTRAINT fk_c1f241c70d FOREIGN KEY (upload_deleted_event_id) REFERENCES geo_upload_deleted_events(id) ON DELETE CASCADE;
ALTER TABLE ONLY analytics_cycle_analytics_project_stages
ADD CONSTRAINT fk_c3339bdfc9 FOREIGN KEY (stage_event_hash_id) REFERENCES analytics_cycle_analytics_stage_event_hashes(id) ON DELETE CASCADE;
......@@ -190,9 +190,6 @@ configuration option in `gitlab.yml`. These metrics are served from the
| `geo_lfs_objects` | Gauge | 10.2 | Total number of LFS objects available on primary | `url` |
| `geo_lfs_objects_synced` | Gauge | 10.2 | Number of LFS objects synced on secondary | `url` |
| `geo_lfs_objects_failed` | Gauge | 10.2 | Number of LFS objects failed to sync on secondary | `url` |
| `geo_attachments` | Gauge | 10.2 | Total number of file attachments available on primary | `url` |
| `geo_attachments_synced` | Gauge | 10.2 | Number of attachments synced on secondary | `url` |
| `geo_attachments_failed` | Gauge | 10.2 | Number of attachments failed to sync on secondary | `url` |
| `geo_last_event_id` | Gauge | 10.2 | Database ID of the latest event log entry on the primary | `url` |
| `geo_last_event_timestamp` | Gauge | 10.2 | UNIX timestamp of the latest event log entry on the primary | `url` |
| `geo_cursor_last_event_id` | Gauge | 10.2 | Last database ID of the event log processed by the secondary | `url` |
......@@ -201,7 +198,6 @@ configuration option in `gitlab.yml`. These metrics are served from the
| `geo_last_successful_status_check_timestamp` | Gauge | 10.2 | Last timestamp when the status was successfully updated | `url` |
| `geo_lfs_objects_synced_missing_on_primary` | Gauge | 10.7 | Number of LFS objects marked as synced due to the file missing on the primary | `url` |
| `geo_job_artifacts_synced_missing_on_primary` | Gauge | 10.7 | Number of job artifacts marked as synced due to the file missing on the primary | `url` |
| `geo_attachments_synced_missing_on_primary` | Gauge | 10.7 | Number of attachments marked as synced due to the file missing on the primary | `url` |
| `geo_repositories_checksummed` | Gauge | 10.7 | Number of repositories checksummed on primary | `url` |
| `geo_repositories_checksum_failed` | Gauge | 10.7 | Number of repositories failed to calculate the checksum on primary | `url` |
| `geo_wikis_checksummed` | Gauge | 10.7 | Number of wikis checksummed on primary | `url` |
......
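For context on the documentation change above: the remaining Geo gauges are still exposed for scraping. A minimal sketch of fetching them, assuming a reachable secondary and the standard exposition endpoint; the host, path, and lack of authentication here are placeholders rather than anything defined by this merge request:
# Illustrative only: list the remaining Geo LFS gauges from a secondary.
require 'net/http'
uri  = URI('https://gitlab-secondary.example.com/-/metrics') # host and path are assumptions
body = Net::HTTP.get(uri)
puts body.lines.grep(/\Ageo_lfs_objects/)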
......@@ -306,11 +306,6 @@ Example response:
"health": "Healthy",
"health_status": "Healthy",
"missing_oauth_application": false,
"attachments_count": 1,
"attachments_synced_count": null,
"attachments_failed_count": null,
"attachments_synced_missing_on_primary_count": 0,
"attachments_synced_in_percentage": "0.00%",
"db_replication_lag_seconds": null,
"lfs_objects_count": 0,
"lfs_objects_synced_count": null,
......@@ -465,11 +460,6 @@ Example response:
"health": "Healthy",
"health_status": "Healthy",
"missing_oauth_application": false,
"attachments_count": 1,
"attachments_synced_count": 1,
"attachments_failed_count": 0,
"attachments_synced_missing_on_primary_count": 0,
"attachments_synced_in_percentage": "100.00%",
"db_replication_lag_seconds": 0,
"lfs_objects_count": 0,
"lfs_objects_synced_count": 0,
......@@ -628,11 +618,6 @@ Example response:
"health": "Healthy",
"health_status": "Healthy",
"missing_oauth_application": false,
"attachments_count": 1,
"attachments_synced_count": 1,
"attachments_failed_count": 0,
"attachments_synced_missing_on_primary_count": 0,
"attachments_synced_in_percentage": "100.00%",
"db_replication_lag_seconds": 0,
"lfs_objects_count": 0,
"lfs_objects_synced_count": 0,
......
......@@ -231,10 +231,6 @@ We also collect metrics specific to [Geo](../../administration/geo/index.md) sec
"repositories_replication_enabled"=>true,
"repositories_synced_count"=>24,
"repositories_failed_count"=>0,
"attachments_replication_enabled"=>true,
"attachments_count"=>1,
"attachments_synced_count"=>1,
"attachments_failed_count"=>0,
"git_fetch_event_count_weekly"=>nil,
"git_push_event_count_weekly"=>nil,
... other geo node status fields
......
# frozen_string_literal: true
class Admin::Geo::UploadsController < Admin::Geo::ApplicationController
before_action :check_license!
before_action :registries, only: [:index]
def index
end
def destroy
if registry.upload
flash[:alert] = s_('Geo|Could not remove tracking entry for an existing upload.')
return redirect_back_or_default(default: admin_geo_uploads_path)
end
registry.destroy
flash[:toast] = s_('Geo|Tracking entry for upload (%{type}/%{id}) was successfully removed.') % { type: registry.file_type, id: registry.file_id }
redirect_back_or_default(default: admin_geo_uploads_path)
end
private
def registries
@registries ||=
::Geo::UploadRegistry
.with_status(params[:sync_status])
.with_search(params[:name])
.fresh
.page(params[:page])
end
def registry
@registry ||= ::Geo::UploadRegistry.find_by_id(params[:id])
end
end
# frozen_string_literal: true
module Geo
class AttachmentLegacyRegistryFinder < FileRegistryFinder
def registry_class
Geo::UploadRegistry
end
end
end
......@@ -224,18 +224,6 @@ module EE
}
]
if ::Geo::UploadReplicator.disabled?
replicable_types.insert(2, {
data_type: 'blob',
data_type_title: _('File'),
title: _('Upload'),
title_plural: _('Uploads'),
name: 'attachment',
name_plural: 'attachments',
secondary_view: true
})
end
# Adds all the SSF Data Types automatically
enabled_replicator_classes.each do |replicator_class|
replicable_types.push(
......
......@@ -14,8 +14,6 @@ module EE
with_replicator ::Geo::UploadReplicator
after_destroy :log_geo_deleted_event
scope :for_model, ->(model) { where(model_id: model.id, model_type: model.class.name) }
scope :syncable, -> { with_files_stored_locally }
end
......@@ -76,7 +74,8 @@ module EE
end
def log_geo_deleted_event
::Geo::UploadDeletedEventStore.new(self).create!
# Keep empty for now. Should be addressed in the future
# by https://gitlab.com/gitlab-org/gitlab/issues/33817
end
end
end
......@@ -15,7 +15,6 @@ module Geo
Geo::HashedStorageMigratedEvent
Geo::HashedStorageAttachmentsEvent
Geo::JobArtifactDeletedEvent
Geo::UploadDeletedEvent
Geo::ContainerRepositoryUpdatedEvent
Geo::Event].freeze
......@@ -55,10 +54,6 @@ module Geo
class_name: 'Geo::JobArtifactDeletedEvent',
foreign_key: :job_artifact_deleted_event_id
belongs_to :upload_deleted_event,
class_name: 'Geo::UploadDeletedEvent',
foreign_key: :upload_deleted_event_id
belongs_to :reset_checksum_event,
class_name: 'Geo::ResetChecksumEvent',
foreign_key: :reset_checksum_event_id
......@@ -101,7 +96,6 @@ module Geo
hashed_storage_migrated_event ||
hashed_storage_attachments_event ||
job_artifact_deleted_event ||
upload_deleted_event ||
reset_checksum_event ||
cache_invalidation_event ||
container_repository_updated_event ||
......
# frozen_string_literal: true
module Geo
class UploadDeletedEvent < ApplicationRecord
include Geo::Model
include Geo::Eventable
belongs_to :upload
validates :upload, :file_path, :model_id, :model_type, :uploader, presence: true
def upload_type
uploader&.sub(/Uploader\z/, '')&.underscore
end
end
end
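For reference, the `upload_type` helper removed above is a plain string transformation; a worked example of what it produced (illustrative only, `underscore` is provided by ActiveSupport):
# Illustrative only: how upload_type mapped an uploader class name to a file type.
require 'active_support/core_ext/string'
'PersonalFileUploader'.sub(/Uploader\z/, '').underscore # => "personal_file"
'AttachmentUploader'.sub(/Uploader\z/, '').underscore   # => "attachment"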
# frozen_string_literal: true
class Geo::UploadRegistry < Geo::BaseRegistry
include Geo::Syncable
include ::Geo::ReplicableRegistry
extend ::Gitlab::Utils::Override
......@@ -15,35 +14,14 @@ class Geo::UploadRegistry < Geo::BaseRegistry
scope :fresh, -> { order(created_at: :desc) }
# Returns untracked uploads as well as tracked uploads that are unused.
#
# Untracked uploads is an array where each item is a tuple of [id, file_type]
# that is supposed to be synced but doesn't yet have a registry entry.
#
# Unused uploads is an array where each item is a tuple of [id, file_type]
# that is not supposed to be synced but already has a registry entry. For
# example:
#
# - orphaned registries
# - records that became excluded from selective sync
# - records that are in object storage, and `sync_object_storage` became
# disabled
#
# We compute both sets in this method to reduce the number of DB queries
# performed.
#
# @return [Array] the first element is an Array of untracked uploads, and the
# second element is an Array of tracked uploads that are unused.
# For example: [[[1, 'avatar'], [5, 'file']], [[3, 'attachment']]]
def self.find_registry_differences(range)
source =
self::MODEL_CLASS.replicables_for_current_secondary(range)
.pluck(self::MODEL_CLASS.arel_table[:id], self::MODEL_CLASS.arel_table[:uploader])
.map! { |id, uploader| [id, uploader.sub(/Uploader\z/, '').underscore] }
.pluck(self::MODEL_CLASS.arel_table[:id])
tracked =
self.model_id_in(range)
.pluck(:file_id, :file_type)
.pluck(:file_id)
untracked = source - tracked
unused_tracked = tracked - source
......@@ -58,16 +36,16 @@ class Geo::UploadRegistry < Geo::BaseRegistry
end
def self.insert_for_model_ids(attrs)
records = attrs.map do |file_id, file_type|
new(file_id: file_id, file_type: file_type, created_at: Time.zone.now)
records = attrs.map do |file_id|
new(file_id: file_id, created_at: Time.zone.now)
end
bulk_insert!(records, returns: :ids)
end
def self.delete_for_model_ids(attrs)
attrs.map do |file_id, file_type|
delete_worker_class.perform_async(file_type, file_id)
attrs.map do |file_id|
delete_worker_class.perform_async(:upload, file_id)
end
end
......@@ -93,62 +71,10 @@ class Geo::UploadRegistry < Geo::BaseRegistry
end
def file
upload&.path || s_('Removed %{type} with id %{id}') % { type: file_type, id: file_id }
upload&.path || s_('Removed upload with id %{id}') % { id: file_id }
end
def project
return upload.model if upload&.model.is_a?(Project)
end
# Returns a synchronization state based on existing attribute values
#
# It takes into account things like whether a successful replication has been done
# and whether there are pending actions or existing errors
#
# @return [Symbol] :synced, :never, or :failed
def synchronization_state
return :synced if success?
return :never if retry_count.nil?
:failed
end
# TODO Remove this when enabling geo_upload_registry by default
# https://gitlab.com/gitlab-org/gitlab/-/issues/340617
override :registry_consistency_worker_enabled?
def self.registry_consistency_worker_enabled?
true
end
def self.failed
if ::Geo::UploadReplicator.enabled?
with_state(:failed)
else
where(success: false).where.not(retry_count: nil)
end
end
def self.never_attempted_sync
if ::Geo::UploadReplicator.enabled?
pending.where(last_synced_at: nil)
else
where(success: false, retry_count: nil)
end
end
def self.retry_due
if ::Geo::UploadReplicator.enabled?
where(arel_table[:retry_at].eq(nil).or(arel_table[:retry_at].lt(Time.current)))
else
where('retry_at is NULL OR retry_at < ?', Time.current)
end
end
def self.synced
if ::Geo::UploadReplicator.enabled?
with_state(:synced).or(where(success: true))
else
where(success: true)
end
end
end
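With the legacy `::Geo::UploadReplicator.enabled?` branches removed above, the registry is queried solely through the state-based scopes that `Geo::ReplicableRegistry` provides. A minimal sketch of the equivalent calls, under the assumption that the concern supplies these scopes (the removed fallbacks suggest it does):
# Illustrative only: SSF-style scopes replacing the success/retry_count fallbacks.
Geo::UploadRegistry.failed.count               # was: where(success: false).where.not(retry_count: nil)
Geo::UploadRegistry.synced.count               # was: where(success: true)
Geo::UploadRegistry.never_attempted_sync.count # was: where(success: false, retry_count: nil)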
......@@ -16,7 +16,7 @@ class GeoNodeStatus < ApplicationRecord
attr_accessor :event_log_max_id, :repository_created_max_id, :repository_updated_max_id,
:repository_deleted_max_id, :repository_renamed_max_id, :repositories_changed_max_id,
:lfs_object_deleted_max_id, :job_artifact_deleted_max_id,
:lfs_objects_registry_count, :job_artifacts_registry_count, :attachments_registry_count,
:lfs_objects_registry_count, :job_artifacts_registry_count,
:hashed_storage_migrated_max_id, :hashed_storage_attachments_max_id,
:repositories_checked_count, :repositories_checked_failed_count
......@@ -64,10 +64,6 @@ class GeoNodeStatus < ApplicationRecord
repositories_replication_enabled
repositories_synced_count
repositories_failed_count
attachments_replication_enabled
attachments_count
attachments_synced_count
attachments_failed_count
wikis_synced_count
wikis_failed_count
job_artifacts_replication_enabled
......@@ -81,7 +77,6 @@ class GeoNodeStatus < ApplicationRecord
wikis_verification_failed_count
wikis_verification_total_count
job_artifacts_synced_missing_on_primary_count
attachments_synced_missing_on_primary_count
repositories_checksummed_count
repositories_checksum_failed_count
repositories_checksum_mismatch_count
......@@ -137,12 +132,6 @@ class GeoNodeStatus < ApplicationRecord
job_artifacts_failed_count: 'Number of syncable job artifacts failed to sync on secondary',
job_artifacts_registry_count: 'Number of job artifacts in the registry',
job_artifacts_synced_missing_on_primary_count: 'Number of job artifacts marked as synced due to the file missing on the primary',
attachments_replication_enabled: 'Boolean denoting if replication is enabled for Attachments',
attachments_count: 'Total number of syncable file attachments available on primary',
attachments_synced_count: 'Number of syncable file attachments synced on secondary',
attachments_failed_count: 'Number of syncable file attachments failed to sync on secondary',
attachments_registry_count: 'Number of attachments in the registry',
attachments_synced_missing_on_primary_count: 'Number of attachments marked as synced due to the file missing on the primary',
replication_slots_count: 'Total number of replication slots on the primary',
replication_slots_used_count: 'Number of replication slots in use on the primary',
replication_slots_max_retained_wal_bytes: 'Maximum number of bytes retained in the WAL on the primary',
......@@ -287,7 +276,6 @@ class GeoNodeStatus < ApplicationRecord
self.repository_verification_enabled = Gitlab::Geo.repository_verification_enabled?
if Gitlab::Geo.secondary?
self.attachments_replication_enabled = Geo::UploadRegistry.replication_enabled?
self.container_repositories_replication_enabled = Geo::ContainerRepositoryRegistry.replication_enabled?
self.design_repositories_replication_enabled = Geo::DesignRegistry.replication_enabled?
self.job_artifacts_replication_enabled = Geo::JobArtifactRegistry.replication_enabled?
......@@ -390,7 +378,6 @@ class GeoNodeStatus < ApplicationRecord
attr_in_percentage :wikis_checksummed, :wikis_checksummed_count, :wikis_count
attr_in_percentage :wikis_verified, :wikis_verified_count, :wikis_count
attr_in_percentage :job_artifacts_synced, :job_artifacts_synced_count, :job_artifacts_count
attr_in_percentage :attachments_synced, :attachments_synced_count, :attachments_count
attr_in_percentage :replication_slots_used, :replication_slots_used_count, :replication_slots_count
attr_in_percentage :container_repositories_synced, :container_repositories_synced_count, :container_repositories_count
attr_in_percentage :design_repositories_synced, :design_repositories_synced_count, :design_repositories_count
......@@ -465,7 +452,6 @@ class GeoNodeStatus < ApplicationRecord
load_repositories_data
load_job_artifacts_data
load_attachments_data
load_container_registry_data
load_designs_data
load_ssf_replicable_data
......@@ -490,16 +476,6 @@ class GeoNodeStatus < ApplicationRecord
self.job_artifacts_synced_missing_on_primary_count = job_artifacts_finder.synced_missing_on_primary_count
end
def load_attachments_data
return unless attachments_replication_enabled
self.attachments_count = attachments_finder.registry_count
self.attachments_synced_count = attachments_finder.synced_count
self.attachments_failed_count = attachments_finder.failed_count
self.attachments_registry_count = attachments_finder.registry_count
self.attachments_synced_missing_on_primary_count = attachments_finder.synced_missing_on_primary_count
end
def load_container_registry_data
return unless container_repositories_replication_enabled
......@@ -594,10 +570,6 @@ class GeoNodeStatus < ApplicationRecord
@primary_storage_digest ||= Gitlab::Geo.primary_node.find_or_build_status.storage_configuration_digest
end
def attachments_finder
@attachments_finder ||= Geo::AttachmentLegacyRegistryFinder.new
end
def job_artifacts_finder
@job_artifacts_finder ||= Geo::JobArtifactRegistryFinder.new
end
......
......@@ -37,16 +37,5 @@ module Geo
def carrierwave_uploader
model_record.retrieve_uploader
end
# TODO: This method can be removed as part of
# https://gitlab.com/gitlab-org/gitlab/-/issues/340617
override :registry
def registry
super.tap do |record|
# We don't really need this value for SSF; it's only needed to make
# new registry records valid for legacy code in case the feature is disabled.
record.file_type ||= model_record.uploader.delete_suffix("Uploader").underscore
end
end
end
end
......@@ -2,9 +2,8 @@
module Geo
# This class is responsible for:
# * Finding the appropriate Downloader class for a UploadRegistry record
# * Executing the Downloader
# * Marking the UploadRegistry record as synced or needing retry
# * Marking the Registry record as synced or needing retry
class FileDownloadService < BaseFileService
include Gitlab::Utils::StrongMemoize
......@@ -36,7 +35,6 @@ module Geo
private
def downloader_klass
return Gitlab::Geo::Replication::FileDownloader if user_upload?
return Gitlab::Geo::Replication::JobArtifactDownloader if job_artifact?
fail_unimplemented_klass!(type: 'Downloader')
......@@ -59,11 +57,7 @@ module Geo
# rubocop: disable CodeReuse/ActiveRecord
def registry
strong_memoize(:registry) do
if job_artifact?
Geo::JobArtifactRegistry.find_or_initialize_by(artifact_id: object_db_id)
else
Geo::UploadRegistry.find_or_initialize_by(file_type: object_type, file_id: object_db_id)
end
Geo::JobArtifactRegistry.find_or_initialize_by(artifact_id: object_db_id)
end
end
# rubocop: enable CodeReuse/ActiveRecord
......
......@@ -43,8 +43,6 @@ module Geo
strong_memoize(:file_registry) do
if job_artifact?
::Geo::JobArtifactRegistry.find_by(artifact_id: object_db_id)
elsif user_upload?
::Geo::UploadRegistry.find_by(file_type: object_type, file_id: object_db_id)
elsif replicator
replicator.registry
end
......@@ -89,13 +87,7 @@ module Geo
next if file_uploader.nil?
next file_uploader.file.path if file_uploader.object_store == ObjectStorage::Store::LOCAL
# For remote storage, more juggling is needed to actually get the full path on disk
if user_upload?
upload = file_uploader.upload
file_uploader.class.absolute_path(upload)
else
file_uploader.class.absolute_path(file_uploader.file)
end
file_uploader.class.absolute_path(file_uploader.file)
end
end
......
# frozen_string_literal: true
module Geo
class UploadDeletedEventStore < EventStore
extend ::Gitlab::Utils::Override
self.event_type = :upload_deleted_event
attr_reader :upload
def initialize(upload)
@upload = upload
end
private
def build_event
Geo::UploadDeletedEvent.new(
upload: upload,
file_path: upload.path,
model_id: upload.model_id,
model_type: upload.model_type,
uploader: upload.uploader
)
end
# This is called by LogHelpers to build json log with context info
#
# @see ::Gitlab::Geo::LogHelpers
def extra_log_data
{
upload_id: upload.id,
file_path: upload.path,
model_id: upload.model_id,
model_type: upload.model_type,
uploader: upload.uploader
}.compact
end
end
end
......@@ -10,11 +10,6 @@
= link_to admin_geo_projects_path, title: _('Projects') do
%span
= _('Projects')
- if ::Geo::UploadReplicator.disabled?
= nav_link(path: 'admin/geo/uploads#index', html_options: { class: 'gl-pr-2' }) do
= link_to admin_geo_uploads_path, title: _('Uploads') do
%span
= _('Uploads')
= nav_link(path: 'admin/geo/designs#index', html_options: { class: 'gl-pr-2' }) do
= link_to admin_geo_designs_path, title: _('Designs') do
%span
......
......@@ -71,13 +71,7 @@ module Geo
end
def job_finders
job_finders = [Geo::FileDownloadDispatchWorker::JobArtifactJobFinder.new(scheduled_file_ids(:job_artifact))]
if ::Geo::UploadReplicator.disabled?
job_finders << Geo::FileDownloadDispatchWorker::AttachmentJobFinder.new(scheduled_file_ids(Gitlab::Geo::Replication::USER_UPLOADS_OBJECT_TYPES))
end
job_finders
[Geo::FileDownloadDispatchWorker::JobArtifactJobFinder.new(scheduled_file_ids(:job_artifact))]
end
def scheduled_file_ids(file_types)
......
# frozen_string_literal: true
module Geo
class FileDownloadDispatchWorker # rubocop:disable Scalability/IdempotentWorker
class AttachmentJobFinder < JobFinder # rubocop:disable Scalability/IdempotentWorker
EXCEPT_RESOURCE_IDS_KEY = :except_ids
def registry_finder
@registry_finder ||= Geo::AttachmentLegacyRegistryFinder.new
end
private
# Why do we need a different `file_type` for each Uploader? Why not just use 'upload'?
# rubocop: disable CodeReuse/ActiveRecord
def convert_resource_relation_to_job_args(relation)
relation.pluck(:id, :uploader)
.map! { |id, uploader| [uploader.sub(/Uploader\z/, '').underscore, id] }
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def convert_registry_relation_to_job_args(relation)
relation.pluck(:file_type, :file_id)
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end
......@@ -4,7 +4,6 @@
"type": "object",
"properties": {
"projects_count": { "type": "number", "description": "Projects count" },
"attachments_count": { "type": "number", "description": "Attachments count" },
"lfs_objects_count": { "type": "number" },
"wikis_failed_count": { "type": "number" },
"wikis_synced_count": { "type": "number" },
......@@ -13,8 +12,6 @@
"wikis_verified_count": { "type": "number" },
"pages_deployments_count": { "type": "number" },
"wikis_checksummed_count": { "type": "number" },
"attachments_failed_count": { "type": "number" },
"attachments_synced_count": { "type": "number" },
"lfs_objects_failed_count": { "type": "number" },
"lfs_objects_synced_count": { "type": "number" },
"pipeline_artifacts_count": { "type": "number" },
......@@ -45,7 +42,6 @@
"repositories_checksummed_count": { "type": "number" },
"terraform_state_versions_count": { "type": "number" },
"wikis_verification_total_count": { "type": "number" },
"attachments_replication_enabled": { "type": "boolean" },
"package_files_checksummed_count": { "type": "number" },
"pipeline_artifacts_failed_count": { "type": "number" },
"pipeline_artifacts_synced_count": { "type": "number" },
......@@ -109,7 +105,6 @@
"pages_deployments_verification_total_count": { "type": "number" },
"snippet_repositories_checksum_failed_count": { "type": "number" },
"terraform_state_versions_checksummed_count": { "type": "number" },
"attachments_synced_missing_on_primary_count": { "type": "number" },
"pages_deployments_verification_failed_count": { "type": "number" },
"pipeline_artifacts_verification_total_count": { "type": "number" },
"group_wiki_repositories_checksum_total_count": { "type": "number" },
......
......@@ -70,8 +70,6 @@ namespace :admin do
resources :designs, only: [:index]
resources :uploads, only: [:index, :destroy], path: 'legacy-uploads'
get '/:replicable_name_plural', to: 'replicables#index', as: 'replicables'
end
......
# frozen_string_literal: true
class RemoveUploadLegacyFields < Gitlab::Database::Migration[1.0]
def change
remove_column :file_registry, :file_type, :string, null: false
remove_column :file_registry, :success, :boolean, null: false
remove_column :file_registry, :bytes, :integer
remove_column :file_registry, :sha256, :string
end
end
b537418e1e9d1106f2ec388e1540ff67f19d4eb5a882770d1ce775bddbb167b4
\ No newline at end of file
......@@ -65,12 +65,8 @@ ALTER SEQUENCE event_log_states_event_id_seq OWNED BY event_log_states.event_id;
CREATE TABLE file_registry (
id integer NOT NULL,
file_type character varying NOT NULL,
file_id integer NOT NULL,
bytes bigint,
sha256 character varying,
created_at timestamp without time zone NOT NULL,
success boolean DEFAULT false NOT NULL,
retry_count integer DEFAULT 0,
retry_at timestamp without time zone,
missing_on_primary boolean DEFAULT false NOT NULL,
......@@ -510,14 +506,8 @@ CREATE INDEX index_design_registry_on_retry_at ON design_registry USING btree (r
CREATE INDEX index_design_registry_on_state ON design_registry USING btree (state);
CREATE INDEX index_file_registry_on_file_type ON file_registry USING btree (file_type);
CREATE UNIQUE INDEX index_file_registry_on_file_type_and_file_id ON file_registry USING btree (file_type, file_id);
CREATE INDEX index_file_registry_on_retry_at ON file_registry USING btree (retry_at);
CREATE INDEX index_file_registry_on_success ON file_registry USING btree (success);
CREATE UNIQUE INDEX index_g_wiki_repository_registry_on_group_wiki_repository_id ON group_wiki_repository_registry USING btree (group_wiki_repository_id);
CREATE INDEX index_group_wiki_repository_registry_on_retry_at ON group_wiki_repository_registry USING btree (retry_at);
......
......@@ -35,7 +35,6 @@ module EE
expose :db_replication_lag_seconds
expose :attachments_replication_enabled, if: -> (*) { ::Geo::UploadReplicator.disabled? }
expose :job_artifacts_replication_enabled
expose :container_repositories_replication_enabled
expose :design_repositories_replication_enabled
......
......@@ -17,8 +17,7 @@ module EE
geo_hashed_storage_migrated_events
geo_hashed_storage_attachments_events
geo_lfs_object_deleted_events
geo_job_artifact_deleted_events
geo_upload_deleted_events].freeze
geo_job_artifact_deleted_events].freeze
module PrunableEvent
extend ActiveSupport::Concern
......
......@@ -28,7 +28,6 @@ module Gitlab
print_verified_repositories
print_wikis_status
print_verified_wikis
print_attachments_status
print_ci_job_artifacts_status
print_container_repositories_status
print_design_repositories_status
......@@ -49,7 +48,6 @@ module Gitlab
print_verified_repositories
print_wikis_status
print_verified_wikis
print_attachments_status
print_ci_job_artifacts_status
print_container_repositories_status
print_design_repositories_status
......@@ -74,7 +72,6 @@ module Gitlab
["repositories", Gitlab::Geo.repository_verification_enabled?],
["wikis", Gitlab::Geo.repository_verification_enabled?],
["job_artifacts", false],
["attachments", false],
["design_repositories", false]
]
......@@ -252,15 +249,6 @@ module Gitlab
end
end
def print_attachments_status
return if ::Geo::UploadReplicator.enabled?
print 'Attachments: '.rjust(GEO_STATUS_COLUMN_WIDTH)
show_failed_value(current_node_status.attachments_failed_count)
print "#{current_node_status.attachments_synced_count}/#{current_node_status.attachments_count} "
puts using_percentage(current_node_status.attachments_synced_in_percentage)
end
def print_ci_job_artifacts_status
print 'CI job artifacts: '.rjust(GEO_STATUS_COLUMN_WIDTH)
show_failed_value(current_node_status.job_artifacts_failed_count)
......
# frozen_string_literal: true
module Gitlab
module Geo
module LogCursor
module Events
class UploadDeletedEvent
include BaseEvent
def process
job_id = ::Geo::FileRegistryRemovalWorker.perform_async(event.upload_type, event.upload_id)
log_event(job_id)
end
def log_event(job_id)
super(
'Delete upload file scheduled',
upload_id: event.upload_id,
upload_type: event.upload_type,
file_path: event.file_path,
model_id: event.model_id,
model_type: event.model_type,
job_id: job_id)
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Admin::Geo::UploadsController, :geo do
include EE::GeoHelpers
let_it_be(:admin) { create(:admin) }
let_it_be(:secondary) { create(:geo_node) }
let_it_be(:synced_registry) { create(:geo_upload_legacy_registry, :with_file, :attachment, success: true) }
let_it_be(:failed_registry) { create(:geo_upload_legacy_registry, :failed) }
let_it_be(:never_registry) { create(:geo_upload_legacy_registry, :failed, retry_count: nil) }
def css_id(registry)
"#upload-#{registry.id}-header"
end
before do
stub_feature_flags(geo_upload_replication: false)
sign_in(admin)
end
shared_examples 'license required' do
context 'without a valid license' do
it 'redirects to 403 page' do
expect(subject).to have_gitlab_http_status(:forbidden)
end
end
end
describe '#index' do
subject { get :index }
it_behaves_like 'license required'
context 'with a valid license' do
render_views
before do
stub_licensed_features(geo: true)
stub_current_geo_node(secondary)
end
it 'renders the index template' do
expect(subject).to have_gitlab_http_status(:ok)
expect(subject).to render_template(:index)
end
context 'without sync_status specified' do
it 'renders all registries' do
expect(subject).to have_gitlab_http_status(:ok)
expect(response.body).to have_css(css_id(synced_registry))
expect(response.body).to have_css(css_id(failed_registry))
expect(response.body).to have_css(css_id(never_registry))
end
end
context 'with sync_status=synced' do
subject { get :index, params: { sync_status: 'synced' } }
it 'renders only synced registries' do
expect(subject).to have_gitlab_http_status(:ok)
expect(response.body).to have_css(css_id(synced_registry))
expect(response.body).not_to have_css(css_id(failed_registry))
expect(response.body).not_to have_css(css_id(never_registry))
end
end
context 'with sync_status=failed' do
subject { get :index, params: { sync_status: 'failed' } }
it 'renders only failed registries' do
expect(subject).to have_gitlab_http_status(:ok)
expect(response.body).not_to have_css(css_id(synced_registry))
expect(response.body).to have_css(css_id(failed_registry))
expect(response.body).not_to have_css(css_id(never_registry))
end
end
# Explained via: https://gitlab.com/gitlab-org/gitlab/-/issues/216049
context 'with sync_status=pending' do
subject { get :index, params: { sync_status: 'pending' } }
it 'renders only never synced registries' do
expect(subject).to have_gitlab_http_status(:ok)
expect(response.body).not_to have_css(css_id(synced_registry))
expect(response.body).not_to have_css(css_id(failed_registry))
expect(response.body).to have_css(css_id(never_registry))
end
end
end
end
describe '#destroy' do
subject { delete :destroy, params: { id: registry } }
it_behaves_like 'license required' do
let(:registry) { create(:geo_upload_legacy_registry) }
end
context 'with a valid license' do
before do
stub_licensed_features(geo: true)
end
context 'with an orphaned registry' do
let(:registry) { create(:geo_upload_legacy_registry, success: true) }
it 'removes the registry' do
registry.update_column(:file_id, -1)
expect(subject).to redirect_to(admin_geo_uploads_path)
expect(flash[:toast]).to include('was successfully removed')
expect { Geo::UploadRegistry.find(registry.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'with a regular registry' do
let(:registry) { create(:geo_upload_legacy_registry, :avatar, :with_file, success: true) }
it 'does not delete the registry and gives an error' do
expect(subject).to redirect_to(admin_geo_uploads_path)
expect(flash[:alert]).to include('Could not remove tracking entry')
expect { Geo::UploadRegistry.find(registry.id) }.not_to raise_error
end
end
end
end
end
......@@ -30,10 +30,6 @@ FactoryBot.define do
job_artifact_deleted_event factory: :geo_job_artifact_deleted_event
end
trait :upload_deleted_event do
upload_deleted_event factory: :geo_upload_deleted_event
end
trait :reset_checksum_event do
reset_checksum_event factory: :geo_reset_checksum_event
end
......@@ -133,26 +129,6 @@ FactoryBot.define do
end
end
factory :geo_upload_deleted_event, class: 'Geo::UploadDeletedEvent' do
upload { association(:upload) }
file_path { upload.path }
model_id { upload.model_id }
model_type { upload.model_type }
uploader { upload.uploader }
trait :issuable_upload do
upload { association(:upload, :issuable_upload) }
end
trait :personal_snippet do
upload { association(:upload, :personal_snippet) }
end
trait :namespace_upload do
upload { association(:upload, :namespace_upload) }
end
end
factory :geo_reset_checksum_event, class: 'Geo::ResetChecksumEvent' do
project
end
......
# frozen_string_literal: true
FactoryBot.define do
factory :geo_upload_legacy_registry, class: 'Geo::UploadRegistry' do
sequence(:file_id)
file_type { :file }
success { true }
trait(:attachment) { file_type { :attachment } }
trait(:avatar) { file_type { :avatar } }
trait(:'bulk_imports/export') { file_type { :'bulk_imports/export' } }
trait(:favicon) { file_type { :favicon } }
trait(:file) { file_type { :file } }
trait(:import_export) { file_type { :import_export } }
trait(:issuable_metric_image) { file_type { :issuable_metric_image } }
trait(:namespace_file) { file_type { :namespace_file } }
trait(:personal_file) { file_type { :personal_file } }
trait :failed do
success { false }
retry_count { 1 }
end
trait :never_synced do
success { false }
retry_count { nil }
end
trait :with_file do
after(:build, :stub) do |registry, _|
file =
if registry.file_type.to_sym == :job_artifact
raise NotImplementedError, 'Use create(:geo_job_artifact_registry, :with_artifact) instead'
else
create(:upload)
end
registry.file_id = file.id
end
end
end
end
FactoryBot.define do
factory :geo_upload_registry, class: 'Geo::UploadRegistry' do
association(:upload, :with_file)
sequence(:file_id)
file_type { :file }
state { Geo::UploadRegistry.state_value(:pending) }
trait :synced do
......
......@@ -7,10 +7,6 @@ FactoryBot.define do
trait :healthy do
status_message { nil }
attachments_count { 329 }
attachments_failed_count { 13 }
attachments_synced_count { 141 }
attachments_synced_missing_on_primary_count { 89 }
job_artifacts_count { 580 }
job_artifacts_failed_count { 3 }
job_artifacts_synced_count { 577 }
......@@ -53,7 +49,6 @@ FactoryBot.define do
last_successful_status_check_timestamp { 2.minutes.ago }
version { Gitlab::VERSION }
revision { Gitlab.revision }
attachments_replication_enabled { true }
container_repositories_replication_enabled { false }
design_repositories_replication_enabled { true }
job_artifacts_replication_enabled { false }
......@@ -70,7 +65,6 @@ FactoryBot.define do
end
trait :replicated_and_verified do
attachments_failed_count { 0 }
job_artifacts_failed_count { 0 }
container_repositories_failed_count { 0 }
design_repositories_failed_count { 0 }
......@@ -92,7 +86,6 @@ FactoryBot.define do
wikis_verified_count { 10 }
wikis_verification_total_count { 10 }
job_artifacts_synced_count { 10 }
attachments_synced_count { 10 }
replication_slots_used_count { 10 }
container_repositories_synced_count { 10 }
design_repositories_synced_count { 10 }
......@@ -100,7 +93,6 @@ FactoryBot.define do
repositories_count { 10 }
wikis_count { 10 }
job_artifacts_count { 10 }
attachments_count { 10 }
replication_slots_count { 10 }
container_repositories_count { 10 }
design_repositories_count { 10 }
......
......@@ -33,16 +33,6 @@ RSpec.describe 'admin Geo Replication Nav', :js, :geo do
end
end
describe 'visit admin/geo/replication/legacy-uploads' do
before do
stub_feature_flags(geo_upload_replication: false)
end
it_behaves_like 'active sidebar link', 'Uploads' do
let(:path) { admin_geo_uploads_path }
end
end
describe 'visit admin/geo/replication/designs' do
it_behaves_like 'active sidebar link', 'Designs' do
let(:path) { admin_geo_designs_path }
......
......@@ -59,12 +59,6 @@ RSpec.describe 'admin Geo Sidebar', :js, :geo do
end
end
describe 'visiting geo uploads' do
it_behaves_like 'active sidebar link', 'Replication' do
let(:path) { admin_geo_uploads_path }
end
end
describe 'visiting geo replicables' do
Gitlab::Geo.enabled_replicator_classes.each do |replicator_class|
it_behaves_like 'active sidebar link', 'Replication' do
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'admin Geo Uploads', :js, :geo do
let!(:geo_node) { create(:geo_node) }
let!(:synced_registry) { create(:geo_upload_legacy_registry, :with_file, :attachment, success: true) }
before do
allow(Gitlab::Geo).to receive(:license_allows?).and_return(true)
stub_feature_flags(geo_upload_replication: false)
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
end
describe 'visiting geo uploads initial page' do
before do
visit(admin_geo_uploads_path)
wait_for_requests
end
it 'shows all uploads in the registry' do
page.within(find('#content-body', match: :first)) do
expect(page).to have_content(synced_registry.file)
expect(page).not_to have_content('There are no uploads to show')
end
end
describe 'searching for a geo upload' do
it 'filters out uploads with the search term' do
fill_in :name, with: synced_registry.file
find('#project-filter-form-field').native.send_keys(:enter)
wait_for_requests
page.within(find('#content-body', match: :first)) do
expect(page).to have_content(synced_registry.file)
expect(page).not_to have_content('There are no uploads to show')
end
end
end
describe 'with no registries' do
it 'shows empty state' do
fill_in :name, with: 'asdfasdf'
find('#project-filter-form-field').native.send_keys(:enter)
wait_for_requests
page.within(find('#content-body', match: :first)) do
expect(page).not_to have_content(synced_registry.file)
expect(page).to have_content('There are no uploads to show')
end
end
end
end
describe 'remove an orphaned Tracking Entry' do
before do
synced_registry.upload.destroy!
visit(admin_geo_uploads_path)
wait_for_requests
end
it 'removes an existing Geo Upload' do
card_count = page.all(:css, '.upload-card').length
page.within(find('.upload-card', match: :first)) do
page.click_button('Remove')
end
page.within('.modal') do
page.click_button('Remove entry')
end
# Wait for remove confirmation
expect(page.find('.gl-toast')).to have_text('removed')
expect(page.all(:css, '.upload-card').length).to be(card_count - 1)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Geo::AttachmentLegacyRegistryFinder, :geo do
before do
stub_feature_flags(geo_upload_replication: false)
end
it_behaves_like 'a file registry finder' do
let_it_be(:project) { create(:project) }
let_it_be(:replicable_1) { create(:upload, model: project) }
let_it_be(:replicable_2) { create(:upload, model: project) }
let_it_be(:replicable_3) { create(:upload, :issuable_upload, model: project) }
let_it_be(:replicable_4) { create(:upload, model: project) }
let_it_be(:replicable_5) { create(:upload, model: project) }
let_it_be(:replicable_6) { create(:upload, :personal_snippet_upload) }
let_it_be(:replicable_7) { create(:upload, :object_storage, model: project) }
let_it_be(:replicable_8) { create(:upload, :object_storage, model: project) }
let_it_be(:replicable_9) { create(:upload, :object_storage, model: project) }
let_it_be(:registry_1) { create(:geo_upload_legacy_registry, :attachment, :failed, file_id: replicable_1.id) }
let_it_be(:registry_2) { create(:geo_upload_legacy_registry, :attachment, file_id: replicable_2.id, missing_on_primary: true) }
let_it_be(:registry_3) { create(:geo_upload_legacy_registry, :attachment, :never_synced, file_id: replicable_3.id) }
let_it_be(:registry_4) { create(:geo_upload_legacy_registry, :attachment, :failed, file_id: replicable_4.id) }
let_it_be(:registry_5) { create(:geo_upload_legacy_registry, :attachment, file_id: replicable_5.id, missing_on_primary: true, retry_at: 1.day.ago) }
let_it_be(:registry_6) { create(:geo_upload_legacy_registry, :attachment, :failed, file_id: replicable_6.id) }
let_it_be(:registry_7) { create(:geo_upload_legacy_registry, :attachment, :failed, file_id: replicable_7.id, missing_on_primary: true) }
let_it_be(:registry_8) { create(:geo_upload_legacy_registry, :attachment, :never_synced, file_id: replicable_8.id) }
end
end
......@@ -6,11 +6,6 @@
"health",
"health_status",
"missing_oauth_application",
"attachments_replication_enabled",
"attachments_count",
"attachments_failed_count",
"attachments_synced_count",
"attachments_synced_missing_on_primary_count",
"lfs_objects_count",
"lfs_objects_failed_count",
"lfs_objects_synced_count",
......@@ -188,12 +183,6 @@
"health": { "type": ["string", "null"] },
"health_status": { "type": "string" },
"missing_oauth_application": { "type": "boolean" },
"attachments_replication_enabled": { "type": ["boolean", "null"] },
"attachments_count": { "type": "integer" },
"attachments_failed_count": { "type": ["integer", "null"] },
"attachments_synced_count": { "type": ["integer", "null"] },
"attachments_synced_missing_on_primary_count": { "type": ["integer", "null"] },
"attachments_synced_in_percentage": { "type": "string" },
"db_replication_lag_seconds": { "type": ["integer", "null"] },
"lfs_objects_count": { "type": "integer" },
"lfs_objects_failed_count": { "type": ["integer", "null"] },
......
......@@ -55,16 +55,6 @@ RSpec.describe EE::API::Entities::GeoNodeStatus do
end
end
describe '#attachments_synced_in_percentage' do
it 'formats as percentage' do
geo_node_status.assign_attributes(attachments_count: 329,
attachments_failed_count: 25,
attachments_synced_count: 141)
expect(subject[:attachments_synced_in_percentage]).to eq '42.86%'
end
end
describe '#job_artifacts_synced_in_percentage' do
it 'formats as percentage' do
geo_node_status.assign_attributes(job_artifacts_count: 256,
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Geo::LogCursor::Events::UploadDeletedEvent, :clean_gitlab_redis_shared_state do
let(:logger) { Gitlab::Geo::LogCursor::Logger.new(described_class, Logger::INFO) }
let(:project) { create(:project) }
let(:upload_deleted_event) { create(:geo_upload_deleted_event, project: project) }
let(:event_log) { create(:geo_event_log, upload_deleted_event: upload_deleted_event) }
let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
subject { described_class.new(upload_deleted_event, Time.now, logger) }
around do |example|
Sidekiq::Testing.inline! { example.run }
end
describe '#process' do
context 'with default handling' do
let(:event_log) { create(:geo_event_log, :upload_deleted_event) }
let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
let(:upload_deleted_event) { event_log.upload_deleted_event }
let(:upload) { upload_deleted_event.upload }
it 'does not create a tracking database entry' do
expect { subject.process }.not_to change(Geo::UploadRegistry, :count)
end
it 'removes the tracking database entry if one exists' do
create(:geo_upload_legacy_registry, :avatar, file_id: upload.id)
expect { subject.process }.to change(Geo::UploadRegistry, :count).by(-1)
end
it_behaves_like 'logs event source info'
end
end
end
......@@ -21,42 +21,4 @@ RSpec.describe Appearance do
it { is_expected.to allow_value(hex).for(:message_font_color) }
it { is_expected.not_to allow_value('000').for(:message_font_color) }
end
context 'object storage with background upload' do
context 'when running in a Geo primary node' do
let_it_be(:primary) { create(:geo_node, :primary) }
let_it_be(:secondary) { create(:geo_node) }
before do
stub_current_geo_node(primary)
stub_uploads_object_storage(AttachmentUploader, background_upload: true)
end
it 'creates a Geo deleted log event for logo' do
Sidekiq::Testing.inline! do
expect do
create(:appearance, :with_logo)
end.to change(Geo::UploadDeletedEvent, :count).by(1)
end
end
it 'creates a Geo deleted log event for header logo' do
Sidekiq::Testing.inline! do
expect do
create(:appearance, :with_header_logo)
end.to change(Geo::UploadDeletedEvent, :count).by(1)
end
end
it 'creates only a Geo deleted log event for the migrated header logo' do
Sidekiq::Testing.inline! do
appearance = create(:appearance, :with_header_logo, :with_logo)
expect do
appearance.update!(header_logo: fixture_file_upload('spec/fixtures/rails_sample.jpg'))
end.to change(Geo::UploadDeletedEvent, :count).by(1)
end
end
end
end
end
......@@ -107,13 +107,6 @@ RSpec.describe Geo::EventLog, type: :model do
expect(subject.event).to eq job_artifact_deleted_event
end
it 'returns upload_deleted_event when set' do
upload_deleted_event = build(:geo_upload_deleted_event)
subject.upload_deleted_event = upload_deleted_event
expect(subject.event).to eq upload_deleted_event
end
it 'returns reset_checksum_event when set' do
reset_checksum_event = build(:geo_reset_checksum_event)
subject.reset_checksum_event = reset_checksum_event
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Geo::UploadDeletedEvent, type: :model do
describe 'relationships' do
it { is_expected.to belong_to(:upload) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:upload) }
it { is_expected.to validate_presence_of(:file_path) }
it { is_expected.to validate_presence_of(:model_id) }
it { is_expected.to validate_presence_of(:model_type) }
it { is_expected.to validate_presence_of(:uploader) }
end
describe '#upload_type' do
it 'returns nil when uploader is not set' do
subject.uploader = nil
expect(subject.upload_type).to be_nil
end
it 'returns uploader type when uploader is set' do
subject.uploader = 'PersonalFileUploader'
expect(subject.upload_type).to eq 'personal_file'
end
end
end
......@@ -2,174 +2,6 @@
require 'spec_helper'
RSpec.describe Geo::UploadRegistry, :geo do
include EE::GeoHelpers
before do
stub_feature_flags(geo_upload_replication: false)
end
it_behaves_like 'a BulkInsertSafe model', Geo::UploadRegistry do
let(:valid_items_for_bulk_insertion) { build_list(:geo_upload_legacy_registry, 10, created_at: Time.zone.now) }
let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
it 'finds associated Upload record' do
registry = create(:geo_upload_legacy_registry, :attachment, :with_file)
expect(described_class.find(registry.id).upload).to be_an_instance_of(Upload)
end
describe '.find_registry_differences' do
let_it_be(:secondary) { create(:geo_node) }
let_it_be(:project) { create(:project) }
let_it_be(:upload_1) { create(:upload, model: project) }
let_it_be(:upload_2) { create(:upload, model: project) }
let_it_be(:upload_3) { create(:upload, :issuable_upload, model: project) }
let_it_be(:upload_4) { create(:upload, model: project) }
let_it_be(:upload_5) { create(:upload, model: project) }
let_it_be(:upload_6) { create(:upload, :personal_snippet_upload) }
let_it_be(:upload_7) { create(:upload, :object_storage, model: project) }
let_it_be(:upload_8) { create(:upload, :object_storage, model: project) }
let_it_be(:upload_9) { create(:upload, :object_storage, model: project) }
before do
stub_current_geo_node(secondary)
end
it 'returns untracked IDs as well as tracked IDs that are unused', :aggregate_failures do
max_id = Upload.maximum(:id)
create(:geo_upload_legacy_registry, :avatar, file_id: upload_1.id)
create(:geo_upload_legacy_registry, :file, file_id: upload_3.id)
create(:geo_upload_legacy_registry, :avatar, file_id: upload_5.id)
create(:geo_upload_legacy_registry, :personal_file, file_id: upload_6.id)
create(:geo_upload_legacy_registry, :avatar, file_id: upload_7.id)
unused_registry_1 = create(:geo_upload_legacy_registry, :attachment, file_id: max_id + 1)
unused_registry_2 = create(:geo_upload_legacy_registry, :personal_file, file_id: max_id + 2)
range = 1..(max_id + 2)
untracked, unused = described_class.find_registry_differences(range)
expected_untracked = [
[upload_2.id, 'avatar'],
[upload_4.id, 'avatar'],
[upload_8.id, 'avatar'],
[upload_9.id, 'avatar']
]
expected_unused = [
[unused_registry_1.file_id, 'attachment'],
[unused_registry_2.file_id, 'personal_file']
]
expect(untracked).to match_array(expected_untracked)
expect(unused).to match_array(expected_unused)
end
end
describe '.failed' do
it 'returns registries in the failed state' do
failed = create(:geo_upload_legacy_registry, :failed)
create(:geo_upload_legacy_registry)
expect(described_class.failed).to match_ids(failed)
end
end
describe '.synced' do
it 'returns registries in the synced state' do
create(:geo_upload_legacy_registry, :failed)
synced = create(:geo_upload_legacy_registry)
expect(described_class.synced).to match_ids(synced)
end
end
describe '.retry_due' do
it 'returns registries that are due to be retried' do
failed = create(:geo_upload_legacy_registry, :failed)
synced = create(:geo_upload_legacy_registry)
retry_yesterday = create(:geo_upload_legacy_registry, retry_at: Date.yesterday)
create(:geo_upload_legacy_registry, retry_at: Date.tomorrow)
expect(described_class.retry_due).to match_ids([failed, synced, retry_yesterday])
end
end
describe '.never_attempted_sync' do
it 'returns registries that are never synced' do
create(:geo_upload_legacy_registry, :failed)
create(:geo_upload_legacy_registry)
pending = create(:geo_upload_legacy_registry, retry_count: nil, success: false)
expect(described_class.never_attempted_sync).to match_ids([pending])
end
end
describe '.with_status' do
it 'finds the registries with status "synced"' do
expect(described_class).to receive(:synced)
described_class.with_status('synced')
end
it 'finds the registries with status "never_attempted_sync" when filter is set to "pending"' do
expect(described_class).to receive(:never_attempted_sync)
described_class.with_status('pending')
end
it 'finds the registries with status "failed"' do
expect(described_class).to receive(:failed)
described_class.with_status('failed')
end
end
describe '.with_search' do
it 'searches registries on path' do
upload = create(:upload, path: 'uploads/-/system/project/avatar/my-awesome-avatar.png')
upload_registry = create(:geo_upload_legacy_registry, file_id: upload.id, file_type: :avatar)
expect(described_class.with_search('awesome-avatar')).to match_ids(upload_registry)
end
end
describe '#file' do
it 'returns the path of the upload of a registry' do
upload = create(:upload, :with_file)
registry = create(:geo_upload_legacy_registry, :file, file_id: upload.id)
expect(registry.file).to eq(upload.path)
end
it 'return "removed" message when the upload no longer exists' do
registry = create(:geo_upload_legacy_registry, :avatar)
expect(registry.file).to match(/^Removed avatar with id/)
end
end
describe '#synchronization_state' do
let_it_be(:failed) { create(:geo_upload_legacy_registry, :failed) }
let_it_be(:synced) { create(:geo_upload_legacy_registry) }
it 'returns :synced for a successful synced registry' do
expect(synced.synchronization_state).to eq(:synced)
end
it 'returns :never for a registry that has never been synced' do
never = build(:geo_upload_legacy_registry, success: false, retry_count: nil)
expect(never.synchronization_state).to eq(:never)
end
it 'returns :failed for a failed registry' do
expect(failed.synchronization_state).to eq(:failed)
end
end
end
RSpec.describe Geo::UploadRegistry, :geo, type: :model do
let_it_be(:registry) { create(:geo_upload_registry) }
......
......@@ -144,50 +144,6 @@ RSpec.describe GeoNodeStatus, :geo do
end
end
describe '#attachments_synced_count' do
it 'only counts successful syncs' do
create_list(:user, 3, avatar: fixture_file_upload('spec/fixtures/dk.png', 'image/png'))
uploads = Upload.pluck(:id)
create(:geo_upload_registry, :synced, file_id: uploads[0])
create(:geo_upload_registry, :synced, file_id: uploads[1])
create(:geo_upload_registry, :failed, file_id: uploads[2])
expect(subject.attachments_synced_count).to eq(2)
end
end
describe '#attachments_failed_count' do
it 'counts failed avatars, attachments, personal snippets and files' do
# These two should be ignored
create(:geo_lfs_object_registry, :failed)
create(:geo_upload_registry)
create(:geo_upload_registry, :failed)
create(:geo_upload_registry, :failed)
expect(subject.attachments_failed_count).to eq(2)
end
end
describe '#attachments_synced_in_percentage' do
it 'returns 0 when no registries are available' do
expect(subject.attachments_synced_in_percentage).to eq(0)
end
it 'returns the right percentage' do
create_list(:user, 4, avatar: fixture_file_upload('spec/fixtures/dk.png', 'image/png'))
uploads = Upload.pluck(:id)
create(:geo_upload_registry, :synced, file_id: uploads[0])
create(:geo_upload_registry, :synced, file_id: uploads[1])
create(:geo_upload_registry, :failed, file_id: uploads[2])
create(:geo_upload_registry, :started, file_id: uploads[3])
expect(subject.attachments_synced_in_percentage).to be_within(0.0001).of(50)
end
end
describe '#db_replication_lag_seconds' do
it 'returns the set replication lag if secondary' do
allow(Gitlab::Geo).to receive(:secondary?).and_return(true)
......@@ -979,7 +935,6 @@ RSpec.describe GeoNodeStatus, :geo do
result = described_class.from_json(data)
expect(result.id).to be_nil
expect(result.attachments_count).to eq(status.attachments_count)
expect(result.cursor_last_event_date).to eq(Time.zone.at(status.cursor_last_event_timestamp))
expect(result.storage_shards.count).to eq(Settings.repositories.storages.count)
end
......@@ -1327,12 +1282,6 @@ RSpec.describe GeoNodeStatus, :geo do
stub_current_geo_node(primary)
end
it 'does not call AttachmentLegacyRegistryFinder#registry_count' do
expect_any_instance_of(Geo::AttachmentLegacyRegistryFinder).not_to receive(:registry_count)
subject
end
it 'does not call JobArtifactRegistryFinder#registry_count' do
expect_any_instance_of(Geo::JobArtifactRegistryFinder).not_to receive(:registry_count)
......@@ -1341,12 +1290,6 @@ RSpec.describe GeoNodeStatus, :geo do
end
context 'on the secondary' do
it 'calls AttachmentLegacyRegistryFinder#registry_count' do
expect_any_instance_of(Geo::AttachmentLegacyRegistryFinder).to receive(:registry_count).twice
subject
end
it 'calls JobArtifactRegistryFinder#registry_count' do
expect_any_instance_of(Geo::JobArtifactRegistryFinder).to receive(:registry_count).twice
......
......@@ -91,29 +91,6 @@ RSpec.describe Note do
end
end
context 'object storage with background upload' do
before do
stub_uploads_object_storage(AttachmentUploader, background_upload: true)
end
context 'when running in a Geo primary node' do
let_it_be(:primary) { create(:geo_node, :primary) }
let_it_be(:secondary) { create(:geo_node) }
before do
stub_current_geo_node(primary)
end
it 'creates a Geo deleted log event for attachment' do
Sidekiq::Testing.inline! do
expect do
create(:note, :with_attachment)
end.to change(Geo::UploadDeletedEvent, :count).by(1)
end
end
end
end
describe '#resource_parent' do
it 'returns group for epic notes' do
group = create(:group)
......
......@@ -93,12 +93,6 @@ RSpec.describe Upload do
let_it_be(:primary) { create(:geo_node, :primary) }
let_it_be(:secondary) { create(:geo_node) }
it 'logs an event to the Geo event log' do
stub_current_geo_node(primary)
expect { subject.destroy }.to change(Geo::UploadDeletedEvent, :count).by(1)
end
it 'logs an event to the Geo event log when bulk removal is used', :sidekiq_inline do
stub_current_geo_node(primary)
......
......@@ -317,11 +317,6 @@ RSpec.describe API::Geo do
design_repositories_count: 100,
design_repositories_synced_count: 50,
design_repositories_failed_count: 12,
attachments_count: 30,
attachments_synced_count: 30,
attachments_failed_count: 25,
attachments_synced_missing_on_primary_count: 6,
attachments_replication_enabled: false,
container_repositories_replication_enabled: true,
design_repositories_replication_enabled: false,
job_artifacts_replication_enabled: true,
......
......@@ -26,18 +26,6 @@ RSpec.describe 'EE-specific admin routing' do
end
end
describe Admin::Geo::UploadsController, 'routing' do
let!(:upload_registry) { create(:geo_upload_legacy_registry, :with_file, :attachment, success: true) }
it 'routes / to #index' do
expect(get('/admin/geo/replication/legacy-uploads')).to route_to('admin/geo/uploads#index')
end
it 'routes delete /:id to #destroy' do
expect(delete("/admin/geo/replication/legacy-uploads/#{upload_registry.id}")).to route_to('admin/geo/uploads#destroy', id: upload_registry.to_param)
end
end
describe Admin::Geo::DesignsController, 'routing' do
it 'routes / to #index' do
expect(get('/admin/geo/replication/designs')).to route_to('admin/geo/designs#index')
......
......@@ -11,19 +11,9 @@ RSpec.describe Geo::FileDownloadService do
before do
stub_current_geo_node(secondary)
stub_feature_flags(geo_upload_replication: false)
end
describe '#downloader' do
Gitlab::Geo::Replication::USER_UPLOADS_OBJECT_TYPES.each do |object_type|
it "returns a FileDownloader given object_type is #{object_type}" do
subject = described_class.new(object_type, 1)
expect(subject.downloader).to be_a(Gitlab::Geo::Replication::FileDownloader)
end
end
it "returns a JobArtifactDownloader given object_type is job_artifact" do
subject = described_class.new('job_artifact', 1)
......@@ -36,135 +26,61 @@ RSpec.describe Geo::FileDownloadService do
stub_transfer_result(bytes_downloaded: 0, success: false)
end
shared_examples_for 'a service that sets next retry time capped at some value' do
it 'ensures the next retry time is capped properly' do
download_service.execute
expect(registry_entry.reload).to have_attributes(
retry_at: be_within(100.seconds).of(1.hour.from_now),
retry_count: 32
)
end
end
context 'with job_artifacts' do
let!(:registry_entry) do
let!(:geo_job_artifact_registry) do
create(:geo_job_artifact_registry, success: false, retry_count: 31, artifact_id: file.id)
end
let(:file) { create(:ci_job_artifact) }
let(:download_service) { described_class.new('job_artifact', file.id) }
it_behaves_like 'a service that sets next retry time capped at some value'
end
it 'ensures the next retry time is capped properly' do
download_service.execute
context 'with uploads' do
let!(:registry_entry) do
create(:geo_upload_legacy_registry, :avatar, success: false, file_id: file.id, retry_count: 31)
expect(geo_job_artifact_registry.reload).to have_attributes(
retry_at: be_within(100.seconds).of(1.hour.from_now),
retry_count: 32
)
end
let(:file) { create(:upload) }
let(:download_service) { described_class.new('avatar', file.id) }
it_behaves_like 'a service that sets next retry time capped at some value'
end
end
shared_examples_for 'a service that handles orphaned uploads' do |file_type|
let(:download_service) { described_class.new(file_type, file.id) }
let(:registry) { Geo::UploadRegistry }
before do
stub_exclusive_lease("file_download_service:#{file_type}:#{file.id}",
timeout: Geo::FileDownloadService::LEASE_TIMEOUT)
file.update_column(:model_id, 22222) # Not-existing record
end
it 'marks upload as successful and missing_on_primary' do
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message,
:download_time_s,
download_success: true,
bytes_downloaded: 0,
primary_missing_file: true)).and_call_original
expect { download_service.execute }.to change { registry.synced.missing_on_primary.count }.by(1)
end
end
shared_examples_for 'a service that downloads the file and registers the sync result' do |file_type|
let(:download_service) { described_class.new(file_type, file.id) }
describe '#execute' do
context 'job artifacts' do
let(:file) { create(:ci_job_artifact) }
let(:download_service) { described_class.new('job_artifact', file.id) }
let(:registry) do
case file_type
when 'job_artifact'
let(:registry) do
Geo::JobArtifactRegistry
else
Geo::UploadRegistry
end
end
subject(:execute!) { download_service.execute }
before do
stub_exclusive_lease("file_download_service:#{file_type}:#{file.id}",
timeout: Geo::FileDownloadService::LEASE_TIMEOUT)
end
subject(:execute!) { download_service.execute }
context 'for a new file' do
context 'when the downloader fails before attempting a transfer' do
it 'logs that the download failed before attempting a transfer' do
result = double(:result, success: false, bytes_downloaded: 0, primary_missing_file: false, failed_before_transfer: true, reason: 'Something went wrong')
downloader = double(:downloader, execute: result)
allow(download_service).to receive(:downloader).and_return(downloader)
expect(Gitlab::Geo::Logger)
.to receive(:info)
.with(hash_including(:message, :download_time_s, download_success: false, reason: 'Something went wrong', bytes_downloaded: 0, failed_before_transfer: true))
.and_call_original
execute!
end
before do
stub_exclusive_lease("file_download_service:job_artifact:#{file.id}",
timeout: Geo::FileDownloadService::LEASE_TIMEOUT)
end
context 'when the downloader attempts a transfer' do
context 'when the file is successfully downloaded' do
before do
stub_transfer_result(bytes_downloaded: 100, success: true)
end
context 'for a new file' do
context 'when the downloader fails before attempting a transfer' do
it 'logs that the download failed before attempting a transfer' do
result = double(:result, success: false, bytes_downloaded: 0, primary_missing_file: false, failed_before_transfer: true, reason: 'Something went wrong')
downloader = double(:downloader, execute: result)
allow(download_service).to receive(:downloader).and_return(downloader)
it 'registers the file' do
expect { execute! }.to change { registry.count }.by(1)
end
expect(Gitlab::Geo::Logger)
.to receive(:info)
.with(hash_including(:message, :download_time_s, download_success: false, reason: 'Something went wrong', bytes_downloaded: 0, failed_before_transfer: true))
.and_call_original
it 'marks the file as synced' do
expect { execute! }.to change { registry.synced.count }.by(1)
end
it 'does not mark the file as missing on the primary' do
execute!
expect(registry.last.missing_on_primary).to be_falsey
end
it 'logs the result' do
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, download_success: true, bytes_downloaded: 100)).and_call_original
execute!
end
it 'resets the retry fields' do
execute!
expect(registry.last.reload.retry_count).to eq(0)
expect(registry.last.retry_at).to be_nil
end
end
context 'when the file fails to download' do
context 'when the file is missing on the primary' do
context 'when the downloader attempts a transfer' do
context 'when the file is successfully downloaded' do
before do
stub_transfer_result(bytes_downloaded: 100, success: true, primary_missing_file: true)
stub_transfer_result(bytes_downloaded: 100, success: true)
end
it 'registers the file' do
......@@ -175,115 +91,102 @@ RSpec.describe Geo::FileDownloadService do
expect { execute! }.to change { registry.synced.count }.by(1)
end
it 'marks the file as missing on the primary' do
it 'does not mark the file as missing on the primary' do
execute!
expect(registry.last.missing_on_primary).to be_truthy
expect(registry.last.missing_on_primary).to be_falsey
end
it 'logs the result' do
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, download_success: true, bytes_downloaded: 100, primary_missing_file: true)).and_call_original
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, download_success: true, bytes_downloaded: 100)).and_call_original
execute!
end
it 'sets a retry date and increments the retry count' do
freeze_time do
execute!
it 'resets the retry fields' do
execute!
expect(registry.last.reload.retry_count).to eq(1)
expect(registry.last.retry_at > Time.current).to be_truthy
end
expect(registry.last.reload.retry_count).to eq(0)
expect(registry.last.retry_at).to be_nil
end
end
context 'when the file is not missing on the primary' do
before do
stub_transfer_result(bytes_downloaded: 0, success: false)
end
it 'registers the file' do
expect { execute! }.to change { registry.count }.by(1)
end
it 'marks the file as failed to sync' do
expect { execute! }.to change { registry.failed.count }.by(1)
end
context 'when the file fails to download' do
context 'when the file is missing on the primary' do
before do
stub_transfer_result(bytes_downloaded: 100, success: true, primary_missing_file: true)
end
it 'does not mark the file as missing on the primary' do
execute!
it 'registers the file' do
expect { execute! }.to change { registry.count }.by(1)
end
expect(registry.last.missing_on_primary).to be_falsey
end
it 'marks the file as synced' do
expect { execute! }.to change { registry.synced.count }.by(1)
end
it 'sets a retry date and increments the retry count' do
freeze_time do
it 'marks the file as missing on the primary' do
execute!
expect(registry.last.reload.retry_count).to eq(1)
expect(registry.last.retry_at > Time.current).to be_truthy
expect(registry.last.missing_on_primary).to be_truthy
end
end
end
end
end
end
context 'for a registered file that failed to sync' do
let!(:registry_entry) do
case file_type
when 'job_artifact'
create(:geo_job_artifact_registry, success: false, artifact_id: file.id, retry_count: 3, retry_at: 1.hour.ago)
else
create(:geo_upload_legacy_registry, file_type.to_sym, success: false, file_id: file.id, retry_count: 3, retry_at: 1.hour.ago)
end
end
it 'logs the result' do
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, download_success: true, bytes_downloaded: 100, primary_missing_file: true)).and_call_original
context 'when the file is successfully downloaded' do
before do
stub_transfer_result(bytes_downloaded: 100, success: true)
end
execute!
end
it 'does not register a new file' do
expect { execute! }.not_to change { registry.count }
end
it 'sets a retry date and increments the retry count' do
freeze_time do
execute!
it 'marks the file as synced' do
expect { execute! }.to change { registry.synced.count }.by(1)
end
expect(registry.last.reload.retry_count).to eq(1)
expect(registry.last.retry_at > Time.current).to be_truthy
end
end
end
it 'resets the retry fields' do
execute!
context 'when the file is not missing on the primary' do
before do
stub_transfer_result(bytes_downloaded: 0, success: false)
end
expect(registry_entry.reload.retry_count).to eq(0)
expect(registry_entry.retry_at).to be_nil
end
it 'registers the file' do
expect { execute! }.to change { registry.count }.by(1)
end
context 'when the file was marked as missing on the primary' do
before do
registry_entry.update_column(:missing_on_primary, true)
end
it 'marks the file as failed to sync' do
expect { execute! }.to change { registry.failed.count }.by(1)
end
it 'marks the file as no longer missing on the primary' do
execute!
it 'does not mark the file as missing on the primary' do
execute!
expect(registry_entry.reload.missing_on_primary).to be_falsey
end
end
expect(registry.last.missing_on_primary).to be_falsey
end
context 'when the file was not marked as missing on the primary' do
it 'does not mark the file as missing on the primary' do
execute!
it 'sets a retry date and increments the retry count' do
freeze_time do
execute!
expect(registry_entry.reload.missing_on_primary).to be_falsey
expect(registry.last.reload.retry_count).to eq(1)
expect(registry.last.retry_at > Time.current).to be_truthy
end
end
end
end
end
end
context 'when the file fails to download' do
context 'when the file is missing on the primary' do
context 'for a registered file that failed to sync' do
let!(:geo_job_artifact_registry) do
create(:geo_job_artifact_registry, success: false, artifact_id: file.id, retry_count: 3, retry_at: 1.hour.ago)
end
context 'when the file is successfully downloaded' do
before do
stub_transfer_result(bytes_downloaded: 100, success: true, primary_missing_file: true)
stub_transfer_result(bytes_downloaded: 100, success: true)
end
it 'does not register a new file' do
......@@ -294,167 +197,119 @@ RSpec.describe Geo::FileDownloadService do
expect { execute! }.to change { registry.synced.count }.by(1)
end
it 'marks the file as missing on the primary' do
it 'resets the retry fields' do
execute!
expect(registry_entry.reload.missing_on_primary).to be_truthy
expect(geo_job_artifact_registry.reload.retry_count).to eq(0)
expect(geo_job_artifact_registry.retry_at).to be_nil
end
it 'logs the result' do
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, download_success: true, bytes_downloaded: 100, primary_missing_file: true)).and_call_original
execute!
end
context 'when the file was marked as missing on the primary' do
before do
geo_job_artifact_registry.update_column(:missing_on_primary, true)
end
it 'sets a retry date and increments the retry count' do
freeze_time do
it 'marks the file as no longer missing on the primary' do
execute!
expect(registry_entry.reload.retry_count).to eq(4)
expect(registry_entry.retry_at > Time.current).to be_truthy
expect(geo_job_artifact_registry.reload.missing_on_primary).to be_falsey
end
end
it 'sets a retry date with a maximum of about 4 hours' do
registry_entry.update!(retry_count: 100, retry_at: 1.minute.ago)
freeze_time do
context 'when the file was not marked as missing on the primary' do
it 'does not mark the file as missing on the primary' do
execute!
expect(registry_entry.reload.retry_at).to be_within(3.minutes).of(4.hours.from_now)
expect(geo_job_artifact_registry.reload.missing_on_primary).to be_falsey
end
end
end
context 'when the file is not missing on the primary' do
before do
stub_transfer_result(bytes_downloaded: 0, success: false)
end
it 'does not register a new file' do
expect { execute! }.not_to change { registry.count }
end
it 'does not change the success flag' do
expect { execute! }.not_to change { registry.failed.count }
end
context 'when the file fails to download' do
context 'when the file is missing on the primary' do
before do
stub_transfer_result(bytes_downloaded: 100, success: true, primary_missing_file: true)
end
it 'does not mark the file as missing on the primary' do
execute!
it 'does not register a new file' do
expect { execute! }.not_to change { registry.count }
end
expect(registry_entry.reload.missing_on_primary).to be_falsey
end
it 'marks the file as synced' do
expect { execute! }.to change { registry.synced.count }.by(1)
end
it 'sets a retry date and increments the retry count' do
freeze_time do
it 'marks the file as missing on the primary' do
execute!
expect(registry_entry.reload.retry_count).to eq(4)
expect(registry_entry.retry_at > Time.current).to be_truthy
expect(geo_job_artifact_registry.reload.missing_on_primary).to be_truthy
end
end
it 'sets a retry date with a maximum of about 1 hour' do
registry_entry.update!(retry_count: 100, retry_at: 1.minute.ago)
it 'logs the result' do
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, download_success: true, bytes_downloaded: 100, primary_missing_file: true)).and_call_original
freeze_time do
execute!
expect(registry_entry.reload.retry_at).to be_within(3.minutes).of(1.hour.from_now)
end
end
end
end
end
end
describe '#execute' do
context 'user avatar' do
let(:file) { create(:upload, model: build(:user)) }
it_behaves_like "a service that downloads the file and registers the sync result", 'avatar'
it_behaves_like 'a service that handles orphaned uploads', 'avatar'
end
context 'group avatar' do
let(:file) { create(:upload, model: build(:group)) }
it_behaves_like "a service that downloads the file and registers the sync result", 'avatar'
it_behaves_like 'a service that handles orphaned uploads', 'avatar'
end
context 'project avatar' do
let(:file) { create(:upload, model: build(:project)) }
it_behaves_like "a service that downloads the file and registers the sync result", 'avatar'
it_behaves_like 'a service that handles orphaned uploads', 'avatar'
end
context 'with an attachment' do
let(:file) { create(:upload, :attachment_upload) }
it_behaves_like "a service that downloads the file and registers the sync result", 'attachment'
it_behaves_like 'a service that handles orphaned uploads', 'attachment'
end
context 'with a favicon' do
let(:file) { create(:upload, :favicon_upload) }
it_behaves_like "a service that downloads the file and registers the sync result", 'favicon'
it_behaves_like 'a service that handles orphaned uploads', 'favicon'
end
it 'sets a retry date and increments the retry count' do
freeze_time do
execute!
context 'with a snippet' do
let(:file) { create(:upload, :personal_snippet_upload) }
expect(geo_job_artifact_registry.reload.retry_count).to eq(4)
expect(geo_job_artifact_registry.retry_at > Time.current).to be_truthy
end
end
it_behaves_like "a service that downloads the file and registers the sync result", 'personal_file'
it_behaves_like 'a service that handles orphaned uploads', 'personal_file'
end
it 'sets a retry date with a maximum of about 4 hours' do
geo_job_artifact_registry.update!(retry_count: 100, retry_at: 1.minute.ago)
context 'with file upload' do
let(:file) { create(:upload, :issuable_upload) }
freeze_time do
execute!
it_behaves_like "a service that downloads the file and registers the sync result", 'file'
it_behaves_like 'a service that handles orphaned uploads', 'file'
end
expect(geo_job_artifact_registry.reload.retry_at).to be_within(3.minutes).of(4.hours.from_now)
end
end
end
context 'with namespace file upload' do
let(:file) { create(:upload, :namespace_upload) }
context 'when the file is not missing on the primary' do
before do
stub_transfer_result(bytes_downloaded: 0, success: false)
end
it_behaves_like "a service that downloads the file and registers the sync result", 'namespace_file'
it_behaves_like 'a service that handles orphaned uploads', 'namespace_file'
end
it 'does not register a new file' do
expect { execute! }.not_to change { registry.count }
end
context 'with an incident metrics upload' do
let(:file) { create(:upload, :issue_metric_image) }
it 'does not change the success flag' do
expect { execute! }.not_to change { registry.failed.count }
end
it_behaves_like 'a service that downloads the file and registers the sync result', 'issuable_metric_image'
it_behaves_like 'a service that handles orphaned uploads', 'issuable_metric_image'
end
it 'does not mark the file as missing on the primary' do
execute!
context 'job artifacts' do
it_behaves_like "a service that downloads the file and registers the sync result", 'job_artifact' do
let(:file) { create(:ci_job_artifact) }
end
end
expect(geo_job_artifact_registry.reload.missing_on_primary).to be_falsey
end
context 'Import/Export' do
let(:file) { create(:upload, model: build(:import_export_upload)) }
it 'sets a retry date and increments the retry count' do
freeze_time do
execute!
it_behaves_like "a service that downloads the file and registers the sync result", 'import_export'
it_behaves_like 'a service that handles orphaned uploads', 'import_export'
end
expect(geo_job_artifact_registry.reload.retry_count).to eq(4)
expect(geo_job_artifact_registry.retry_at > Time.current).to be_truthy
end
end
context 'with bulk imports export upload' do
let(:file) { create(:upload, model: build(:bulk_import_export_upload)) }
it 'sets a retry date with a maximum of about 1 hour' do
geo_job_artifact_registry.update!(retry_count: 100, retry_at: 1.minute.ago)
it_behaves_like 'a service that downloads the file and registers the sync result', :'bulk_imports/export'
it_behaves_like 'a service that handles orphaned uploads', :'bulk_imports/export'
end
freeze_time do
execute!
context 'bad object type' do
it 'raises an error' do
expect { described_class.new(:bad, 1).execute }.to raise_error(NotImplementedError)
expect(geo_job_artifact_registry.reload.retry_at).to be_within(3.minutes).of(1.hour.from_now)
end
end
end
end
end
end
end
......
......@@ -21,42 +21,6 @@ RSpec.describe Geo::FileRegistryRemovalService, :geo do
described_class.new(:lfs, 99).execute
end
shared_examples 'removes' do
subject(:service) { described_class.new(registry.file_type, registry.file_id) }
before do
stub_exclusive_lease("file_registry_removal_service:#{registry.file_type}:#{registry.file_id}",
timeout: Geo::FileRegistryRemovalService::LEASE_TIMEOUT)
end
it 'file from disk' do
expect do
service.execute
end.to change { File.exist?(file_path) }.from(true).to(false)
end
it 'deletes registry entry' do
expect do
service.execute
end.to change(Geo::UploadRegistry, :count).by(-1)
end
end
shared_examples 'removes registry entry' do
subject(:service) { described_class.new(registry.file_type, registry.file_id) }
before do
stub_exclusive_lease("file_registry_removal_service:#{registry.file_type}:#{registry.file_id}",
timeout: Geo::FileRegistryRemovalService::LEASE_TIMEOUT)
end
it 'deletes registry entry' do
expect do
service.execute
end.to change(Geo::UploadRegistry, :count).by(-1)
end
end
shared_examples 'removes artifact' do
subject(:service) { described_class.new('job_artifact', registry.artifact_id) }
......@@ -221,158 +185,6 @@ RSpec.describe Geo::FileRegistryRemovalService, :geo do
end
end
context 'with avatar' do
let!(:upload) { create(:user, :with_avatar).avatar.upload }
let!(:registry) { create(:geo_upload_legacy_registry, :avatar, file_id: upload.id) }
let!(:file_path) { upload.retrieve_uploader.file.path }
it_behaves_like 'removes'
context 'migrated to object storage' do
before do
stub_uploads_object_storage(AvatarUploader)
upload.update_column(:store, AvatarUploader::Store::REMOTE)
end
context 'with object storage enabled' do
it_behaves_like 'removes'
end
context 'with object storage disabled' do
before do
stub_uploads_object_storage(AvatarUploader, enabled: false)
end
it_behaves_like 'removes registry entry'
end
end
end
context 'with attachment' do
let!(:upload) { create(:note, :with_attachment).attachment.upload }
let!(:registry) { create(:geo_upload_legacy_registry, :attachment, file_id: upload.id) }
let!(:file_path) { upload.retrieve_uploader.file.path }
it_behaves_like 'removes'
context 'migrated to object storage' do
before do
stub_uploads_object_storage(AttachmentUploader)
upload.update_column(:store, AttachmentUploader::Store::REMOTE)
end
context 'with object storage enabled' do
it_behaves_like 'removes'
end
context 'with object storage disabled' do
before do
stub_uploads_object_storage(AttachmentUploader, enabled: false)
end
it_behaves_like 'removes registry entry'
end
end
end
context 'with namespace_file' do
let_it_be(:group) { create(:group) }
let(:file) { fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
let!(:upload) do
NamespaceFileUploader.new(group).store!(file)
Upload.find_by(model: group, uploader: NamespaceFileUploader.name)
end
let!(:registry) { create(:geo_upload_legacy_registry, :namespace_file, file_id: upload.id) }
let!(:file_path) { upload.retrieve_uploader.file.path }
it_behaves_like 'removes'
context 'migrated to object storage' do
before do
stub_uploads_object_storage(NamespaceFileUploader)
upload.update_column(:store, NamespaceFileUploader::Store::REMOTE)
end
context 'with object storage enabled' do
it_behaves_like 'removes'
end
context 'with object storage disabled' do
before do
stub_uploads_object_storage(NamespaceFileUploader, enabled: false)
end
it_behaves_like 'removes registry entry'
end
end
end
context 'with personal_file' do
let(:snippet) { create(:personal_snippet) }
let(:file) { fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
let!(:upload) do
PersonalFileUploader.new(snippet).store!(file)
Upload.find_by(model: snippet, uploader: PersonalFileUploader.name)
end
let!(:registry) { create(:geo_upload_legacy_registry, :personal_file, file_id: upload.id) }
let!(:file_path) { upload.retrieve_uploader.file.path }
context 'migrated to object storage' do
before do
stub_uploads_object_storage(PersonalFileUploader)
upload.update_column(:store, PersonalFileUploader::Store::REMOTE)
end
context 'with object storage enabled' do
it_behaves_like 'removes'
end
context 'with object storage disabled' do
before do
stub_uploads_object_storage(PersonalFileUploader, enabled: false)
end
it_behaves_like 'removes registry entry'
end
end
end
context 'with favicon' do
let(:appearance) { create(:appearance) }
let(:file) { fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
let!(:upload) do
FaviconUploader.new(appearance).store!(file)
Upload.find_by(model: appearance, uploader: FaviconUploader.name)
end
let!(:registry) { create(:geo_upload_legacy_registry, :favicon, file_id: upload.id) }
let!(:file_path) { upload.retrieve_uploader.file.path }
it_behaves_like 'removes'
context 'migrated to object storage' do
before do
stub_uploads_object_storage(FaviconUploader)
upload.update_column(:store, FaviconUploader::Store::REMOTE)
end
context 'with object storage enabled' do
it_behaves_like 'removes'
end
context 'with object storage disabled' do
before do
stub_uploads_object_storage(FaviconUploader, enabled: false)
end
it_behaves_like 'removes registry entry'
end
end
end
context 'with package file' do
let(:package_file) { create(:package_file_with_file) }
let!(:registry) { create(:geo_package_file_registry, package_file: package_file) }
......@@ -407,30 +219,28 @@ RSpec.describe Geo::FileRegistryRemovalService, :geo do
end
end
context 'with Uploads(after migrating to SSF)' do
context 'with uploads' do
let!(:upload) { create(:user, :with_avatar).avatar.upload }
let!(:registry) { create(:geo_upload_registry, file_id: upload.id) }
let!(:file_path) { upload.retrieve_uploader.file.path }
it_behaves_like 'removes'
context 'migrated to object storage' do
before do
stub_uploads_object_storage(AvatarUploader)
upload.update_column(:store, AvatarUploader::Store::REMOTE)
end
subject(:service) { described_class.new('upload', registry.file_id) }
context 'with object storage enabled' do
it_behaves_like 'removes'
end
before do
stub_exclusive_lease("file_registry_removal_service:upload:#{registry.file_id}",
timeout: Geo::FileRegistryRemovalService::LEASE_TIMEOUT)
end
context 'with object storage disabled' do
before do
stub_uploads_object_storage(AvatarUploader, enabled: false)
end
it 'file from disk' do
expect do
service.execute
end.to change { File.exist?(file_path) }.from(true).to(false)
end
it_behaves_like 'removes registry entry'
end
it 'deletes registry entry' do
expect do
service.execute
end.to change(Geo::UploadRegistry, :count).by(-1)
end
end
end
......
......@@ -36,10 +36,6 @@ RSpec.describe Geo::MetricsUpdateService, :geo, :prometheus do
design_repositories_count: 100,
design_repositories_synced_count: 50,
design_repositories_failed_count: 12,
attachments_count: 30,
attachments_synced_count: 30,
attachments_failed_count: 25,
attachments_synced_missing_on_primary_count: 6,
last_event_id: 2,
last_event_date: event_date,
cursor_last_event_id: 1,
......@@ -63,7 +59,6 @@ RSpec.describe Geo::MetricsUpdateService, :geo, :prometheus do
projects_count: 10,
lfs_objects_count: 100,
job_artifacts_count: 100,
attachments_count: 30,
container_repositories_count: 100,
last_event_id: 2,
last_event_date: event_date,
......@@ -171,10 +166,6 @@ RSpec.describe Geo::MetricsUpdateService, :geo, :prometheus do
expect(metric_value(:geo_job_artifacts_synced)).to eq(50)
expect(metric_value(:geo_job_artifacts_failed)).to eq(12)
expect(metric_value(:geo_job_artifacts_synced_missing_on_primary)).to eq(5)
expect(metric_value(:geo_attachments)).to eq(30)
expect(metric_value(:geo_attachments_synced)).to eq(30)
expect(metric_value(:geo_attachments_failed)).to eq(25)
expect(metric_value(:geo_attachments_synced_missing_on_primary)).to eq(6)
expect(metric_value(:geo_last_event_id)).to eq(2)
expect(metric_value(:geo_last_event_timestamp)).to eq(event_date.to_i)
expect(metric_value(:geo_cursor_last_event_id)).to eq(1)
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Geo::UploadDeletedEventStore do
include EE::GeoHelpers
let_it_be(:secondary_node) { create(:geo_node) }
let(:upload) { create(:upload) }
subject { described_class.new(upload) }
describe '#create!' do
it_behaves_like 'a Geo event store', Geo::UploadDeletedEvent do
let(:file_subject) { upload }
end
context 'when running on a primary node' do
before do
stub_primary_node
end
it 'tracks upload attributes' do
subject.create!
expect(Geo::UploadDeletedEvent.last).to have_attributes(
upload_id: upload.id,
file_path: upload.path,
model_id: upload.model_id,
model_type: upload.model_type,
uploader: upload.uploader
)
end
end
end
end
......@@ -24,7 +24,5 @@ RSpec.shared_examples 'allowlisted /admin/geo requests' do
it_behaves_like 'allowlisted request', :post, '/admin/geo/replication/projects/resync_all'
it_behaves_like 'allowlisted request', :post, '/admin/geo/replication/projects/1/force_redownload'
it_behaves_like 'allowlisted request', :delete, '/admin/geo/replication/legacy-uploads/1'
end
end
......@@ -84,7 +84,12 @@ RSpec.describe 'Every GitLab uploader' do
def handled_by_ssf?(uploader)
replicable_name = replicable_name_for(uploader)
replicable_names.include?(replicable_name)
replicable_names.include?(replicable_name) || uploads?(uploader)
end
def uploads?(uploader)
upload_name = uploader.name.delete_suffix('Uploader').underscore
Gitlab::Geo::Replication.object_type_from_user_uploads?(upload_name)
end
def object_types
......
......@@ -18,8 +18,6 @@ RSpec.describe Geo::FileDownloadDispatchWorker, :geo, :use_sql_query_cache_for_t
WebMock.stub_request(:get, /primary-geo-node/).to_return(status: 200, body: "", headers: {})
stub_feature_flags(geo_upload_replication: false)
allow(Geo::FileDownloadWorker).to receive(:with_status).and_return(Geo::FileDownloadWorker)
end
......@@ -48,113 +46,6 @@ RSpec.describe Geo::FileDownloadDispatchWorker, :geo, :use_sql_query_cache_for_t
subject.perform
end
context 'with attachments (Upload records)' do
let(:upload) { create(:upload) }
it 'performs Geo::FileDownloadWorker for unsynced attachments' do
create(:geo_upload_legacy_registry, :avatar, :never_synced, file_id: upload.id)
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', upload.id)
subject.perform
end
it 'performs Geo::FileDownloadWorker for failed-sync attachments' do
create(:geo_upload_legacy_registry, :avatar, :failed, file_id: upload.id, bytes: 0)
expect(Geo::FileDownloadWorker).to receive(:perform_async)
.with('avatar', upload.id).once.and_return(spy)
subject.perform
end
it 'does not perform Geo::FileDownloadWorker for synced attachments' do
create(:geo_upload_legacy_registry, :avatar, file_id: upload.id, bytes: 1234)
expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
subject.perform
end
it 'does not perform Geo::FileDownloadWorker for synced attachments even with 0 bytes downloaded' do
create(:geo_upload_legacy_registry, :avatar, file_id: upload.id, bytes: 0)
expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
subject.perform
end
context 'with a failed file' do
let(:failed_registry) { create(:geo_upload_legacy_registry, :avatar, :failed, file_id: non_existing_record_id) }
it 'does not stall backfill' do
unsynced_registry = create(:geo_upload_legacy_registry, :avatar, :with_file, :never_synced)
stub_const('Geo::Scheduler::SchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 1)
expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with('avatar', failed_registry.file_id)
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', unsynced_registry.file_id)
subject.perform
end
it 'retries failed files' do
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', failed_registry.file_id)
subject.perform
end
it 'does not retry failed files when retry_at is tomorrow' do
failed_registry = create(:geo_upload_legacy_registry, :avatar, :failed, file_id: non_existing_record_id, retry_at: Date.tomorrow)
expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with('avatar', failed_registry.file_id)
subject.perform
end
it 'retries failed files when retry_at is in the past' do
failed_registry = create(:geo_upload_legacy_registry, :avatar, :failed, file_id: non_existing_record_id, retry_at: Date.yesterday)
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', failed_registry.file_id)
subject.perform
end
end
context 'with Upload files missing on the primary that are marked as synced' do
let(:synced_upload_with_file_missing_on_primary) { create(:upload) }
before do
Geo::UploadRegistry.create!(file_type: :avatar, file_id: synced_upload_with_file_missing_on_primary.id, bytes: 1234, success: true, missing_on_primary: true)
end
it 'retries the files if there is spare capacity' do
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', synced_upload_with_file_missing_on_primary.id)
subject.perform
end
it 'does not retry those files if there is no spare capacity' do
unsynced_registry = create(:geo_upload_legacy_registry, :avatar, :with_file, :never_synced)
expect(subject).to receive(:db_retrieve_batch_size).and_return(1).twice
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', unsynced_registry.file_id)
subject.perform
end
it 'does not retry those files if they are already scheduled' do
unsynced_registry = create(:geo_upload_legacy_registry, :avatar, :with_file, :never_synced)
scheduled_jobs = [{ type: 'avatar', id: synced_upload_with_file_missing_on_primary.id, job_id: 'foo' }]
expect(subject).to receive(:scheduled_jobs).and_return(scheduled_jobs).at_least(1)
expect(Geo::FileDownloadWorker).to receive(:perform_async).with('avatar', unsynced_registry.file_id)
subject.perform
end
end
end
context 'with job artifacts' do
it 'performs Geo::FileDownloadWorker for unsynced job artifacts' do
registry = create(:geo_job_artifact_registry, :with_artifact, :never_synced)
......@@ -280,14 +171,9 @@ RSpec.describe Geo::FileDownloadDispatchWorker, :geo, :use_sql_query_cache_for_t
result_object = double(:result, success: true, bytes_downloaded: 100, primary_missing_file: false)
allow_any_instance_of(::Gitlab::Geo::Replication::BaseTransfer).to receive(:download_from_primary).and_return(result_object)
create_list(:geo_upload_legacy_registry, 2, :avatar, :with_file, :never_synced)
create_list(:geo_upload_legacy_registry, 2, :attachment, :with_file, :never_synced)
create(:geo_upload_legacy_registry, :favicon, :with_file, :never_synced)
create(:geo_upload_legacy_registry, :import_export, :with_file, :never_synced)
create(:geo_upload_legacy_registry, :personal_file, :with_file, :never_synced)
create(:geo_job_artifact_registry, :with_artifact, :never_synced)
create_list(:geo_job_artifact_registry, 6, :with_artifact, :never_synced)
expect(Geo::FileDownloadWorker).to receive(:perform_async).exactly(8).times.and_call_original
expect(Geo::FileDownloadWorker).to receive(:perform_async).exactly(6).times.and_call_original
# For 10 downloads, we expect four database reloads:
# 1. Load the first batch of 5.
# 2. 4 get sent out, 1 remains. This triggers another reload, which loads in the next 5.
......
......@@ -220,7 +220,6 @@ geo_repository_deleted_events: :gitlab_main
geo_repository_renamed_events: :gitlab_main
geo_repository_updated_events: :gitlab_main
geo_reset_checksum_events: :gitlab_main
geo_upload_deleted_events: :gitlab_main
gitlab_subscription_histories: :gitlab_main
gitlab_subscriptions: :gitlab_main
gpg_keys: :gitlab_main
......
......@@ -15340,9 +15340,6 @@ msgstr ""
msgid "Geo|Could not remove tracking entry for an existing project."
msgstr ""
msgid "Geo|Could not remove tracking entry for an existing upload."
msgstr ""
msgid "Geo|Data replication lag"
msgstr ""
......@@ -15616,9 +15613,6 @@ msgstr ""
msgid "Geo|Tracking entry for project (%{project_id}) was successfully removed."
msgstr ""
msgid "Geo|Tracking entry for upload (%{type}/%{id}) was successfully removed."
msgstr ""
msgid "Geo|URL can't be blank"
msgstr ""
......@@ -28894,9 +28888,6 @@ msgstr ""
msgid "Removed %{reviewer_text} %{reviewer_references}."
msgstr ""
msgid "Removed %{type} with id %{id}"
msgstr ""
msgid "Removed all labels."
msgstr ""
......@@ -28921,6 +28912,9 @@ msgstr ""
msgid "Removed time estimate."
msgstr ""
msgid "Removed upload with id %{id}"
msgstr ""
msgid "RemovedProjects|Projects which are removed and are yet to be permanently removed are visible here."
msgstr ""
......@@ -37209,9 +37203,6 @@ msgstr ""
msgid "Uploading changes to terminal"
msgstr ""
msgid "Uploads"
msgstr ""
msgid "Upon performing this action, the contents of this group, its subgroup and projects will be permanently deleted after %{deletion_adjourned_period} days on %{date}. Until that time:"
msgstr ""
......
......@@ -48,7 +48,6 @@ RSpec.describe 'Database schema' do
geo_node_statuses: %w[last_event_id cursor_last_event_id],
geo_nodes: %w[oauth_application_id],
geo_repository_deleted_events: %w[project_id],
geo_upload_deleted_events: %w[upload_id model_id],
gitlab_subscription_histories: %w[gitlab_subscription_id hosted_plan_id namespace_id],
identities: %w[user_id],
import_failures: %w[project_id],
......