Commit ad66b824 authored by Sean McGivern

Merge branch 'include-worker-metadata-in-all-queues-yml' into 'master'

Add Rake task to generate all_queues.yml

See merge request gitlab-org/gitlab!22892
parents 22d37c56 32f0cd79
# This file is generated automatically by
# bin/rake gitlab:sidekiq:all_queues_yml:generate
#
# Do not edit it manually!
---
- auto_devops:auto_devops_disable
- auto_merge:auto_merge_process
- chaos:chaos_cpu_spin
- chaos:chaos_db_spin
- chaos:chaos_kill
- chaos:chaos_leak_mem
- chaos:chaos_sleep
- container_repository:cleanup_container_repository
- container_repository:delete_container_repository
- cronjob:admin_email
- cronjob:ci_archive_traces_cron
- cronjob:container_expiration_policy
- cronjob:expire_build_artifacts
- cronjob:gitlab_usage_ping
- cronjob:import_export_project_cleanup
- cronjob:pages_domain_verification_cron
- cronjob:issue_due_scheduler
- cronjob:namespaces_prune_aggregation_schedules
- cronjob:pages_domain_removal_cron
- cronjob:pages_domain_ssl_renewal_cron
- cronjob:personal_access_tokens_expiring
- cronjob:pipeline_schedule
- cronjob:prune_old_events
- cronjob:prune_web_hook_logs
- cronjob:remove_expired_group_links
- cronjob:remove_expired_members
- cronjob:remove_unreferenced_lfs_objects
- cronjob:repository_archive_cache
- cronjob:repository_check_dispatch
- cronjob:requests_profiles
- cronjob:schedule_migrate_external_diffs
- cronjob:stuck_ci_jobs
- cronjob:stuck_import_jobs
- cronjob:stuck_merge_jobs
- cronjob:trending_projects
- deployment:deployments_finished
- deployment:deployments_success
- gcp_cluster:cluster_configure
- gcp_cluster:cluster_install_app
- gcp_cluster:cluster_patch_app
- gcp_cluster:cluster_upgrade_app
- gcp_cluster:cluster_project_configure
- gcp_cluster:cluster_provision
- gcp_cluster:clusters_cleanup_app
- gcp_cluster:clusters_cleanup_project_namespace
- gcp_cluster:clusters_cleanup_service_account
- gcp_cluster:cluster_wait_for_app_installation
- gcp_cluster:wait_for_cluster_creation
- gcp_cluster:cluster_wait_for_ingress_ip_address
- gcp_cluster:clusters_applications_wait_for_uninstall_app
- gcp_cluster:clusters_applications_activate_service
- gcp_cluster:clusters_applications_deactivate_service
- gcp_cluster:clusters_applications_uninstall
- github_importer:github_import_import_diff_note
- github_importer:github_import_import_issue
- github_importer:github_import_import_note
- github_importer:github_import_import_lfs_object
- github_importer:github_import_import_pull_request
- github_importer:github_import_refresh_import_jid
- github_importer:github_import_stage_finish_import
- github_importer:github_import_stage_import_base_data
- github_importer:github_import_stage_import_issues_and_diff_notes
- github_importer:github_import_stage_import_notes
- github_importer:github_import_stage_import_lfs_objects
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
- hashed_storage:hashed_storage_migrator
- hashed_storage:hashed_storage_rollbacker
- hashed_storage:hashed_storage_project_migrate
- hashed_storage:hashed_storage_project_rollback
- mail_scheduler:mail_scheduler_issue_due
- mail_scheduler:mail_scheduler_notification_service
- notifications:new_release
- object_pool:object_pool_create
- object_pool:object_pool_destroy
- object_pool:object_pool_join
- object_pool:object_pool_schedule_join
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- pipeline_background:archive_trace
- pipeline_background:ci_build_trace_chunk_flush
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
- pipeline_creation:run_pipeline_schedule
- pipeline_default:build_coverage
- pipeline_default:build_trace_sections
- pipeline_default:pipeline_metrics
@@ -95,74 +95,67 @@
- pipeline_hooks:build_hooks
- pipeline_hooks:pipeline_hooks
- pipeline_processing:build_finished
- pipeline_processing:ci_build_prepare
- pipeline_processing:build_queue
- pipeline_processing:build_success
- pipeline_processing:ci_build_schedule
- pipeline_processing:ci_resource_groups_assign_resource_from_resource_group
- pipeline_processing:pipeline_process
- pipeline_processing:pipeline_success
- pipeline_processing:pipeline_update
- pipeline_processing:stage_update
- pipeline_processing:update_head_pipeline_for_merge_request
- repository_check:repository_check_clear
- repository_check:repository_check_batch
- repository_check:repository_check_single_repository
- todos_destroyer:todos_destroyer_confidential_issue
- todos_destroyer:todos_destroyer_entity_leave
- todos_destroyer:todos_destroyer_group_private
- todos_destroyer:todos_destroyer_project_private
- todos_destroyer:todos_destroyer_private_features
- update_namespace_statistics:namespaces_schedule_aggregation
- update_namespace_statistics:namespaces_root_statistics
- default
- mailers # ActionMailer::DeliveryJob.queue_name
- authorized_projects
- background_migration
- chat_notification
- create_evidence
- create_gpg_signature
- create_note_diff_file
- delete_diff_files
- delete_merged_branches
- delete_stored_files
- delete_user
- detect_repository_languages
- email_receiver
- emails_on_push
- error_tracking_issue_link
- expire_build_instance_artifacts
- file_hook
- git_garbage_collect
- github_import_advance_stage
- gitlab_shell
- group_destroy
- group_export
- import_issues_csv
- invalid_gpg_signature_update
- irker
- merge
- merge_request_mergeability_check
- migrate_external_diffs
- namespaceless_project_destroy
- new_issue
- new_merge_request
- new_note
- pages
- pages_domain_verification
- pages_domain_ssl_renewal
- phabricator_import_import_tasks
- post_receive
- process_commit
- project_cache
- project_daily_statistics
- project_destroy
- project_export
- project_service
@@ -170,26 +163,16 @@
- reactive_caching
- rebase
- remote_mirror_notification
- repository_cleanup
- repository_fork
- repository_import
- repository_remove_remote
- repository_update_remote_mirror
- self_monitoring_project_create
- self_monitoring_project_delete
- system_hook_push
- update_external_pull_requests
- update_merge_requests
- update_project_statistics
- upload_checksum
- web_hook
@@ -268,8 +268,9 @@ default value can be found in `/opt/gitlab/etc/gitlab-rails/env/RAILS_ENV`.
### Using negation
You're able to run all queues in the `all_queues.yml` file (or the equivalent EE
file) on a single or multiple processes with exceptions using the `--negate`
flag.
For example, say you want to run a single process for all queues,
except `process_commit` and `post_receive`:
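A sketch of what that invocation could look like on an Omnibus installation (the
`sidekiq-cluster` path shown here is an assumption and depends on how GitLab is
installed):

```shell
/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster --negate process_commit,post_receive
```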
@@ -17,8 +17,11 @@ would be `process_something`. If you're not sure what queue a worker uses,
you can find it using `SomeWorker.queue`. There is almost never a reason to
manually override the queue name using `sidekiq_options queue: :some_queue`.
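If in doubt, you can check this in a Rails console; `PostReceive` below is only
an illustration:

```ruby
# Print the queue a worker's jobs are pushed to.
PostReceive.queue
# => "post_receive"
```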
After adding a new queue, run `bin/rake gitlab:sidekiq:all_queues_yml:generate`
to regenerate `app/workers/all_queues.yml` or `ee/app/workers/all_queues.yml`
so that it can be picked up by
[`sidekiq-cluster`](../administration/operations/extra_sidekiq_processes.md).
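For example, after adding or renaming a worker, the workflow would roughly be:

```shell
# Regenerate the metadata files from the worker definitions
bin/rake gitlab:sidekiq:all_queues_yml:generate

# Verify that the YAML files now match the application code
bin/rake gitlab:sidekiq:all_queues_yml:check
```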
## Queue Namespaces
# This file is generated automatically by
# bin/rake gitlab:sidekiq:all_queues_yml:generate
#
# Do not edit it manually!
---
- cronjob:adjourned_group_deletion
- cronjob:adjourned_projects_deletion_cron
- cronjob:clear_shared_runners_minutes
- cronjob:geo_container_repository_sync_dispatch
- cronjob:geo_file_download_dispatch
- cronjob:geo_metrics_update
- cronjob:geo_prune_event_log
- cronjob:geo_migrated_local_files_clean_up
- cronjob:geo_repository_sync
- cronjob:geo_repository_verification_primary_batch
- cronjob:geo_repository_verification_secondary_scheduler
- cronjob:geo_sidekiq_cron_config
- cronjob:geo_repository_verification_secondary_shard
- cronjob:geo_scheduler_per_shard_scheduler
- cronjob:geo_scheduler_primary_per_shard_scheduler
- cronjob:geo_scheduler_secondary_per_shard_scheduler
- cronjob:historical_data
- cronjob:import_software_licenses
- cronjob:ldap_all_groups_sync
- cronjob:ldap_sync
- cronjob:update_all_mirrors
- cronjob:pseudonymizer
- cronjob:update_max_seats_used_for_gitlab_com_subscriptions
- epics:epics_update_epics_dates
- gcp_cluster:cluster_update_app
- gcp_cluster:cluster_wait_for_app_update
- geo:geo_batch_project_registry
- geo:geo_batch_project_registry_scheduler
- geo:geo_container_repository_sync
- geo:geo_design_repository_shard_sync
- geo:geo_design_repository_sync
- geo:geo_file_download
- geo:geo_file_removal
- geo:geo_file_registry_removal
- geo:geo_hashed_storage_attachments_migration
- geo:geo_hashed_storage_migration
- geo:geo_project_sync
- geo:geo_rename_repository
- geo:geo_secondary_repository_backfill
- geo:geo_repositories_clean_up
- geo:geo_repository_cleanup
- geo:geo_repository_destroy
- geo:geo_repository_shard_sync
- geo:geo_repository_verification_primary_shard
- geo:geo_repository_verification_primary_single
- geo:geo_repository_verification_secondary_single
- geo:geo_scheduler_scheduler
- geo:geo_scheduler_primary_scheduler
- geo:geo_scheduler_secondary_scheduler
- incident_management:incident_management_process_alert
- incident_management:incident_management_process_prometheus_alert
- jira_connect:jira_connect_sync_branch
- jira_connect:jira_connect_sync_merge_request
- personal_access_tokens:personal_access_tokens_policy
- pipeline_default:ci_create_cross_project_pipeline
- pipeline_default:ci_pipeline_bridge_status
- pipeline_default:store_security_reports
- pipeline_default:sync_security_reports_to_report_approval_rules
- adjourned_project_deletion
- admin_emails
- create_github_webhook
- design_management_new_version
- elastic_batch_project_indexer
- elastic_namespace_indexer
- elastic_commit_indexer
- elastic_indexer
- elastic_full_index
- elastic_namespace_rollout
- export_csv
- ldap_group_sync
- new_epic
- project_import_schedule
- project_update_repository_storage
- rebase
- refresh_license_compliance_checks
- repository_update_mirror
- repository_push_audit_event
@@ -5,10 +5,10 @@ require 'spec_helper'
describe Gitlab::SidekiqConfig do
describe '.workers' do
it 'includes EE workers' do
worker_classes = described_class.workers.map(&:klass)
expect(worker_classes).to include(RepositoryUpdateMirrorWorker)
expect(worker_classes).to include(LdapGroupSyncWorker)
end
end
@@ -41,4 +41,42 @@ describe Gitlab::SidekiqConfig do
expect(queues).to include('unknown')
end
end
describe '.workers_for_all_queues_yml' do
it 'returns a tuple with EE workers second' do
expect(described_class.workers_for_all_queues_yml.second)
.to include(an_object_having_attributes(queue: 'repository_update_mirror'))
end
end
describe '.all_queues_yml_outdated?' do
before do
workers = [
LdapGroupSyncWorker,
RepositoryUpdateMirrorWorker
].map { |worker| described_class::Worker.new(worker, ee: true) }
allow(described_class).to receive(:workers).and_return(workers)
allow(File).to receive(:read)
.with(described_class::FOSS_QUEUE_CONFIG_PATH)
.and_return(YAML.dump([]))
end
it 'returns true if the YAML file does not match the application code' do
allow(File).to receive(:read)
.with(described_class::EE_QUEUE_CONFIG_PATH)
.and_return(YAML.dump(['ldap_group_sync']))
expect(described_class.all_queues_yml_outdated?).to be(true)
end
it 'returns false if the YAML file matches the application code' do
allow(File).to receive(:read)
.with(described_class::EE_QUEUE_CONFIG_PATH)
.and_return(YAML.dump(%w[ldap_group_sync repository_update_mirror]))
expect(described_class.all_queues_yml_outdated?).to be(false)
end
end
end
@@ -4,6 +4,22 @@ require 'yaml'
module Gitlab
module SidekiqConfig
FOSS_QUEUE_CONFIG_PATH = 'app/workers/all_queues.yml'
EE_QUEUE_CONFIG_PATH = 'ee/app/workers/all_queues.yml'
QUEUE_CONFIG_PATHS = [
FOSS_QUEUE_CONFIG_PATH,
(EE_QUEUE_CONFIG_PATH if Gitlab.ee?)
].compact.freeze
# For queues that don't have explicit workers - default and mailers
DummyWorker = Struct.new(:queue)
DEFAULT_WORKERS = [
Gitlab::SidekiqConfig::Worker.new(DummyWorker.new('default'), ee: false),
Gitlab::SidekiqConfig::Worker.new(DummyWorker.new('mailers'), ee: false)
].freeze
class << self
include Gitlab::SidekiqConfig::CliMethods
@@ -25,28 +41,46 @@ module Gitlab
def workers
@workers ||= begin
result = []
result.concat(DEFAULT_WORKERS)
result.concat(find_workers(Rails.root.join('app', 'workers'), ee: false))
if Gitlab.ee?
result.concat(find_workers(Rails.root.join('ee', 'app', 'workers'), ee: true))
end
result
end
end
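# Returns a two-element array of sorted workers: FOSS workers first, then
# EE-only workers. This is the structure written to the all_queues.yml files.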
def workers_for_all_queues_yml
workers.partition(&:ee?).reverse.map(&:sort)
end
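# True when either generated all_queues.yml file no longer matches the worker
# classes defined in the application code (the EE file is only checked on EE).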
def all_queues_yml_outdated?
foss_workers, ee_workers = workers_for_all_queues_yml
return true if foss_workers != YAML.safe_load(File.read(FOSS_QUEUE_CONFIG_PATH))
Gitlab.ee? && ee_workers != YAML.safe_load(File.read(EE_QUEUE_CONFIG_PATH))
end
private
def find_workers(root, ee:)
concerns = root.join('concerns').to_s
Dir[root.join('**', '*.rb')]
.reject { |path| path.start_with?(concerns) }
.map { |path| worker_from_path(path, root) }
.select { |worker| worker < Sidekiq::Worker }
.map { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: ee) }
end
def worker_from_path(path, root)
ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
ns.camelize.constantize
end
end
end
# frozen_string_literal: true
module Gitlab
module SidekiqConfig
class Worker
include Comparable
attr_reader :klass
delegate :feature_category_not_owned?, :get_feature_category,
:get_worker_resource_boundary, :latency_sensitive_worker?,
:queue, :worker_has_external_dependencies?,
to: :klass
def initialize(klass, ee:)
@klass = klass
@ee = ee
end
def ee?
@ee
end
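# A worker is equal to another worker with the same YAML representation (its
# queue name), and also to that raw representation itself, which makes
# comparing against entries loaded from all_queues.yml straightforward.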
def ==(other)
to_yaml == case other
when self.class
other.to_yaml
else
other
end
end
def <=>(other)
to_sort <=> other.to_sort
end
# Put namespaced queues first
def to_sort
[queue.include?(':') ? 0 : 1, queue]
end
# YAML representation
def encode_with(coder)
coder.represent_scalar(nil, to_yaml)
end
def to_yaml
queue
end
end
end
end
# frozen_string_literal: true
return if Rails.env.production?
namespace :gitlab do
namespace :sidekiq do
namespace :all_queues_yml do
def write_yaml(path, object)
banner = <<~BANNER
# This file is generated automatically by
# bin/rake gitlab:sidekiq:all_queues_yml:generate
#
# Do not edit it manually!
BANNER
File.write(path, banner + YAML.dump(object))
end
desc 'GitLab | Generate all_queues.yml based on worker definitions'
task generate: :environment do
foss_workers, ee_workers = Gitlab::SidekiqConfig.workers_for_all_queues_yml
write_yaml(Gitlab::SidekiqConfig::FOSS_QUEUE_CONFIG_PATH, foss_workers)
if Gitlab.ee?
write_yaml(Gitlab::SidekiqConfig::EE_QUEUE_CONFIG_PATH, ee_workers)
end
end
desc 'GitLab | Validate that all_queues.yml matches worker definitions'
task check: :environment do
if Gitlab::SidekiqConfig.all_queues_yml_outdated?
raise <<~MSG
Changes in worker queues found, please update the metadata by running:
bin/rake gitlab:sidekiq:all_queues_yml:generate
Then commit and push the changes from:
- #{Gitlab::SidekiqConfig::FOSS_QUEUE_CONFIG_PATH}
- #{Gitlab::SidekiqConfig::EE_QUEUE_CONFIG_PATH}
MSG
end
end
end
end
end
@@ -34,6 +34,7 @@ unless Rails.env.production?
scss_lint
gettext:lint
lint:static_verification
gitlab:sidekiq:all_queues_yml:check
]
if Gitlab.ee?
# frozen_string_literal: true
require 'fast_spec_helper'
describe Gitlab::SidekiqConfig::Worker do
def worker_with_queue(queue)
described_class.new(double(queue: queue), ee: false)
end
describe '#ee?' do
it 'returns the EE status set on creation' do
expect(described_class.new(double, ee: true)).to be_ee
expect(described_class.new(double, ee: false)).not_to be_ee
end
end
describe '#==' do
def worker_with_yaml(yaml)
described_class.new(double, ee: false).tap do |worker|
allow(worker).to receive(:to_yaml).and_return(yaml)
end
end
it 'defines two workers as equal if their YAML representations are equal' do
expect(worker_with_yaml('a')).to eq(worker_with_yaml('a'))
expect(worker_with_yaml('a')).not_to eq(worker_with_yaml('b'))
end
it 'returns true when a worker is compared with its YAML representation' do
expect(worker_with_yaml('a')).to eq('a')
expect(worker_with_yaml(a: 1, b: 2)).to eq(a: 1, b: 2)
end
end
describe 'delegations' do
[
:feature_category_not_owned?, :get_feature_category,
:get_worker_resource_boundary, :latency_sensitive_worker?, :queue,
:worker_has_external_dependencies?
].each do |meth|
it "delegates #{meth} to the worker class" do
worker = double
expect(worker).to receive(meth)
described_class.new(worker, ee: false).send(meth)
end
end
end
describe 'sorting' do
it 'sorts queues with a namespace before those without a namespace' do
namespaced_worker = worker_with_queue('namespace:queue')
plain_worker = worker_with_queue('a_queue')
expect([plain_worker, namespaced_worker].sort)
.to eq([namespaced_worker, plain_worker])
end
it 'sorts alphabetically by queue' do
workers = [
worker_with_queue('namespace:a'),
worker_with_queue('namespace:b'),
worker_with_queue('other_namespace:a'),
worker_with_queue('other_namespace:b'),
worker_with_queue('a'),
worker_with_queue('b')
]
expect(workers.shuffle.sort).to eq(workers)
end
end
describe 'YAML encoding' do
it 'encodes the worker in YAML as a string of the queue' do
worker_a = worker_with_queue('a')
worker_b = worker_with_queue('b')
expect(YAML.dump(worker_a)).to eq(YAML.dump('a'))
expect(YAML.dump([worker_a, worker_b]))
.to eq(YAML.dump(%w[a b]))
end
end
end
@@ -5,10 +5,10 @@ require 'spec_helper'
describe Gitlab::SidekiqConfig do
describe '.workers' do
it 'includes all workers' do
worker_classes = described_class.workers.map(&:klass)
expect(worker_classes).to include(PostReceive)
expect(worker_classes).to include(MergeWorker)
end
end
@@ -44,4 +44,40 @@ describe Gitlab::SidekiqConfig do
expect(queues).to include('unknown')
end
end
describe '.workers_for_all_queues_yml' do
it 'returns a tuple with FOSS workers first' do
expect(described_class.workers_for_all_queues_yml.first)
.to include(an_object_having_attributes(queue: 'post_receive'))
end
end
describe '.all_queues_yml_outdated?' do
before do
workers = [
PostReceive,
MergeWorker,
ProcessCommitWorker
].map { |worker| described_class::Worker.new(worker, ee: false) }
allow(described_class).to receive(:workers).and_return(workers)
allow(Gitlab).to receive(:ee?).and_return(false)
end
it 'returns true if the YAML file does not match the application code' do
allow(File).to receive(:read)
.with(described_class::FOSS_QUEUE_CONFIG_PATH)
.and_return(YAML.dump(%w[post_receive merge]))
expect(described_class.all_queues_yml_outdated?).to be(true)
end
it 'returns false if the YAML file matches the application code' do
allow(File).to receive(:read)
.with(described_class::FOSS_QUEUE_CONFIG_PATH)
.and_return(YAML.dump(%w[merge post_receive process_commit]))
expect(described_class.all_queues_yml_outdated?).to be(false)
end
end
end
@@ -3,8 +3,12 @@
require 'spec_helper'
describe 'Every Sidekiq worker' do
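# DEFAULT_WORKERS are DummyWorker placeholders for the default and mailers
# queues; they have no worker class or annotations, so they are excluded from
# the per-worker checks below.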
let(:workers_without_defaults) do
Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS
end
it 'does not use the default queue' do
expect(workers_without_defaults.map(&:queue)).not_to include('default')
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
@@ -45,7 +49,7 @@ describe 'Every Sidekiq worker' do
# or explicitly be excluded with the `feature_category_not_owned!` annotation.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category or feature_category_not_owned! attribute', :aggregate_failures do
workers_without_defaults.each do |worker|
expect(worker.get_feature_category).to be_a(Symbol), "expected #{worker.inspect} to declare a feature_category or feature_category_not_owned!"
end
end
@@ -54,7 +58,7 @@ describe 'Every Sidekiq worker' do
# The category should match a value in `config/feature_categories.yml`.
# Please see doc/development/sidekiq_style_guide.md#Feature-Categorization for more details.
it 'has a feature_category that maps to a value in feature_categories.yml', :aggregate_failures do
workers_with_feature_categories = workers_without_defaults
.select(&:get_feature_category)
.reject(&:feature_category_not_owned?)
@@ -69,7 +73,7 @@ describe 'Every Sidekiq worker' do
# rather than scaling the hardware to meet the SLO. For this reason, memory-bound,
# latency-sensitive jobs are explicitly discouraged and disabled.
it 'is (exclusively) memory-bound or latency-sensitive, not both', :aggregate_failures do
latency_sensitive_workers = workers_without_defaults
.select(&:latency_sensitive_worker?)
latency_sensitive_workers.each do |worker|
@@ -86,7 +90,7 @@ describe 'Every Sidekiq worker' do
# Please see doc/development/sidekiq_style_guide.md#Jobs-with-External-Dependencies for more
# details.
it 'has (exclusively) external dependencies or is latency-sensitive, not both', :aggregate_failures do
latency_sensitive_workers = workers_without_defaults
.select(&:latency_sensitive_worker?)
latency_sensitive_workers.each do |worker|