Commit d47100b5 authored by Vitali Tatarintev

Merge branch 'pl-rubocop-todo-raise-args' into 'master'

Resolves rubocop offense Style/RaiseArgs [RUN AS-IF-FOSS]

See merge request gitlab-org/gitlab!58009
parents 4ea9f1bb d926e54c
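
For context on the changes below: RuboCop's Style/RaiseArgs cop, with the exploded style enforced here, prefers passing the exception class and its message to raise as separate arguments rather than instantiating the exception inline. Because `raise Klass, arg` forwards `arg` to `Klass.new`, the rewrite is behaviour-preserving. A minimal sketch of the two styles (ExampleError and the methods are illustrative, not part of this merge request):

    # Compact vs. exploded raise styles distinguished by Style/RaiseArgs.
    ExampleError = Class.new(StandardError)

    # Compact style: instantiate the exception yourself (flagged by the cop).
    def find_compact(id)
      raise ExampleError.new("record #{id} not found")
    end

    # Exploded style: let raise build the exception (what this MR converts to).
    def find_exploded(id)
      raise ExampleError, "record #{id} not found"
    end

    # The exploded form also accepts non-string arguments, since `raise Klass, arg`
    # calls `Klass.new(arg)` under the hood, e.g.:
    #   raise ActiveRecord::RecordInvalid, record  # same as ActiveRecord::RecordInvalid.new(record)
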
@@ -820,14 +820,6 @@ Style/NumericLiteralPrefix:
 Style/PercentLiteralDelimiters:
   Enabled: false
-# Offense count: 247
-# Cop supports --auto-correct.
-# Configuration parameters: .
-# SupportedStyles: compact, exploded
-Style/RaiseArgs:
-  Enabled: false
-  EnforcedStyle: exploded
 # Offense count: 26
 # Cop supports --auto-correct.
 # Configuration parameters: SafeForConstants.
...
@@ -46,7 +46,7 @@ module PageLimiter
 if params[:page].present? && params[:page].to_i > max_page_number
   record_page_limit_interception
-  raise PageOutOfBoundsError.new(max_page_number)
+  raise PageOutOfBoundsError, max_page_number
 end
 end
...
@@ -37,7 +37,7 @@ module Packages
   @mod.version_by(commit: target)
 else
-  raise ArgumentError.new 'not a valid target'
+  raise ArgumentError, 'not a valid target'
 end
 end
 end
...
@@ -18,7 +18,7 @@ module TimeZoneHelper
 def timezone_data(format: :short)
   attrs = TIME_ZONE_FORMAT_ATTRS.fetch(format) do
     valid_formats = TIME_ZONE_FORMAT_ATTRS.keys.map { |k| ":#{k}"}.join(", ")
-    raise ArgumentError.new("Invalid format :#{format}. Valid formats are #{valid_formats}.")
+    raise ArgumentError, "Invalid format :#{format}. Valid formats are #{valid_formats}."
   end
   ActiveSupport::TimeZone.all.map do |timezone|
...
@@ -35,7 +35,7 @@ class BulkImports::Tracker < ApplicationRecord
 def pipeline_class
   unless BulkImports::Stage.pipeline_exists?(pipeline_name)
-    raise NameError.new("'#{pipeline_name}' is not a valid BulkImport Pipeline")
+    raise NameError, "'#{pipeline_name}' is not a valid BulkImport Pipeline"
   end
   pipeline_name.constantize
...
@@ -26,7 +26,7 @@ module CacheMarkdownField
 # Returns the default Banzai render context for the cached markdown field.
 def banzai_render_context(field)
-  raise ArgumentError.new("Unknown field: #{field.inspect}") unless
+  raise ArgumentError, "Unknown field: #{field.inspect}" unless
     cached_markdown_fields.markdown_fields.include?(field)
   # Always include a project key, or Banzai complains
@@ -99,7 +99,7 @@ module CacheMarkdownField
 end
 def cached_html_for(markdown_field)
-  raise ArgumentError.new("Unknown field: #{markdown_field}") unless
+  raise ArgumentError, "Unknown field: #{markdown_field}" unless
     cached_markdown_fields.markdown_fields.include?(markdown_field)
   __send__(cached_markdown_fields.html_field(markdown_field)) # rubocop:disable GitlabSecurity/PublicSend
...
@@ -22,7 +22,7 @@ module GroupDescendant
 return [] if descendants.empty?
 unless descendants.all? { |hierarchy| hierarchy.is_a?(GroupDescendant) }
-  raise ArgumentError.new(_('element is not a hierarchy'))
+  raise ArgumentError, _('element is not a hierarchy')
 end
 all_hierarchies = descendants.map do |descendant|
@@ -56,7 +56,7 @@ module GroupDescendant
 end
 if parent.nil? && hierarchy_top.present?
-  raise ArgumentError.new(_('specified top is not part of the tree'))
+  raise ArgumentError, _('specified top is not part of the tree')
 end
 if parent && parent != hierarchy_top
...
@@ -59,7 +59,7 @@ module HasWikiPageMetaAttributes
 if conflict.present?
   meta.errors.add(:canonical_slug, 'Duplicate value found')
-  raise CanonicalSlugConflictError.new(meta)
+  raise CanonicalSlugConflictError, meta
 end
 meta
...
@@ -168,7 +168,7 @@ module ReactiveCaching
 data_deep_size = Gitlab::Utils::DeepSize.new(data, max_size: self.class.reactive_cache_hard_limit)
-raise ExceededReactiveCacheLimit.new unless data_deep_size.valid?
+raise ExceededReactiveCacheLimit unless data_deep_size.valid?
 end
 end
 end
@@ -31,7 +31,7 @@ module Sha256Attribute
 end
 unless column.type == :binary
-  raise ArgumentError.new("sha256_attribute #{name.inspect} is invalid since the column type is not :binary")
+  raise ArgumentError, "sha256_attribute #{name.inspect} is invalid since the column type is not :binary"
 end
 rescue StandardError => error
   Gitlab::AppLogger.error "Sha256Attribute initialization: #{error.message}"
...
@@ -24,7 +24,7 @@ module ShaAttribute
 return unless column
 unless column.type == :binary
-  raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column type is not :binary")
+  raise ArgumentError, "sha_attribute #{name.inspect} is invalid since the column type is not :binary"
 end
 rescue StandardError => error
   Gitlab::AppLogger.error "ShaAttribute initialization: #{error.message}"
...
@@ -10,7 +10,7 @@ module Storage
 proj_with_tags = first_project_with_container_registry_tags
 if proj_with_tags
-  raise Gitlab::UpdatePathError.new("Namespace #{name} (#{id}) cannot be moved because at least one project (e.g. #{proj_with_tags.name} (#{proj_with_tags.id})) has tags in container registry")
+  raise Gitlab::UpdatePathError, "Namespace #{name} (#{id}) cannot be moved because at least one project (e.g. #{proj_with_tags.name} (#{proj_with_tags.id})) has tags in container registry"
 end
 parent_was = if saved_change_to_parent? && parent_id_before_last_save.present?
@@ -83,7 +83,7 @@ module Storage
 # if we cannot move namespace directory we should rollback
 # db changes in order to prevent out of sync between db and fs
-raise Gitlab::UpdatePathError.new('namespace directory cannot be moved')
+raise Gitlab::UpdatePathError, 'namespace directory cannot be moved'
 end
 end
 end
...
@@ -12,7 +12,7 @@ module TokenAuthenticatable
 def add_authentication_token_field(token_field, options = {})
   if token_authenticatable_fields.include?(token_field)
-    raise ArgumentError.new("#{token_field} already configured via add_authentication_token_field")
+    raise ArgumentError, "#{token_field} already configured via add_authentication_token_field"
   end
   token_authenticatable_fields.push(token_field)
...
@@ -31,7 +31,7 @@ module X509SerialNumberAttribute
 end
 unless column.type == :binary
-  raise ArgumentError.new("x509_serial_number_attribute #{name.inspect} is invalid since the column type is not :binary")
+  raise ArgumentError, "x509_serial_number_attribute #{name.inspect} is invalid since the column type is not :binary"
 end
 rescue StandardError => error
   Gitlab::AppLogger.error "X509SerialNumberAttribute initialization: #{error.message}"
...
@@ -20,7 +20,7 @@ class Namespace
 end
 def initialize(root)
-  raise StandardError.new('Must specify a root node') if root.parent_id
+  raise StandardError, 'Must specify a root node' if root.parent_id
   @root = root
 end
...
@@ -85,7 +85,7 @@ module Namespaces
 # Search this namespace's lineage. Bound inclusively by top node.
 def lineage(top)
-  raise UnboundedSearch.new('Must bound search by a top') unless top
+  raise UnboundedSearch, 'Must bound search by a top' unless top
   without_sti_condition
     .traversal_ids_contains("{#{top.id}}")
...
@@ -18,8 +18,8 @@ module Packages
 end
 def version_by(ref: nil, commit: nil)
-  raise ArgumentError.new 'no filter specified' unless ref || commit
-  raise ArgumentError.new 'ref and commit are mutually exclusive' if ref && commit
+  raise ArgumentError, 'no filter specified' unless ref || commit
+  raise ArgumentError, 'ref and commit are mutually exclusive' if ref && commit
   if commit
     return version_by_sha(commit) if commit.is_a? String
...
@@ -17,15 +17,15 @@ module Packages
 delegate :build, to: :@semver, allow_nil: true
 def initialize(mod, type, commit, name: nil, semver: nil, ref: nil)
-  raise ArgumentError.new("invalid type '#{type}'") unless VALID_TYPES.include? type
-  raise ArgumentError.new("mod is required") unless mod
-  raise ArgumentError.new("commit is required") unless commit
+  raise ArgumentError, "invalid type '#{type}'" unless VALID_TYPES.include? type
+  raise ArgumentError, "mod is required" unless mod
+  raise ArgumentError, "commit is required" unless commit
   if type == :ref
-    raise ArgumentError.new("ref is required") unless ref
+    raise ArgumentError, "ref is required" unless ref
   elsif type == :pseudo
-    raise ArgumentError.new("name is required") unless name
-    raise ArgumentError.new("semver is required") unless semver
+    raise ArgumentError, "name is required" unless name
+    raise ArgumentError, "semver is required" unless semver
   end
   @mod = mod
...
@@ -1004,7 +1004,7 @@ class Project < ApplicationRecord
 end
 def latest_successful_build_for_ref!(job_name, ref = default_branch)
-  latest_successful_build_for_ref(job_name, ref) || raise(ActiveRecord::RecordNotFound.new("Couldn't find job #{job_name}"))
+  latest_successful_build_for_ref(job_name, ref) || raise(ActiveRecord::RecordNotFound, "Couldn't find job #{job_name}")
 end
 def latest_pipeline(ref = default_branch, sha = nil)
...
@@ -128,10 +128,10 @@ class SshHostKey
 def normalize_url(url)
   full_url = ::Addressable::URI.parse(url)
-  raise ArgumentError.new("Invalid URL") unless full_url&.scheme == 'ssh'
+  raise ArgumentError, "Invalid URL" unless full_url&.scheme == 'ssh'
   Addressable::URI.parse("ssh://#{full_url.host}:#{full_url.inferred_port}")
 rescue Addressable::URI::InvalidURIError
-  raise ArgumentError.new("Invalid URL")
+  raise ArgumentError, "Invalid URL"
 end
 end
@@ -14,7 +14,7 @@ module Clusters
 end
 def execute
-  raise MissingRoleError.new('AWS provisioning role not configured') unless provision_role.present?
+  raise MissingRoleError, 'AWS provisioning role not configured' unless provision_role.present?
   ::Aws::AssumeRoleCredentials.new(
     client: client,
...
@@ -96,7 +96,7 @@ module Groups
 def notify_error!
   notify_error
-  raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
+  raise Gitlab::ImportExport::Error, shared.errors.to_sentence
 end
 def notify_success
...
@@ -114,7 +114,7 @@ module Groups
 def notify_error!
   notify_error
-  raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
+  raise Gitlab::ImportExport::Error, shared.errors.to_sentence
 end
 def remove_base_tmp_dir
...
@@ -80,7 +80,7 @@ module Metrics
 def fetch_dashboard
   uid = GrafanaUidParser.new(grafana_url, project).parse
-  raise DashboardProcessingError.new(_('Dashboard uid not found')) unless uid
+  raise DashboardProcessingError, _('Dashboard uid not found') unless uid
   response = client.get_dashboard(uid: uid)
@@ -89,7 +89,7 @@ module Metrics
 def fetch_datasource(dashboard)
   name = DatasourceNameParser.new(grafana_url, dashboard).parse
-  raise DashboardProcessingError.new(_('Datasource name not found')) unless name
+  raise DashboardProcessingError, _('Datasource name not found') unless name
   response = client.get_datasource(name: name)
@@ -115,7 +115,7 @@ module Metrics
 def parse_json(json)
   Gitlab::Json.parse(json, symbolize_names: true)
 rescue JSON::ParserError
-  raise DashboardProcessingError.new(_('Grafana response contains invalid json'))
+  raise DashboardProcessingError, _('Grafana response contains invalid json')
 end
 end
...
@@ -39,7 +39,7 @@ module Metrics
 end
 def invalid_embed_json!(message)
-  raise DashboardProcessingError.new(_("Parsing error for param :embed_json. %{message}") % { message: message })
+  raise DashboardProcessingError, _("Parsing error for param :embed_json. %{message}") % { message: message }
 end
 end
 end
...
@@ -9,7 +9,7 @@ module Namespaces
 root_storage_statistics.recalculate!
 rescue ActiveRecord::ActiveRecordError => e
-  raise RefresherError.new(e.message)
+  raise RefresherError, e.message
 end
 private
...
@@ -20,7 +20,7 @@ module Packages
   files: files
 }
 rescue ActiveModel::ValidationError => e
-  raise ExtractionError.new(e.message)
+  raise ExtractionError, e.message
 end
 private
@@ -41,10 +41,10 @@ module Packages
 def files
   strong_memoize(:files) do
-    raise ExtractionError.new("is not a changes file") unless file_type == :changes
-    raise ExtractionError.new("Files field is missing") if fields['Files'].blank?
-    raise ExtractionError.new("Checksums-Sha1 field is missing") if fields['Checksums-Sha1'].blank?
-    raise ExtractionError.new("Checksums-Sha256 field is missing") if fields['Checksums-Sha256'].blank?
+    raise ExtractionError, "is not a changes file" unless file_type == :changes
+    raise ExtractionError, "Files field is missing" if fields['Files'].blank?
+    raise ExtractionError, "Checksums-Sha1 field is missing" if fields['Checksums-Sha1'].blank?
+    raise ExtractionError, "Checksums-Sha256 field is missing" if fields['Checksums-Sha256'].blank?
     init_entries_from_files
     entries_from_checksums_sha1
@@ -73,8 +73,8 @@ module Packages
 each_lines_for('Checksums-Sha1') do |line|
   sha1sum, size, filename = line.split
   entry = @entries[filename]
-  raise ExtractionError.new("#{filename} is listed in Checksums-Sha1 but not in Files") unless entry
-  raise ExtractionError.new("Size for #{filename} in Files and Checksums-Sha1 differ") unless entry.size == size.to_i
+  raise ExtractionError, "#{filename} is listed in Checksums-Sha1 but not in Files" unless entry
+  raise ExtractionError, "Size for #{filename} in Files and Checksums-Sha1 differ" unless entry.size == size.to_i
   entry.sha1sum = sha1sum
 end
@@ -84,8 +84,8 @@ module Packages
 each_lines_for('Checksums-Sha256') do |line|
   sha256sum, size, filename = line.split
   entry = @entries[filename]
-  raise ExtractionError.new("#{filename} is listed in Checksums-Sha256 but not in Files") unless entry
-  raise ExtractionError.new("Size for #{filename} in Files and Checksums-Sha256 differ") unless entry.size == size.to_i
+  raise ExtractionError, "#{filename} is listed in Checksums-Sha256 but not in Files" unless entry
+  raise ExtractionError, "Size for #{filename} in Files and Checksums-Sha256 differ" unless entry.size == size.to_i
   entry.sha256sum = sha256sum
 end
@@ -104,7 +104,7 @@ module Packages
 entry.package_file = ::Packages::PackageFileFinder.new(@package_file.package, filename).execute!
 entry.validate!
 rescue ActiveRecord::RecordNotFound
-  raise ExtractionError.new("#{filename} is listed in Files but was not uploaded")
+  raise ExtractionError, "#{filename} is listed in Files but was not uploaded"
 end
 end
 end
...
@@ -12,7 +12,7 @@ module Packages
 end
 def execute
-  raise ExtractionError.new('invalid package file') unless valid_package_file?
+  raise ExtractionError, 'invalid package file' unless valid_package_file?
   extract_metadata
 end
...
@@ -26,7 +26,7 @@ module Packages
 end
 def execute
-  raise ExtractionError.new('invalid package file') unless valid_package_file?
+  raise ExtractionError, 'invalid package file' unless valid_package_file?
   extract_metadata(nuspec_file)
 end
@@ -94,8 +94,8 @@ module Packages
 Zip::File.open(file_path) do |zip_file|
   entry = zip_file.glob('*.nuspec').first
-  raise ExtractionError.new('nuspec file not found') unless entry
-  raise ExtractionError.new('nuspec file too big') if entry.size > MAX_FILE_SIZE
+  raise ExtractionError, 'nuspec file not found' unless entry
+  raise ExtractionError, 'nuspec file too big' if entry.size > MAX_FILE_SIZE
   entry.get_input_stream.read
 end
...
@@ -16,7 +16,7 @@ module Packages
 end
 def execute
-  raise InvalidMetadataError.new('package name and/or package version not found in metadata') unless valid_metadata?
+  raise InvalidMetadataError, 'package name and/or package version not found in metadata' unless valid_metadata?
   try_obtain_lease do
     @package_file.transaction do
@@ -33,7 +33,7 @@ module Packages
 end
 end
 rescue ActiveRecord::RecordInvalid => e
-  raise InvalidMetadataError.new(e.message)
+  raise InvalidMetadataError, e.message
 end
 private
...
@@ -13,7 +13,7 @@ module Packages
 )
 unless meta.valid?
-  raise ActiveRecord::RecordInvalid.new(meta)
+  raise ActiveRecord::RecordInvalid, meta
 end
 Packages::Pypi::Metadatum.upsert(meta.attributes)
...
@@ -107,7 +107,7 @@ module Packages
   Gem::Package.new(File.open(file_path))
 end
 rescue StandardError
-  raise ExtractionError.new('Unable to read gem file')
+  raise ExtractionError, 'Unable to read gem file'
 end
 # used by ExclusiveLeaseGuard
...
@@ -49,10 +49,8 @@ module Projects
 def first_ensure_no_registry_tags_are_present
   return unless project.has_container_registry_tags?
-  raise RenameFailedError.new(
-    "Project #{full_path_before} cannot be renamed because images are " \
-    "present in its container registry"
-  )
+  raise RenameFailedError, "Project #{full_path_before} cannot be renamed because images are " \
+    "present in its container registry"
 end
 def expire_caches_before_rename
@@ -144,7 +142,7 @@ module Projects
 Gitlab::AppLogger.error(error)
-raise RenameFailedError.new(error)
+raise RenameFailedError, error
 end
 end
 end
...
@@ -174,7 +174,7 @@ module Projects
 end
 def raise_error(message)
-  raise DestroyError.new(message)
+  raise DestroyError, message
 end
 def flush_caches(project)
...
@@ -112,7 +112,7 @@ module Projects
 def notify_error!
   notify_error
-  raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
+  raise Gitlab::ImportExport::Error, shared.errors.to_sentence
 end
 def notify_success
...
@@ -47,16 +47,16 @@ module Projects
 @old_namespace = project.namespace
 if Project.where(namespace_id: @new_namespace.try(:id)).where('path = ? or name = ?', project.path, project.name).exists?
-  raise TransferError.new(s_("TransferProject|Project with same name or path in target namespace already exists"))
+  raise TransferError, s_("TransferProject|Project with same name or path in target namespace already exists")
 end
 if project.has_container_registry_tags?
   # We currently don't support renaming repository if it contains tags in container registry
-  raise TransferError.new(s_('TransferProject|Project cannot be transferred, because tags are present in its container registry'))
+  raise TransferError, s_('TransferProject|Project cannot be transferred, because tags are present in its container registry')
 end
 if project.has_packages?(:npm) && !new_namespace_has_same_root?(project)
-  raise TransferError.new(s_("TransferProject|Root namespace can't be updated if project has NPM packages"))
+  raise TransferError, s_("TransferProject|Root namespace can't be updated if project has NPM packages")
 end
 proceed_to_transfer
@@ -170,7 +170,7 @@ module Projects
 # Move main repository
 unless move_repo_folder(@old_path, @new_path)
-  raise TransferError.new(s_("TransferProject|Cannot move project"))
+  raise TransferError, s_("TransferProject|Cannot move project")
 end
 # Disk path is changed; we need to ensure we reload it
...
@@ -49,11 +49,11 @@ module Projects
 def validate!
   unless valid_visibility_level_change?(project, params[:visibility_level])
-    raise ValidationError.new(s_('UpdateProject|New visibility level not allowed!'))
+    raise ValidationError, s_('UpdateProject|New visibility level not allowed!')
   end
   if renaming_project_with_container_registry_tags?
-    raise ValidationError.new(s_('UpdateProject|Cannot rename project because it contains container registry tags!'))
+    raise ValidationError, s_('UpdateProject|Cannot rename project because it contains container registry tags!')
   end
   validate_default_branch_change
@@ -67,7 +67,7 @@ module Projects
 if project.change_head(params[:default_branch])
   after_default_branch_change(previous_default_branch)
 else
-  raise ValidationError.new(s_("UpdateProject|Could not set the default branch"))
+  raise ValidationError, s_("UpdateProject|Could not set the default branch")
 end
 end
...
@@ -67,7 +67,7 @@ module StaticSiteEditor
 def check_for_duplicate_keys!(generated_data, file_data)
   duplicate_keys = generated_data.keys & file_data.keys
-  raise ValidationError.new("Duplicate key(s) '#{duplicate_keys}' found.") if duplicate_keys.present?
+  raise ValidationError, "Duplicate key(s) '#{duplicate_keys}' found." if duplicate_keys.present?
 end
 def merged_data(generated_data, file_data)
...
@@ -22,7 +22,7 @@ class SubmitUsagePingService
 usage_data = Gitlab::UsageData.data(force_refresh: true)
-raise SubmissionError.new('Usage data is blank') if usage_data.blank?
+raise SubmissionError, 'Usage data is blank' if usage_data.blank?
 raw_usage_data = save_raw_usage_data(usage_data)
@@ -33,12 +33,12 @@ class SubmitUsagePingService
   headers: { 'Content-type' => 'application/json' }
 )
-raise SubmissionError.new("Unsuccessful response code: #{response.code}") unless response.success?
+raise SubmissionError, "Unsuccessful response code: #{response.code}" unless response.success?
 version_usage_data_id = response.dig('conv_index', 'usage_data_id') || response.dig('dev_ops_score', 'usage_data_id')
 unless version_usage_data_id.is_a?(Integer) && version_usage_data_id > 0
-  raise SubmissionError.new("Invalid usage_data_id in response: #{version_usage_data_id}")
+  raise SubmissionError, "Invalid usage_data_id in response: #{version_usage_data_id}"
 end
 raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
...
@@ -94,7 +94,7 @@ module Terraform
 end
 def find_state!(find_params)
-  find_state(find_params) || raise(ActiveRecord::RecordNotFound.new("Couldn't find state"))
+  find_state(find_params) || raise(ActiveRecord::RecordNotFound, "Couldn't find state")
 end
 end
 end
@@ -9,7 +9,7 @@ module Todos
 def initialize(user_id, entity_id, entity_type)
   unless %w(Group Project).include?(entity_type)
-    raise ArgumentError.new("#{entity_type} is not an entity user can leave")
+    raise ArgumentError, "#{entity_type} is not an entity user can leave"
   end
   @user = UserFinder.new(user_id).find_by_id
...
@@ -8,7 +8,7 @@ module Users
 def initialize(target_user:)
   @target_user = target_user
-  raise ArgumentError.new("Please provide a target user") unless target_user.is_a?(User)
+  raise ArgumentError, "Please provide a target user" unless target_user.is_a?(User)
 end
 def execute
...
@@ -7,7 +7,7 @@ module Users
 INCLUDED_DOMAINS_PATTERN = [/gmail.com/].freeze
 def initialize(user:)
-  raise ArgumentError.new("Please provide a user") unless user.is_a?(User)
+  raise ArgumentError, "Please provide a user" unless user.is_a?(User)
   @user = user
 end
...
@@ -34,7 +34,7 @@ module Users
 def execute!(*args, &block)
   result = execute(*args, &block)
-  raise ActiveRecord::RecordInvalid.new(@user) unless result[:status] == :success
+  raise ActiveRecord::RecordInvalid, @user unless result[:status] == :success
   true
 end
...
@@ -451,7 +451,7 @@ module ObjectStorage
 def with_exclusive_lease
   lease_key = exclusive_lease_key
   uuid = Gitlab::ExclusiveLease.new(lease_key, timeout: 1.hour.to_i).try_obtain
-  raise ExclusiveLeaseTaken.new(lease_key) unless uuid
+  raise ExclusiveLeaseTaken, lease_key unless uuid
   yield uuid
 ensure
...
@@ -10,7 +10,7 @@ class CronValidator < ActiveModel::EachValidator
   cron_parser = Gitlab::Ci::CronParser.new(record.public_send(attribute), record.cron_timezone) # rubocop:disable GitlabSecurity/PublicSend
   record.errors.add(attribute, " is invalid syntax") unless cron_parser.cron_valid?
 else
-  raise NonWhitelistedAttributeError.new "Non-whitelisted attribute"
+  raise NonWhitelistedAttributeError, "Non-whitelisted attribute"
 end
 end
 end
@@ -97,10 +97,10 @@ module GitGarbageCollectMethods
 end
 rescue GRPC::NotFound => e
   Gitlab::GitLogger.error("#{__method__} failed:\nRepository not found")
-  raise Gitlab::Git::Repository::NoRepository.new(e)
+  raise Gitlab::Git::Repository::NoRepository, e
 rescue GRPC::BadStatus => e
   Gitlab::GitLogger.error("#{__method__} failed:\n#{e}")
-  raise Gitlab::Git::CommandError.new(e)
+  raise Gitlab::Git::CommandError, e
 end
 def get_gitaly_client(task, repository)
...
@@ -50,7 +50,7 @@ module ObjectStorage
 Gitlab::AppLogger.info header(success, failures)
 Gitlab::AppLogger.warn failures(failures)
-raise MigrationFailures.new(failures.map(&:error)) if failures.any?
+raise MigrationFailures, failures.map(&:error) if failures.any?
 end
 def header(success, failures)
...
@@ -33,10 +33,10 @@ module Packages
 if result.success?
   log_extra_metadata_on_done(:message, result.message)
 else
-  raise SyncError.new(result.message)
+  raise SyncError, result.message
 end
-raise SyncError.new(result.message) unless result.success?
+raise SyncError, result.message unless result.success?
 end
 private
...
@@ -11,10 +11,10 @@ module Users
 def perform(target_user_ids)
   target_user_ids = Array.wrap(target_user_ids)
-  raise ArgumentError.new('No target user ID provided') if target_user_ids.empty?
+  raise ArgumentError, 'No target user ID provided' if target_user_ids.empty?
   target_users = User.id_in(target_user_ids)
-  raise ArgumentError.new('No valid target user ID provided') if target_users.empty?
+  raise ArgumentError, 'No valid target user ID provided' if target_users.empty?
   target_users.each do |user|
     Users::UpdateAssignedOpenIssueCountService.new(target_user: user).execute
...
+---
+title: Resolves offenses Style/RaiseArgs
+merge_request: 58009
+author: Shubham Kumar @imskr
+type: fixed
@@ -914,7 +914,7 @@ class BackportEnterpriseSchema < ActiveRecord::Migration[5.0]
 MSG
 end
-raise StandardError.new(message)
+raise StandardError, message
 end
 def create_missing_tables
...
@@ -17,7 +17,7 @@ module EE
 class_methods do
   def priority(algorithm_type)
-    raise ArgumentError.new("No priority for #{algorithm_type.inspect}") unless PRIORITIES.key?(algorithm_type)
+    raise ArgumentError, "No priority for #{algorithm_type.inspect}" unless PRIORITIES.key?(algorithm_type)
     PRIORITIES[algorithm_type]
   end
...
@@ -23,13 +23,13 @@ module Geo
 unless ::Gitlab::Geo.geo_database_configured?
   message = NOT_CONFIGURED_MSG
   message = "#{message}\nIn the GDK root, try running `make geo-setup`" if Rails.env.development?
-  raise SecondaryNotConfigured.new(message)
+  raise SecondaryNotConfigured, message
 end
 # Don't call super because LoadBalancing::ActiveRecordProxy will intercept it
 retrieve_connection
 rescue ActiveRecord::NoDatabaseError
-  raise SecondaryNotConfigured.new(NOT_CONFIGURED_MSG)
+  raise SecondaryNotConfigured, NOT_CONFIGURED_MSG
 end
 end
 end
@@ -58,11 +58,11 @@ module Vulnerabilities
 def self.validate_enums(feedback_params)
   unless feedback_types.include?(feedback_params[:feedback_type])
-    raise ArgumentError.new("'#{feedback_params[:feedback_type]}' is not a valid feedback_type")
+    raise ArgumentError, "'#{feedback_params[:feedback_type]}' is not a valid feedback_type"
   end
   unless categories.include?(feedback_params[:category])
-    raise ArgumentError.new("'#{feedback_params[:category]}' is not a valid category")
+    raise ArgumentError, "'#{feedback_params[:category]}' is not a valid category"
   end
 end
...
@@ -32,7 +32,7 @@ module Ci
   reset_ci_minutes!(namespaces)
 end
-raise BatchNotResetError.new(@failed_batches) if @failed_batches.any?
+raise BatchNotResetError, @failed_batches if @failed_batches.any?
 end
 private
...
@@ -6,7 +6,7 @@ module DastSiteValidations
 TokenNotFound = Class.new(StandardError)
 def execute!
-  raise PermissionsError.new('Insufficient permissions') unless allowed?
+  raise PermissionsError, 'Insufficient permissions' unless allowed?
   return if dast_site_validation.passed?
@@ -49,7 +49,7 @@ module DastSiteValidations
 end
 def validate!(response)
-  raise TokenNotFound.new('Could not find token') unless token_found?(response)
+  raise TokenNotFound, 'Could not find token' unless token_found?(response)
   dast_site_validation.pass
 end
...
@@ -5,7 +5,7 @@ module DastSites
 PermissionsError = Class.new(StandardError)
 def execute!(url:)
-  raise PermissionsError.new('Insufficient permissions') unless allowed?
+  raise PermissionsError, 'Insufficient permissions' unless allowed?
   find_or_create_by!(url)
 end
...
@@ -10,7 +10,7 @@ module Licenses
 def execute
   raise ActiveRecord::RecordNotFound unless license
   raise Gitlab::Access::AccessDeniedError unless can?(user, :destroy_licenses)
-  raise DestroyCloudLicenseError.new(_('Cloud licenses can not be removed.')) if license.cloud_license?
+  raise DestroyCloudLicenseError, _('Cloud licenses can not be removed.') if license.cloud_license?
   license.destroy
 end
...
@@ -67,7 +67,7 @@ class Gitlab::Seeder::ComplianceDashboardMergeRequests
   merge_request
 end
 rescue ::Gitlab::Access::AccessDeniedError
-  raise ::Gitlab::Access::AccessDeniedError.new("If you are re-creating your GitLab database, you should also delete your old repositories located at $GDK/repositories/@hashed")
+  raise ::Gitlab::Access::AccessDeniedError, "If you are re-creating your GitLab database, you should also delete your old repositories located at $GDK/repositories/@hashed"
 end
 def create_pipeline!(project, ref, commit, status)
...
@@ -16,7 +16,7 @@ module EE
 response = faraday.put(url, payload, headers)
-raise Error.new("Push Blob error: #{response.body}") unless response.success?
+raise Error, "Push Blob error: #{response.body}" unless response.success?
 true
 end
@@ -24,7 +24,7 @@ module EE
 def push_manifest(name, tag, manifest, manifest_type)
   response = faraday.put("v2/#{name}/manifests/#{tag}", manifest, { 'Content-Type' => manifest_type })
-  raise Error.new("Push manifest error: #{response.body}") unless response.success?
+  raise Error, "Push manifest error: #{response.body}" unless response.success?
   true
 end
@@ -60,7 +60,7 @@ module EE
   file.write(chunk)
 end
-raise Error.new("Could not download the blob: #{digest}") unless response.status.success?
+raise Error, "Could not download the blob: #{digest}" unless response.status.success?
 file
 ensure
@@ -76,7 +76,7 @@ module EE
 def get_upload_url(name, digest)
   response = faraday.post("/v2/#{name}/blobs/uploads/")
-  raise Error.new("Get upload URL error: #{response.body}") unless response.success?
+  raise Error, "Get upload URL error: #{response.body}" unless response.success?
   upload_url = URI(response.headers['location'])
   upload_url.query = "#{upload_url.query}&#{URI.encode_www_form(digest: digest)}"
...
@@ -14,7 +14,7 @@ module EE
 logger.log_timed(LOG_MESSAGE) do
   unless branch_name_allowed_by_push_rule?
     message = ERROR_MESSAGE % { branch_name_regex: push_rule.branch_name_regex }
-    raise ::Gitlab::GitAccess::ForbiddenError.new(message)
+    raise ::Gitlab::GitAccess::ForbiddenError, message
   end
 end
...
@@ -14,11 +14,11 @@ module Gitlab
 end
 def title
-  raise NotImplementedError.new("Expected #{self.name} to implement title")
+  raise NotImplementedError, "Expected #{self.name} to implement title"
 end
 def value
-  raise NotImplementedError.new("Expected #{self.name} to implement value")
+  raise NotImplementedError, "Expected #{self.name} to implement value"
 end
 end
 end
...
@@ -21,11 +21,11 @@ module Gitlab
 end
 def start_event_identifier
-  raise NotImplementedError.new("Expected #{self.name} to implement start_event_identifier")
+  raise NotImplementedError, "Expected #{self.name} to implement start_event_identifier"
 end
 def end_event_identifier
-  raise NotImplementedError.new("Expected #{self.name} to implement end_event_identifier")
+  raise NotImplementedError, "Expected #{self.name} to implement end_event_identifier"
 end
 private
...
@@ -58,7 +58,7 @@ module Gitlab
 when Array
   serialize_array(anything)
 else
-  raise InvalidError.new("Don't know how to serialize #{anything.class}")
+  raise InvalidError, "Don't know how to serialize #{anything.class}"
 end
 end
@@ -85,7 +85,7 @@ module Gitlab
 private
 def test_array!(array)
-  raise InvalidError.new("Bad array representation: #{array.inspect}") unless
+  raise InvalidError, "Bad array representation: #{array.inspect}" unless
     (3..4).cover?(array.size)
 end
 end
...
@@ -59,7 +59,7 @@ module Gitlab
 rescue OpenSSL::Cipher::CipherError
   message = 'Error decrypting the Geo secret from the database. Check that the primary and secondary have the same db_key_base.'
   log_error(message)
-  raise InvalidDecryptionKeyError.new(message)
+  raise InvalidDecryptionKeyError, message
 end
 return unless data.present?
@@ -81,7 +81,7 @@ module Gitlab
 rescue JWT::ImmatureSignature, JWT::ExpiredSignature
   message = "Signature not within leeway of #{IAT_LEEWAY} seconds. Check your system clocks!"
   log_error(message)
-  raise InvalidSignatureTimeError.new(message)
+  raise InvalidSignatureTimeError, message
 rescue JWT::DecodeError => e
   log_error("Error decoding Geo request: #{e}")
   nil
...
@@ -19,7 +19,7 @@ module Gitlab
 error = validate_facet(aggregate_facet)
 if error
-  raise ArgumentError.new("#{error}. Please specify either #{COUNT} or #{WEIGHT_SUM}")
+  raise ArgumentError, "#{error}. Please specify either #{COUNT} or #{WEIGHT_SUM}"
 end
 @facet = aggregate_facet.to_sym
...
@@ -26,14 +26,14 @@ module Gitlab
 # limit (~200), then postgres uses a slow query plan and first does
 # left join of epic_issues with issues which times out
 epic_ids = ::Epic.ids_for_base_and_decendants(target_epic_ids)
-raise ArgumentError.new("There are too many epics to load. Please select fewer epics or contact your administrator.") if epic_ids.count >= MAXIMUM_LOADABLE
+raise ArgumentError, "There are too many epics to load. Please select fewer epics or contact your administrator." if epic_ids.count >= MAXIMUM_LOADABLE
 # We do a left outer join in order to capture epics with no issues
 # This is so we can aggregate the epic counts for every epic
 raw_results = []
 epic_ids.in_groups_of(EPIC_BATCH_SIZE).each do |epic_batch_ids|
   raw_results += ::Epic.issue_metadata_for_epics(epic_ids: epic_ids, limit: MAXIMUM_LOADABLE)
-  raise ArgumentError.new("There are too many records to load. Please select fewer epics or contact your administrator.") if raw_results.count >= MAXIMUM_LOADABLE
+  raise ArgumentError, "There are too many records to load. Please select fewer epics or contact your administrator." if raw_results.count >= MAXIMUM_LOADABLE
 end
 @results = raw_results.group_by { |record| record[:id] }
...
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
 def raise_and_wrap(wrapper, original)
   raise original
 rescue original.class
-  raise wrapper.new('boom')
+  raise wrapper, 'boom'
 end
 def wrapped_exception(wrapper, original)
...
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
 def raise_and_wrap(wrapper, original)
   raise original
 rescue original.class
-  raise wrapper.new('boop')
+  raise wrapper, 'boop'
 end
 def wrapped_exception(wrapper, original)
@@ -94,7 +94,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
 returned = lb.read do
   unless raised
     raised = true
-    raise conflict_error.new
+    raise conflict_error
   end
   10
@@ -107,7 +107,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
 expect(lb).to receive(:release_host).exactly(6).times
 expect(lb).to receive(:read_write)
-lb.read { raise conflict_error.new }
+lb.read { raise conflict_error }
 end
 it 'uses the primary if no secondaries are available' do
...
@@ -59,7 +59,7 @@ RSpec.describe AutoMerge::MergeTrainService do
 context 'when failed to save the record' do
   before do
-    allow(merge_request).to receive(:save!) { raise PG::QueryCanceled.new }
+    allow(merge_request).to receive(:save!) { raise PG::QueryCanceled }
   end
   it 'returns result code' do
@@ -69,7 +69,7 @@ RSpec.describe AutoMerge::MergeTrainService do
 context 'when statement timeout happened on system note creation' do
   before do
-    allow(SystemNoteService).to receive(:merge_train) { raise PG::QueryCanceled.new }
+    allow(SystemNoteService).to receive(:merge_train) { raise PG::QueryCanceled }
   end
   it 'returns failed status' do
@@ -218,7 +218,7 @@ RSpec.describe AutoMerge::MergeTrainService do
 context 'when statement timeout happened on system note creation' do
   before do
-    allow(SystemNoteService).to receive(:cancel_merge_train) { raise PG::QueryCanceled.new }
+    allow(SystemNoteService).to receive(:cancel_merge_train) { raise PG::QueryCanceled }
   end
   it 'returns error' do
@@ -304,7 +304,7 @@ RSpec.describe AutoMerge::MergeTrainService do
 context 'when statement timeout happened on system note creation' do
   before do
-    allow(SystemNoteService).to receive(:abort_merge_train) { raise PG::QueryCanceled.new }
+    allow(SystemNoteService).to receive(:abort_merge_train) { raise PG::QueryCanceled }
   end
   it 'returns error' do
...
@@ -141,7 +141,7 @@ RSpec.describe MergeTrains::CreatePipelineService do
 context 'when there is a conflict on merge ref creation' do
   before do
     allow(project.repository).to receive(:merge_to_ref) do
-      raise Gitlab::Git::CommandError.new('Failed to create merge commit')
+      raise Gitlab::Git::CommandError, 'Failed to create merge commit'
     end
   end
...
@@ -29,7 +29,7 @@ RSpec.describe Geo::RepositorySyncWorker, :geo, :clean_gitlab_redis_cache do
 create(:geo_project_registry, :synced, :repository_dirty, project: unhealthy_dirty)
 allow(Gitlab::GitalyClient).to receive(:call) do
-  raise GRPC::Unavailable.new('No Gitaly available')
+  raise GRPC::Unavailable, 'No Gitaly available'
 end
 expect(repository_worker).not_to receive(:perform_async).with('broken')
...
@@ -26,7 +26,7 @@ RSpec.describe Geo::RepositoryVerification::Primary::BatchWorker, :clean_gitlab_
 create(:repository_state, :repository_outdated, project: unhealthy_outdated)
 allow(Gitlab::GitalyClient).to receive(:call) do
-  raise GRPC::Unavailable.new('No Gitaly available')
+  raise GRPC::Unavailable, 'No Gitaly available'
 end
 expect(Geo::RepositoryVerification::Primary::ShardWorker).not_to receive(:perform_async).with('broken')
...
@@ -23,7 +23,7 @@ RSpec.describe Geo::RepositoryVerification::Secondary::SchedulerWorker, :clean_g
 create(:project, :broken_storage)
 allow(Gitlab::GitalyClient).to receive(:call) do
-  raise GRPC::Unavailable.new('No Gitaly available')
+  raise GRPC::Unavailable, 'No Gitaly available'
 end
 expect(Geo::RepositoryVerification::Secondary::ShardWorker).not_to receive(:perform_async).with('broken')
...
@@ -8,7 +8,7 @@ module API
 def set_http_headers(header_data)
   header_data.each do |key, value|
     if value.is_a?(Enumerable)
-      raise ArgumentError.new("Header value should be a string")
+      raise ArgumentError, "Header value should be a string"
     end
     header "X-Gitlab-#{key.to_s.split('_').collect(&:capitalize).join('-')}", value.to_s
...
@@ -158,7 +158,7 @@ module API
 status 200
 unless actor.key_or_user
-  raise ActiveRecord::RecordNotFound.new('User not found!')
+  raise ActiveRecord::RecordNotFound, 'User not found!'
 end
 actor.update_last_used_at!
...
@@ -63,7 +63,7 @@ module BulkImports
 def with_error_handling
   response = yield
-  raise ConnectionError.new("Error #{response.code}") unless response.success?
+  raise ConnectionError, "Error #{response.code}" unless response.success?
   response
 rescue *Gitlab::HTTP::HTTP_ERRORS => e
...
@@ -64,7 +64,7 @@ module DeclarativeEnum
 end
 def define(&block)
-  raise LocalJumpError.new('No block given') unless block
+  raise LocalJumpError, 'No block given' unless block
   @definition = Builder.new(definition, block).build
 end
...
@@ -17,7 +17,7 @@ module Flowdock
 end
 def initialize(ref, from, to, options = {})
-  raise TokenError.new("Flowdock API token not found") unless options[:token]
+  raise TokenError, "Flowdock API token not found" unless options[:token]
   @ref = ref
   @from = from
...
@@ -160,7 +160,7 @@ module Gitlab
 case AccessTokenValidationService.new(access_token, request: request).validate(scopes: scopes)
 when AccessTokenValidationService::INSUFFICIENT_SCOPE
-  raise InsufficientScopeError.new(scopes)
+  raise InsufficientScopeError, scopes
 when AccessTokenValidationService::EXPIRED
   raise ExpiredError
 when AccessTokenValidationService::REVOKED
...
...@@ -59,7 +59,7 @@ module Gitlab ...@@ -59,7 +59,7 @@ module Gitlab
end end
def self.invalid_provider(provider) def self.invalid_provider(provider)
raise InvalidProvider.new("Unknown provider (#{provider}). Available providers: #{providers}") raise InvalidProvider, "Unknown provider (#{provider}). Available providers: #{providers}"
end end
def self.encrypted_secrets def self.encrypted_secrets
......
...@@ -13,7 +13,7 @@ module Gitlab ...@@ -13,7 +13,7 @@ module Gitlab
repos_to_import = Dir.glob(import_path + '**/*.git') repos_to_import = Dir.glob(import_path + '**/*.git')
unless user = User.admins.order_id_asc.first unless user = User.admins.order_id_asc.first
raise NoAdminError.new('No admin user found to import repositories') raise NoAdminError, 'No admin user found to import repositories'
end end
repos_to_import.each do |repo_path| repos_to_import.each do |repo_path|
......
...@@ -55,7 +55,7 @@ module Gitlab ...@@ -55,7 +55,7 @@ module Gitlab
result = service.execute result = service.execute
raise Error.new(result[:message]) if result[:status] != :success raise Error, result[:message] if result[:status] != :success
end end
end end
......
...@@ -169,7 +169,7 @@ module Gitlab ...@@ -169,7 +169,7 @@ module Gitlab
# We raise a custom error so it's easier to catch different changelog # We raise a custom error so it's easier to catch different changelog
# related errors. In addition, this ensures the caller of this method # related errors. In addition, this ensures the caller of this method
# doesn't depend on a Parslet specific error class. # doesn't depend on a Parslet specific error class.
raise Error.new("Failed to parse the template: #{ex.message}") raise Error, "Failed to parse the template: #{ex.message}"
end end
end end
end end
......
...@@ -31,7 +31,7 @@ module Gitlab ...@@ -31,7 +31,7 @@ module Gitlab
def ensure_test_cases_limited!(total_parsed, limit) def ensure_test_cases_limited!(total_parsed, limit)
return unless limit > 0 && total_parsed > limit return unless limit > 0 && total_parsed > limit
raise JunitParserError.new("number of test cases exceeded the limit of #{limit}") raise JunitParserError, "number of test cases exceeded the limit of #{limit}"
end end
def all_cases(root, parent = nil, &blk) def all_cases(root, parent = nil, &blk)
......
...@@ -141,7 +141,7 @@ module Gitlab ...@@ -141,7 +141,7 @@ module Gitlab
end end
def error!(message) def error!(message)
raise ValidationError.new(message) raise ValidationError, message
end end
end end
end end
......
...@@ -11,11 +11,11 @@ module Gitlab ...@@ -11,11 +11,11 @@ module Gitlab
end end
def title def title
raise NotImplementedError.new("Expected #{self.name} to implement title") raise NotImplementedError, "Expected #{self.name} to implement title"
end end
def value def value
raise NotImplementedError.new("Expected #{self.name} to implement value") raise NotImplementedError, "Expected #{self.name} to implement value"
end end
end end
end end
......
...@@ -94,7 +94,7 @@ module Gitlab ...@@ -94,7 +94,7 @@ module Gitlab
elsif column.is_a?(Arel::Attributes::Attribute) elsif column.is_a?(Arel::Attributes::Attribute)
column column
else else
raise ColumnConfigurationError.new("Cannot transform the column: #{column.inspect}, please provide the column name as string") raise ColumnConfigurationError, "Cannot transform the column: #{column.inspect}, please provide the column name as string"
end end
end end
end end
......
...@@ -65,7 +65,7 @@ module Gitlab ...@@ -65,7 +65,7 @@ module Gitlab
contents = deserialize(read) contents = deserialize(read)
raise InvalidConfigError.new unless contents.is_a?(Hash) raise InvalidConfigError unless contents.is_a?(Hash)
@config = contents.deep_symbolize_keys @config = contents.deep_symbolize_keys
end end
...@@ -115,7 +115,7 @@ module Gitlab ...@@ -115,7 +115,7 @@ module Gitlab
end end
def handle_missing_key! def handle_missing_key!
raise MissingKeyError.new if @key.nil? raise MissingKeyError if @key.nil?
end end
end end
end end
...@@ -150,7 +150,7 @@ module Gitlab ...@@ -150,7 +150,7 @@ module Gitlab
elsif subject.respond_to?(:to_s) elsif subject.respond_to?(:to_s)
subject.to_s subject.to_s
else else
raise ArgumentError.new('Subject must respond to `to_global_id` or `to_s`') raise ArgumentError, 'Subject must respond to `to_global_id` or `to_s`'
end end
end end
end end
......
...@@ -24,7 +24,7 @@ module Gitlab ...@@ -24,7 +24,7 @@ module Gitlab
) )
::Gitlab::ExternalAuthorization::Response.new(response) ::Gitlab::ExternalAuthorization::Response.new(response)
rescue *Gitlab::HTTP::HTTP_ERRORS => e rescue *Gitlab::HTTP::HTTP_ERRORS => e
raise ::Gitlab::ExternalAuthorization::RequestFailed.new(e) raise ::Gitlab::ExternalAuthorization::RequestFailed, e
end end
private private
......
...@@ -20,9 +20,9 @@ module Gitlab ...@@ -20,9 +20,9 @@ module Gitlab
gitaly_conflicts_client(@target_repository).list_conflict_files.to_a gitaly_conflicts_client(@target_repository).list_conflict_files.to_a
end end
rescue GRPC::FailedPrecondition => e rescue GRPC::FailedPrecondition => e
raise Gitlab::Git::Conflict::Resolver::ConflictSideMissing.new(e.message) raise Gitlab::Git::Conflict::Resolver::ConflictSideMissing, e.message
rescue GRPC::BadStatus => e rescue GRPC::BadStatus => e
raise Gitlab::Git::CommandError.new(e) raise Gitlab::Git::CommandError, e
end end
def resolve_conflicts(source_repository, resolution, source_branch:, target_branch:) def resolve_conflicts(source_repository, resolution, source_branch:, target_branch:)
......
...@@ -89,9 +89,9 @@ module Gitlab ...@@ -89,9 +89,9 @@ module Gitlab
def root_ref def root_ref
gitaly_ref_client.default_branch_name gitaly_ref_client.default_branch_name
rescue GRPC::NotFound => e rescue GRPC::NotFound => e
raise NoRepository.new(e.message) raise NoRepository, e.message
rescue GRPC::Unknown => e rescue GRPC::Unknown => e
raise Gitlab::Git::CommandError.new(e.message) raise Gitlab::Git::CommandError, e.message
end end
def exists? def exists?
...@@ -348,7 +348,7 @@ module Gitlab ...@@ -348,7 +348,7 @@ module Gitlab
limit = options[:limit] limit = options[:limit]
if limit == 0 || !limit.is_a?(Integer) if limit == 0 || !limit.is_a?(Integer)
raise ArgumentError.new("invalid Repository#log limit: #{limit.inspect}") raise ArgumentError, "invalid Repository#log limit: #{limit.inspect}"
end end
wrapped_gitaly_errors do wrapped_gitaly_errors do
...@@ -414,7 +414,7 @@ module Gitlab ...@@ -414,7 +414,7 @@ module Gitlab
end end
end end
rescue ArgumentError => e rescue ArgumentError => e
raise Gitlab::Git::Repository::GitError.new(e) raise Gitlab::Git::Repository::GitError, e
end end
# Returns the SHA of the most recent common ancestor of +from+ and +to+ # Returns the SHA of the most recent common ancestor of +from+ and +to+
...@@ -836,7 +836,7 @@ module Gitlab ...@@ -836,7 +836,7 @@ module Gitlab
def fsck def fsck
msg, status = gitaly_repository_client.fsck msg, status = gitaly_repository_client.fsck
raise GitError.new("Could not fsck repository: #{msg}") unless status == 0 raise GitError, "Could not fsck repository: #{msg}" unless status == 0
end end
def create_from_bundle(bundle_path) def create_from_bundle(bundle_path)
......
...@@ -31,7 +31,7 @@ module Gitlab ...@@ -31,7 +31,7 @@ module Gitlab
def rugged def rugged
@rugged ||= ::Rugged::Repository.new(path, alternates: alternate_object_directories) @rugged ||= ::Rugged::Repository.new(path, alternates: alternate_object_directories)
rescue ::Rugged::RepositoryError, ::Rugged::OSError rescue ::Rugged::RepositoryError, ::Rugged::OSError
raise ::Gitlab::Git::Repository::NoRepository.new('no repository for such path') raise ::Gitlab::Git::Repository::NoRepository, 'no repository for such path'
end end
def cleanup def cleanup
......
...@@ -6,13 +6,13 @@ module Gitlab ...@@ -6,13 +6,13 @@ module Gitlab
def wrapped_gitaly_errors(&block) def wrapped_gitaly_errors(&block)
yield block yield block
rescue GRPC::NotFound => e rescue GRPC::NotFound => e
raise Gitlab::Git::Repository::NoRepository.new(e) raise Gitlab::Git::Repository::NoRepository, e
rescue GRPC::InvalidArgument => e rescue GRPC::InvalidArgument => e
raise ArgumentError.new(e) raise ArgumentError, e
rescue GRPC::DeadlineExceeded => e rescue GRPC::DeadlineExceeded => e
raise Gitlab::Git::CommandTimedOut.new(e) raise Gitlab::Git::CommandTimedOut, e
rescue GRPC::BadStatus => e rescue GRPC::BadStatus => e
raise Gitlab::Git::CommandError.new(e) raise Gitlab::Git::CommandError, e
end end
end end
end end
......
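Several hunks in this file pass an exception object rather than a string as the second argument to raise (for example raise Gitlab::Git::CommandError, e). A minimal sketch with hypothetical classes, showing that this behaves the same as the old CommandError.new(e) form: the wrapping error's message is taken from the original exception's #to_s.

# Hypothetical classes; UpstreamError/WrapperError stand in for the GRPC and Gitlab::Git errors above.
class UpstreamError < StandardError; end
class WrapperError < StandardError; end

begin
  begin
    raise UpstreamError, 'deadline exceeded'
  rescue UpstreamError => e
    raise WrapperError, e # equivalent to raise WrapperError.new(e)
  end
rescue WrapperError => wrapped
  wrapped.message # => "deadline exceeded"
end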
...@@ -59,7 +59,7 @@ module Gitlab ...@@ -59,7 +59,7 @@ module Gitlab
:user_create_branch, request, timeout: GitalyClient.long_timeout) :user_create_branch, request, timeout: GitalyClient.long_timeout)
if response.pre_receive_error.present? if response.pre_receive_error.present?
raise Gitlab::Git::PreReceiveError.new(response.pre_receive_error) raise Gitlab::Git::PreReceiveError, response.pre_receive_error
end end
branch = response.branch branch = response.branch
...@@ -159,7 +159,7 @@ module Gitlab ...@@ -159,7 +159,7 @@ module Gitlab
branch_update = second_response.branch_update branch_update = second_response.branch_update
return if branch_update.nil? return if branch_update.nil?
raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present? raise Gitlab::Git::CommitError, 'failed to apply merge to branch' unless branch_update.commit_id.present?
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update) Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update)
ensure ensure
......
...@@ -292,7 +292,7 @@ module Gitlab ...@@ -292,7 +292,7 @@ module Gitlab
end end
def invalid_ref!(message) def invalid_ref!(message)
raise Gitlab::Git::Repository::InvalidRef.new(message) raise Gitlab::Git::Repository::InvalidRef, message
end end
end end
end end
......
...@@ -319,7 +319,7 @@ module Gitlab ...@@ -319,7 +319,7 @@ module Gitlab
response = GitalyClient.call(@storage, :repository_service, :calculate_checksum, request, timeout: GitalyClient.fast_timeout) response = GitalyClient.call(@storage, :repository_service, :calculate_checksum, request, timeout: GitalyClient.fast_timeout)
response.checksum.presence response.checksum.presence
rescue GRPC::DataLoss => e rescue GRPC::DataLoss => e
raise Gitlab::Git::Repository::InvalidRepository.new(e) raise Gitlab::Git::Repository::InvalidRepository, e
end end
def raw_changes_between(from, to) def raw_changes_between(from, to)
......
...@@ -69,13 +69,13 @@ module Gitlab ...@@ -69,13 +69,13 @@ module Gitlab
# Error messages are based on the responses of proxy.golang.org # Error messages are based on the responses of proxy.golang.org
# Verify that the SHA fragment references a commit # Verify that the SHA fragment references a commit
raise ArgumentError.new 'invalid pseudo-version: unknown commit' unless commit raise ArgumentError, 'invalid pseudo-version: unknown commit' unless commit
# Require the SHA fragment to be 12 characters long # Require the SHA fragment to be 12 characters long
raise ArgumentError.new 'invalid pseudo-version: revision is shorter than canonical' unless version.commit_id.length == 12 raise ArgumentError, 'invalid pseudo-version: revision is shorter than canonical' unless version.commit_id.length == 12
# Require the timestamp to match that of the commit # Require the timestamp to match that of the commit
raise ArgumentError.new 'invalid pseudo-version: does not match version-control timestamp' unless commit.committed_date.strftime('%Y%m%d%H%M%S') == version.timestamp raise ArgumentError, 'invalid pseudo-version: does not match version-control timestamp' unless commit.committed_date.strftime('%Y%m%d%H%M%S') == version.timestamp
commit commit
end end
......
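The pseudo-version hunk above encodes three checks modelled on proxy.golang.org's responses: the SHA fragment must resolve to a commit, the revision must be exactly 12 characters, and the commit's timestamp must match the version's timestamp in %Y%m%d%H%M%S form. A minimal standalone sketch of the same checks, with a hypothetical commit struct in place of the repository objects used in the diff:

# Hypothetical stand-ins for the commit and version objects in the hunk above.
Commit = Struct.new(:id, :committed_date)

def validate_pseudo_version!(commit, revision, timestamp)
  raise ArgumentError, 'invalid pseudo-version: unknown commit' unless commit
  raise ArgumentError, 'invalid pseudo-version: revision is shorter than canonical' unless revision.length == 12
  raise ArgumentError, 'invalid pseudo-version: does not match version-control timestamp' unless commit.committed_date.strftime('%Y%m%d%H%M%S') == timestamp

  commit
end

commit = Commit.new('abc123def456', Time.utc(2021, 3, 24, 12, 0, 0))
validate_pseudo_version!(commit, 'abc123def456', '20210324120000') # passes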