Commit 9f05e97a authored by Nick Thomas

Run rubocop -a

parent 5ed9c5f7
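Note: the autocorrections below appear to come mainly from two RuboCop cops: Style/RedundantBegin (Ruby 2.5+ lets `rescue`/`ensure` attach directly to method and `do ... end` block bodies, so the wrapping `begin ... end` is redundant) and Style/UnpackFirst (`unpack1(...)` instead of `unpack(...)[0]` / `unpack(...).first`). The sketch below is a hypothetical illustration of both patterns, not code taken from this diff:

# Hypothetical illustration of the autocorrected patterns; not GitLab code.

# Before: explicit begin/end wrapper inside a do...end block.
def parse_all_old(rows)
  rows.map do |row|
    begin
      Integer(row)
    rescue ArgumentError
      nil
    end
  end
end

# After: since Ruby 2.5 a rescue clause can attach to the block body
# directly, so the begin/end wrapper is dropped.
def parse_all_new(rows)
  rows.map do |row|
    Integer(row)
  rescue ArgumentError
    nil
  end
end

parse_all_old(%w[1 x 3]) # => [1, nil, 3]
parse_all_new(%w[1 x 3]) # => [1, nil, 3]

# Style/UnpackFirst: unpack1 returns the first unpacked value directly.
"A".unpack('C').first # => 65 (before)
"A".unpack1('C')      # => 65 (after)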
@@ -286,13 +286,11 @@ class MergeRequestDiff < ActiveRecord::Base
     return yield(@external_diff_file) if @external_diff_file
     external_diff.open do |file|
-      begin
-        @external_diff_file = file
+      @external_diff_file = file
-        yield(@external_diff_file)
-      ensure
-        @external_diff_file = nil
-      end
+      yield(@external_diff_file)
+    ensure
+      @external_diff_file = nil
     end
   end
@@ -23,6 +23,6 @@ class MergeRequestDiffFile < ActiveRecord::Base
       super
     end
-    binary? ? content.unpack('m0').first : content
+    binary? ? content.unpack1('m0') : content
   end
 end
@@ -1209,11 +1209,9 @@ class Project < ActiveRecord::Base
   def repo_exists?
     strong_memoize(:repo_exists) do
-      begin
-        repository.exists?
-      rescue
-        false
-      end
+      repository.exists?
+    rescue
+      false
     end
   end
@@ -205,12 +205,10 @@ class JiraService < IssueTrackerService
   # if any transition fails it will log the error message and stop the transition sequence
   def transition_issue(issue)
     jira_issue_transition_id.scan(Gitlab::Regex.jira_transition_id_regex).each do |transition_id|
-      begin
-        issue.transitions.build.save!(transition: { id: transition_id })
-      rescue => error
-        log_error("Issue transition failed", error: error.message, client_url: client_url)
-        return false
-      end
+      issue.transitions.build.save!(transition: { id: transition_id })
+    rescue => error
+      log_error("Issue transition failed", error: error.message, client_url: client_url)
+      return false
     end
   end
@@ -265,16 +265,14 @@ class Repository
   # to avoid unnecessary syncing.
   def keep_around(*shas)
     shas.each do |sha|
-      begin
-        next unless sha.present? && commit_by(oid: sha)
+      next unless sha.present? && commit_by(oid: sha)
-        next if kept_around?(sha)
+      next if kept_around?(sha)
-        # This will still fail if the file is corrupted (e.g. 0 bytes)
-        raw_repository.write_ref(keep_around_ref_name(sha), sha)
-      rescue Gitlab::Git::CommandError => ex
-        Rails.logger.error "Unable to create keep-around reference for repository #{disk_path}: #{ex}"
-      end
+      # This will still fail if the file is corrupted (e.g. 0 bytes)
+      raw_repository.write_ref(keep_around_ref_name(sha), sha)
+    rescue Gitlab::Git::CommandError => ex
+      Rails.logger.error "Unable to create keep-around reference for repository #{disk_path}: #{ex}"
     end
   end
@@ -26,16 +26,14 @@ class UserInteractedProject < ActiveRecord::Base
       cached_exists?(attributes) do
         transaction(requires_new: true) do
-          begin
-            where(attributes).select(1).first || create!(attributes)
-            true # not caching the whole record here for now
-          rescue ActiveRecord::RecordNotUnique
-            # Note, above queries are not atomic and prone
-            # to race conditions (similar like #find_or_create!).
-            # In the case where we hit this, the record we want
-            # already exists - shortcut and return.
-            true
-          end
+          where(attributes).select(1).first || create!(attributes)
+          true # not caching the whole record here for now
+        rescue ActiveRecord::RecordNotUnique
+          # Note, above queries are not atomic and prone
+          # to race conditions (similar like #find_or_create!).
+          # In the case where we hit this, the record we want
+          # already exists - shortcut and return.
+          true
         end
       end
     end
@@ -9,16 +9,14 @@ class DetailedStatusEntity < Grape::Entity
   expose :details_path
   expose :illustration do |status|
-    begin
-      illustration = {
-        image: ActionController::Base.helpers.image_path(status.illustration[:image])
-      }
-      illustration = status.illustration.merge(illustration)
+    illustration = {
+      image: ActionController::Base.helpers.image_path(status.illustration[:image])
+    }
+    illustration = status.illustration.merge(illustration)
-      illustration
-    rescue NotImplementedError
-      # ignored
-    end
+    illustration
+  rescue NotImplementedError
+    # ignored
   end
   expose :favicon do |status|
@@ -42,17 +42,15 @@ module Projects
     def parse_response_links(objects_response)
       objects_response.each_with_object([]) do |entry, link_list|
-        begin
-          link = entry.dig('actions', DOWNLOAD_ACTION, 'href')
+        link = entry.dig('actions', DOWNLOAD_ACTION, 'href')
-          raise DownloadLinkNotFound unless link
+        raise DownloadLinkNotFound unless link
-          link_list << LfsDownloadObject.new(oid: entry['oid'],
-                                             size: entry['size'],
-                                             link: add_credentials(link))
-        rescue DownloadLinkNotFound, Addressable::URI::InvalidURIError
-          log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")
-        end
+        link_list << LfsDownloadObject.new(oid: entry['oid'],
+                                           size: entry['size'],
+                                           link: add_credentials(link))
+      rescue DownloadLinkNotFound, Addressable::URI::InvalidURIError
+        log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")
       end
     end
@@ -75,17 +75,15 @@ module Projects
       create_tmp_storage_dir
       File.open(tmp_filename, 'wb') do |file|
-        begin
-          yield file
-        rescue StandardError => e
-          # If the lfs file is successfully downloaded it will be removed
-          # when it is added to the project's lfs files.
-          # Nevertheless if any excetion raises the file would remain
-          # in the file system. Here we ensure to remove it
-          File.unlink(file) if File.exist?(file)
-          raise e
-        end
+        yield file
+      rescue StandardError => e
+        # If the lfs file is successfully downloaded it will be removed
+        # when it is added to the project's lfs files.
+        # Nevertheless if any excetion raises the file would remain
+        # in the file system. Here we ensure to remove it
+        File.unlink(file) if File.exist?(file)
+        raise e
       end
     end
@@ -25,11 +25,9 @@ module WaitableWorker
       failed = []
       args_list.each do |args|
-        begin
-          new.perform(*args)
-        rescue
-          failed << args
-        end
+        new.perform(*args)
+      rescue
+        failed << args
       end
       bulk_perform_async(failed) if failed.present?
@@ -20,11 +20,9 @@ class CreateGpgSignatureWorker
     # This calculates and caches the signature in the database
     commits.each do |commit|
-      begin
-        Gitlab::Gpg::Commit.new(commit).signature
-      rescue => e
-        Rails.logger.error("Failed to create signature for commit #{commit.id}. Error: #{e.message}")
-      end
+      Gitlab::Gpg::Commit.new(commit).signature
+    rescue => e
+      Rails.logger.error("Failed to create signature for commit #{commit.id}. Error: #{e.message}")
     end
   end
   # rubocop: enable CodeReuse/ActiveRecord
@@ -52,24 +52,22 @@ class EmailsOnPushWorker
     end
     valid_recipients(recipients).each do |recipient|
-      begin
-        send_email(
-          recipient,
-          project_id,
-          author_id: author_id,
-          ref: ref,
-          action: action,
-          compare: compare,
-          reverse_compare: reverse_compare,
-          diff_refs: diff_refs,
-          send_from_committer_email: send_from_committer_email,
-          disable_diffs: disable_diffs
-        )
-      # These are input errors and won't be corrected even if Sidekiq retries
-      rescue Net::SMTPFatalError, Net::SMTPSyntaxError => e
-        logger.info("Failed to send e-mail for project '#{project.full_name}' to #{recipient}: #{e}")
-      end
+      send_email(
+        recipient,
+        project_id,
+        author_id: author_id,
+        ref: ref,
+        action: action,
+        compare: compare,
+        reverse_compare: reverse_compare,
+        diff_refs: diff_refs,
+        send_from_committer_email: send_from_committer_email,
+        disable_diffs: disable_diffs
+      )
+    # These are input errors and won't be corrected even if Sidekiq retries
+    rescue Net::SMTPFatalError, Net::SMTPSyntaxError => e
+      logger.info("Failed to send e-mail for project '#{project.full_name}' to #{recipient}: #{e}")
     end
   ensure
     @email = nil
@@ -126,11 +126,9 @@ module ObjectStorage
       def process_uploader(uploader)
         MigrationResult.new(uploader.upload).tap do |result|
-          begin
-            uploader.migrate!(@to_store)
-          rescue => e
-            result.error = e
-          end
+          uploader.migrate!(@to_store)
+        rescue => e
+          result.error = e
         end
       end
     end
@@ -8,16 +8,15 @@ class PipelineScheduleWorker
   def perform
     Ci::PipelineSchedule.active.where("next_run_at < ?", Time.now)
       .preload(:owner, :project).find_each do |schedule|
-      begin
-        Ci::CreatePipelineService.new(schedule.project,
-                                      schedule.owner,
-                                      ref: schedule.ref)
-          .execute!(:schedule, ignore_skip_ci: true, save_on_errors: true, schedule: schedule)
-      rescue => e
-        error(schedule, e)
-      ensure
-        schedule.schedule_next_run!
-      end
+      Ci::CreatePipelineService.new(schedule.project,
+                                    schedule.owner,
+                                    ref: schedule.ref)
+        .execute!(:schedule, ignore_skip_ci: true, save_on_errors: true, schedule: schedule)
+    rescue => e
+      error(schedule, e)
+    ensure
+      schedule.schedule_next_run!
     end
   end
   # rubocop: enable CodeReuse/ActiveRecord
@@ -6,11 +6,9 @@ class RemoveExpiredMembersWorker
   def perform
     Member.expired.find_each do |member|
-      begin
-        Members::DestroyService.new.execute(member, skip_authorization: true)
-      rescue => ex
-        logger.error("Expired Member ID=#{member.id} cannot be removed - #{ex}")
-      end
+      Members::DestroyService.new.execute(member, skip_authorization: true)
+    rescue => ex
+      logger.error("Expired Member ID=#{member.id} cannot be removed - #{ex}")
     end
   end
 end
@@ -14,10 +14,8 @@ module Rack
 end
 gitlab_trusted_proxies = Array(Gitlab.config.gitlab.trusted_proxies).map do |proxy|
-  begin
-    IPAddr.new(proxy)
-  rescue IPAddr::InvalidAddressError
-  end
+  IPAddr.new(proxy)
+rescue IPAddr::InvalidAddressError
 end.compact
 Rails.application.config.action_dispatch.trusted_proxies = (
@@ -126,11 +126,10 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration[4.2]
       queues.each do |queue|
         # Stealing is racy so it's possible a pop might be called on an
         # already-empty queue.
-        begin
-          remove_orphans(*queue.pop(true))
-          stolen = true
-        rescue ThreadError
-        end
+        remove_orphans(*queue.pop(true))
+        stolen = true
+      rescue ThreadError
       end
       break unless stolen
@@ -103,17 +103,15 @@ module API
         detail 'This feature was introduced in GitLab 11.9'
       end
       post ':id/milestones/:milestone_id/promote' do
-        begin
-          authorize! :admin_milestone, user_project
-          authorize! :admin_milestone, user_project.group
+        authorize! :admin_milestone, user_project
+        authorize! :admin_milestone, user_project.group
-          milestone = user_project.milestones.find(params[:milestone_id])
-          Milestones::PromoteService.new(user_project, current_user).execute(milestone)
+        milestone = user_project.milestones.find(params[:milestone_id])
+        Milestones::PromoteService.new(user_project, current_user).execute(milestone)
-          status(200)
-        rescue Milestones::PromoteService::PromoteMilestoneError => error
-          render_api_error!(error.message, 400)
-        end
+        status(200)
+      rescue Milestones::PromoteService::PromoteMilestoneError => error
+        render_api_error!(error.message, 400)
       end
     end
   end
@@ -89,11 +89,9 @@ module API
         optional :format, type: String, desc: 'The archive format'
       end
       get ':id/repository/archive', requirements: { format: Gitlab::PathRegex.archive_formats_regex } do
-        begin
-          send_git_archive user_project.repository, ref: params[:sha], format: params[:format], append_sha: true
-        rescue
-          not_found!('File')
-        end
+        send_git_archive user_project.repository, ref: params[:sha], format: params[:format], append_sha: true
+      rescue
+        not_found!('File')
       end
       desc 'Compare two branches, tags, or commits' do
@@ -118,12 +116,10 @@ module API
         optional :sort, type: String, values: %w[asc desc], default: 'asc', desc: 'Sort by asc (ascending) or desc (descending)'
       end
       get ':id/repository/contributors' do
-        begin
-          contributors = ::Kaminari.paginate_array(user_project.repository.contributors(order_by: params[:order_by], sort: params[:sort]))
-          present paginate(contributors), with: Entities::Contributor
-        rescue
-          not_found!
-        end
+        contributors = ::Kaminari.paginate_array(user_project.repository.contributors(order_by: params[:order_by], sort: params[:sort]))
+        present paginate(contributors), with: Entities::Contributor
+      rescue
+        not_found!
       end
       desc 'Get the common ancestor between commits' do
@@ -11,11 +11,10 @@ module Gitlab
         # So we chose a way to use ::Ci::Build directly and we don't change the `archive!` method until 11.1
         ::Ci::Build.finished.without_archived_trace
           .where(id: start_id..stop_id).find_each do |build|
-          begin
-            build.trace.archive!
-          rescue => e
-            Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}"
-          end
+          build.trace.archive!
+        rescue => e
+          Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}"
         end
       end
     end
@@ -302,14 +302,12 @@ module Gitlab
       ldap_identities = Identity.where("provider like 'ldap%'").where(id: start_id..end_id)
       ldap_identities.each do |identity|
-        begin
-          identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s
-          unless identity.save
-            Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping."
-          end
-        rescue Gitlab::Auth::LDAP::DN::FormatError => e
-          Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping."
+        identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s
+        unless identity.save
+          Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping."
+        end
+      rescue Gitlab::Auth::LDAP::DN::FormatError => e
+        Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping."
       end
     end
@@ -34,18 +34,16 @@ module Gitlab
     def filter_error_files(files)
       files.partition do |file|
-        begin
-          file.to_h
-          true
-        rescue => e
-          msg = <<~MSG
+        file.to_h
+        true
+      rescue => e
+        msg = <<~MSG
           Error parsing path "#{file.path}":
             #{e.message}
            #{e.backtrace.join("\n ")}
         MSG
-          Rails.logger.error(msg)
-          false
-        end
+        Rails.logger.error(msg)
+        false
       end
     end
@@ -79,31 +79,29 @@ module Gitlab
         create_labels
         client.issues(repo).each do |issue|
-          begin
-            description = ''
-            description += @formatter.author_line(issue.author) unless find_user_id(issue.author)
-            description += issue.description
-            label_name = issue.kind
-            milestone = issue.milestone ? project.milestones.find_or_create_by(title: issue.milestone) : nil
-            gitlab_issue = project.issues.create!(
-              iid: issue.iid,
-              title: issue.title,
-              description: description,
-              state: issue.state,
-              author_id: gitlab_user_id(project, issue.author),
-              milestone: milestone,
-              created_at: issue.created_at,
-              updated_at: issue.updated_at
-            )
-            gitlab_issue.labels << @labels[label_name]
-            import_issue_comments(issue, gitlab_issue) if gitlab_issue.persisted?
-          rescue StandardError => e
-            errors << { type: :issue, iid: issue.iid, errors: e.message }
-          end
+          description = ''
+          description += @formatter.author_line(issue.author) unless find_user_id(issue.author)
+          description += issue.description
+          label_name = issue.kind
+          milestone = issue.milestone ? project.milestones.find_or_create_by(title: issue.milestone) : nil
+          gitlab_issue = project.issues.create!(
+            iid: issue.iid,
+            title: issue.title,
+            description: description,
+            state: issue.state,
+            author_id: gitlab_user_id(project, issue.author),
+            milestone: milestone,
+            created_at: issue.created_at,
+            updated_at: issue.updated_at
+          )
+          gitlab_issue.labels << @labels[label_name]
+          import_issue_comments(issue, gitlab_issue) if gitlab_issue.persisted?
+        rescue StandardError => e
+          errors << { type: :issue, iid: issue.iid, errors: e.message }
         end
       end
       # rubocop: enable CodeReuse/ActiveRecord
@@ -150,37 +148,35 @@ module Gitlab
         pull_requests = client.pull_requests(repo)
         pull_requests.each do |pull_request|
-          begin
-            description = ''
-            description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author)
-            description += pull_request.description
-            source_branch_sha = pull_request.source_branch_sha
-            target_branch_sha = pull_request.target_branch_sha
-            source_branch_sha = project.repository.commit(source_branch_sha)&.sha || source_branch_sha
-            target_branch_sha = project.repository.commit(target_branch_sha)&.sha || target_branch_sha
-            merge_request = project.merge_requests.create!(
-              iid: pull_request.iid,
-              title: pull_request.title,
-              description: description,
-              source_project: project,
-              source_branch: pull_request.source_branch_name,
-              source_branch_sha: source_branch_sha,
-              target_project: project,
-              target_branch: pull_request.target_branch_name,
-              target_branch_sha: target_branch_sha,
-              state: pull_request.state,
-              author_id: gitlab_user_id(project, pull_request.author),
-              assignee_id: nil,
-              created_at: pull_request.created_at,
-              updated_at: pull_request.updated_at
-            )
-            import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
-          rescue StandardError => e
-            errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw }
-          end
+          description = ''
+          description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author)
+          description += pull_request.description
+          source_branch_sha = pull_request.source_branch_sha
+          target_branch_sha = pull_request.target_branch_sha
+          source_branch_sha = project.repository.commit(source_branch_sha)&.sha || source_branch_sha
+          target_branch_sha = project.repository.commit(target_branch_sha)&.sha || target_branch_sha
+          merge_request = project.merge_requests.create!(
+            iid: pull_request.iid,
+            title: pull_request.title,
+            description: description,
+            source_project: project,
+            source_branch: pull_request.source_branch_name,
+            source_branch_sha: source_branch_sha,
+            target_project: project,
+            target_branch: pull_request.target_branch_name,
+            target_branch_sha: target_branch_sha,
+            state: pull_request.state,
+            author_id: gitlab_user_id(project, pull_request.author),
+            assignee_id: nil,
+            created_at: pull_request.created_at,
+            updated_at: pull_request.updated_at
+          )
+          import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
+        rescue StandardError => e
+          errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw }
        end
      end
@@ -211,23 +207,21 @@ module Gitlab
        end
        inline_comments.each do |comment|
-          begin
-            attributes = pull_request_comment_attributes(comment)
-            attributes[:discussion_id] = discussion_map[comment.parent_id] if comment.has_parent?
+          attributes = pull_request_comment_attributes(comment)
+          attributes[:discussion_id] = discussion_map[comment.parent_id] if comment.has_parent?
-            attributes.merge!(
-              position: position_map[comment.iid],
-              type: 'DiffNote')
+          attributes.merge!(
+            position: position_map[comment.iid],
+            type: 'DiffNote')
-            note = merge_request.notes.create!(attributes)
+          note = merge_request.notes.create!(attributes)
-            # We can't store a discussion ID until a note is created, so if
-            # replies are created before the parent the discussion ID won't be
-            # linked properly.
-            discussion_map[comment.iid] = note.discussion_id
-          rescue StandardError => e
-            errors << { type: :pull_request, iid: comment.iid, errors: e.message }
-          end
+          # We can't store a discussion ID until a note is created, so if
+          # replies are created before the parent the discussion ID won't be
+          # linked properly.
+          discussion_map[comment.iid] = note.discussion_id
+        rescue StandardError => e
+          errors << { type: :pull_request, iid: comment.iid, errors: e.message }
        end
      end
@@ -245,11 +239,9 @@ module Gitlab
      def import_standalone_pr_comments(pr_comments, merge_request)
        pr_comments.each do |comment|
-          begin
-            merge_request.notes.create!(pull_request_comment_attributes(comment))
-          rescue StandardError => e
-            errors << { type: :pull_request, iid: comment.iid, errors: e.message }
-          end
+          merge_request.notes.create!(pull_request_comment_attributes(comment))
+        rescue StandardError => e
+          errors << { type: :pull_request, iid: comment.iid, errors: e.message }
        end
      end
@@ -162,27 +162,23 @@ module Gitlab
      restore_branches(batch) if recover_missing_commits
      batch.each do |pull_request|
-        begin
-          import_bitbucket_pull_request(pull_request)
-        rescue StandardError => e
-          backtrace = Gitlab::Profiler.clean_backtrace(e.backtrace)
-          log_error(stage: 'import_pull_requests', iid: pull_request.iid, error: e.message, backtrace: backtrace)
-          errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, backtrace: backtrace.join("\n"), raw_response: pull_request.raw }
-        end
+        import_bitbucket_pull_request(pull_request)
+      rescue StandardError => e
+        backtrace = Gitlab::Profiler.clean_backtrace(e.backtrace)
+        log_error(stage: 'import_pull_requests', iid: pull_request.iid, error: e.message, backtrace: backtrace)
+        errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, backtrace: backtrace.join("\n"), raw_response: pull_request.raw }
      end
    end

    def delete_temp_branches
      @temp_branches.each do |branch|
-        begin
-          client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
-          project.repository.delete_branch(branch.name)
-        rescue BitbucketServer::Connection::ConnectionError => e
-          log_error(stage: 'delete_temp_branches', branch: branch.name, error: e.message)
-          @errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
-        end
+        client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
+        project.repository.delete_branch(branch.name)
+      rescue BitbucketServer::Connection::ConnectionError => e
+        log_error(stage: 'delete_temp_branches', branch: branch.name, error: e.message)
+        @errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
      end
    end
@@ -323,16 +319,14 @@ module Gitlab
      def import_standalone_pr_comments(pr_comments, merge_request)
        pr_comments.each do |comment|
-          begin
-            merge_request.notes.create!(pull_request_comment_attributes(comment))
+          merge_request.notes.create!(pull_request_comment_attributes(comment))
-            comment.comments.each do |replies|
-              merge_request.notes.create!(pull_request_comment_attributes(replies))
-            end
-          rescue StandardError => e
-            log_error(stage: 'import_standalone_pr_comments', merge_request_id: merge_request.id, comment_id: comment.id, error: e.message)
-            errors << { type: :pull_request, comment_id: comment.id, errors: e.message }
+          comment.comments.each do |replies|
+            merge_request.notes.create!(pull_request_comment_attributes(replies))
+          end
+        rescue StandardError => e
+          log_error(stage: 'import_standalone_pr_comments', merge_request_id: merge_request.id, comment_id: comment.id, error: e.message)
+          errors << { type: :pull_request, comment_id: comment.id, errors: e.message }
        end
      end
@@ -98,7 +98,7 @@ module Gitlab
      def read_uint32(gz)
        binary = gz.read(4)
-        binary.unpack('L>')[0] if binary
+        binary.unpack1('L>') if binary
      end
      def read_string(gz)
@@ -35,12 +35,10 @@ module Gitlab
      threads = Array.new(thread_count) do
        Thread.new do
          pool.with_connection do |connection|
-            begin
-              Thread.current[MULTI_THREAD_AR_CONNECTION] = connection
-              yield
-            ensure
-              Thread.current[MULTI_THREAD_AR_CONNECTION] = nil
-            end
+            Thread.current[MULTI_THREAD_AR_CONNECTION] = connection
+            yield
+          ensure
+            Thread.current[MULTI_THREAD_AR_CONNECTION] = nil
          end
        end
      end
@@ -22,7 +22,7 @@ module Gitlab
      # Casts binary data to a SHA1 in hexadecimal.
      def deserialize(value)
        value = super(value)
-        value ? value.unpack(PACK_FORMAT)[0] : nil
+        value ? value.unpack1(PACK_FORMAT) : nil
      end
      # Casts a SHA1 in hexadecimal to the proper binary format.
@@ -75,13 +75,11 @@ module Gitlab
      @certs = stub_cert_paths.flat_map do |cert_file|
        File.read(cert_file).scan(PEM_REGEX).map do |cert|
-          begin
-            OpenSSL::X509::Certificate.new(cert).to_pem
-          rescue OpenSSL::OpenSSLError => e
-            Rails.logger.error "Could not load certificate #{cert_file} #{e}"
-            Gitlab::Sentry.track_exception(e, extra: { cert_file: cert_file })
-            nil
-          end
+          OpenSSL::X509::Certificate.new(cert).to_pem
+        rescue OpenSSL::OpenSSLError => e
+          Rails.logger.error "Could not load certificate #{cert_file} #{e}"
+          Gitlab::Sentry.track_exception(e, extra: { cert_file: cert_file })
+          nil
        end.compact
      end.uniq.join("\n")
    end
@@ -13,17 +13,15 @@ module Gitlab
      current_blob_data = nil
      @rpc_response.each do |msg|
-        begin
-          if msg.oid.blank? && msg.data.blank?
-            next
-          elsif msg.oid.present?
-            yield new_blob(current_blob_data) if current_blob_data
-            current_blob_data = msg.to_h.slice(:oid, :path, :size, :revision, :mode)
-            current_blob_data[:data] = msg.data.dup
-          else
-            current_blob_data[:data] << msg.data
-          end
+        if msg.oid.blank? && msg.data.blank?
+          next
+        elsif msg.oid.present?
+          yield new_blob(current_blob_data) if current_blob_data
+          current_blob_data = msg.to_h.slice(:oid, :path, :size, :revision, :mode)
+          current_blob_data[:data] = msg.data.dup
+        else
+          current_blob_data[:data] << msg.data
+        end
      end
@@ -89,12 +89,10 @@ module Gitlab
      def import_labels
        fetch_resources(:labels, repo, per_page: 100) do |labels|
          labels.each do |raw|
-            begin
-              gh_label = LabelFormatter.new(project, raw)
-              gh_label.create!
-            rescue => e
-              errors << { type: :label, url: Gitlab::UrlSanitizer.sanitize(gh_label.url), errors: e.message }
-            end
+            gh_label = LabelFormatter.new(project, raw)
+            gh_label.create!
+          rescue => e
+            errors << { type: :label, url: Gitlab::UrlSanitizer.sanitize(gh_label.url), errors: e.message }
          end
        end
@@ -104,12 +102,10 @@ module Gitlab
      def import_milestones
        fetch_resources(:milestones, repo, state: :all, per_page: 100) do |milestones|
          milestones.each do |raw|
-            begin
-              gh_milestone = MilestoneFormatter.new(project, raw)
-              gh_milestone.create!
-            rescue => e
-              errors << { type: :milestone, url: Gitlab::UrlSanitizer.sanitize(gh_milestone.url), errors: e.message }
-            end
+            gh_milestone = MilestoneFormatter.new(project, raw)
+            gh_milestone.create!
+          rescue => e
+            errors << { type: :milestone, url: Gitlab::UrlSanitizer.sanitize(gh_milestone.url), errors: e.message }
          end
        end
      end
@@ -223,24 +219,22 @@ module Gitlab
      def create_comments(comments)
        ActiveRecord::Base.no_touching do
          comments.each do |raw|
-            begin
-              comment = CommentFormatter.new(project, raw, client)
+            comment = CommentFormatter.new(project, raw, client)
-              # GH does not return info about comment's parent, so we guess it by checking its URL!
-              *_, parent, iid = URI(raw.html_url).path.split('/')
+            # GH does not return info about comment's parent, so we guess it by checking its URL!
+            *_, parent, iid = URI(raw.html_url).path.split('/')
-              issuable = if parent == 'issues'
-                           Issue.find_by(project_id: project.id, iid: iid)
-                         else
-                           MergeRequest.find_by(target_project_id: project.id, iid: iid)
-                         end
+            issuable = if parent == 'issues'
+                         Issue.find_by(project_id: project.id, iid: iid)
+                       else
+                         MergeRequest.find_by(target_project_id: project.id, iid: iid)
+                       end
-              next unless issuable
+            next unless issuable
-              issuable.notes.create!(comment.attributes)
-            rescue => e
-              errors << { type: :comment, url: Gitlab::UrlSanitizer.sanitize(raw.url), errors: e.message }
-            end
+            issuable.notes.create!(comment.attributes)
+          rescue => e
+            errors << { type: :comment, url: Gitlab::UrlSanitizer.sanitize(raw.url), errors: e.message }
          end
        end
      end
@@ -281,12 +275,10 @@ module Gitlab
      def import_releases
        fetch_resources(:releases, repo, per_page: 100) do |releases|
          releases.each do |raw|
-            begin
-              gh_release = ReleaseFormatter.new(project, raw)
-              gh_release.create! if gh_release.valid?
-            rescue => e
-              errors << { type: :release, url: Gitlab::UrlSanitizer.sanitize(gh_release.url), errors: e.message }
-            end
+            gh_release = ReleaseFormatter.new(project, raw)
+            gh_release.create! if gh_release.valid?
+          rescue => e
+            errors << { type: :release, url: Gitlab::UrlSanitizer.sanitize(gh_release.url), errors: e.message }
          end
        end
      end
@@ -52,10 +52,8 @@ module Gitlab
      pool&.with do |connection|
        prepared.each_slice(settings[:packet_size]) do |slice|
-          begin
-            connection.write_points(slice)
-          rescue StandardError
-          end
+          connection.write_points(slice)
+        rescue StandardError
        end
      end
    rescue Errno::EADDRNOTAVAIL, SocketError => ex
@@ -11,14 +11,13 @@ namespace :gitlab do
      Ci::Build.joins(:project)
        .with_artifacts_stored_locally
        .find_each(batch_size: 10) do |build|
-        begin
-          build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
-          build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
-          logger.info("Transferred artifact ID #{build.id} with size #{build.artifacts_size} to object storage")
-        rescue => e
-          logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
-        end
+        build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
+        build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
+        logger.info("Transferred artifact ID #{build.id} with size #{build.artifacts_size} to object storage")
+      rescue => e
+        logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
      end
    end
  end
@@ -9,13 +9,12 @@ namespace :gitlab do
      LfsObject.with_files_stored_locally
        .find_each(batch_size: 10) do |lfs_object|
-        begin
-          lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
-          logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
-        rescue => e
-          logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
-        end
+        lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+        logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
+      rescue => e
+        logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
      end
    end
  end
@@ -26,13 +26,12 @@ namespace :gitlab do
      Ci::Build.joins(:project)
        .with_archived_trace_stored_locally
        .find_each(batch_size: 10) do |build|
-        begin
-          build.job_artifacts_trace.file.migrate!(ObjectStorage::Store::REMOTE)
-          logger.info("Transferred job trace of #{build.id} to object storage")
-        rescue => e
-          logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
-        end
+        build.job_artifacts_trace.file.migrate!(ObjectStorage::Store::REMOTE)
+        logger.info("Transferred job trace of #{build.id} to object storage")
+      rescue => e
+        logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
      end
    end
  end
@@ -19,11 +19,9 @@ unless Rails.env.production?
    desc "GitLab | lint | Lint HAML files"
    task :haml do
-      begin
-        Rake::Task['haml_lint'].invoke
-      rescue RuntimeError # The haml_lint tasks raise a RuntimeError
-        exit(1)
-      end
+      Rake::Task['haml_lint'].invoke
+    rescue RuntimeError # The haml_lint tasks raise a RuntimeError
+      exit(1)
    end
    desc "GitLab | lint | Run several lint checks"
@@ -2,49 +2,43 @@ desc "GitLab | Build internal ids for issues and merge requests"
 task migrate_iids: :environment do
   puts 'Issues'.color(:yellow)
   Issue.where(iid: nil).find_each(batch_size: 100) do |issue|
-    begin
-      issue.set_iid
+    issue.set_iid
-      if issue.update_attribute(:iid, issue.iid)
-        print '.'
-      else
-        print 'F'
-      end
-    rescue
+    if issue.update_attribute(:iid, issue.iid)
+      print '.'
+    else
+      print 'F'
+    end
+  rescue
     print 'F'
   end

   puts 'done'
   puts 'Merge Requests'.color(:yellow)
   MergeRequest.where(iid: nil).find_each(batch_size: 100) do |mr|
-    begin
-      mr.set_iid
+    mr.set_iid
-      if mr.update_attribute(:iid, mr.iid)
-        print '.'
-      else
-        print 'F'
-      end
-    rescue
+    if mr.update_attribute(:iid, mr.iid)
+      print '.'
+    else
+      print 'F'
+    end
+  rescue
     print 'F'
   end

   puts 'done'
   puts 'Milestones'.color(:yellow)
   Milestone.where(iid: nil).find_each(batch_size: 100) do |m|
-    begin
-      m.set_iid
+    m.set_iid
-      if m.update_attribute(:iid, m.iid)
-        print '.'
-      else
-        print 'F'
-      end
-    rescue
+    if m.update_attribute(:iid, m.iid)
+      print '.'
+    else
+      print 'F'
+    end
+  rescue
     print 'F'
   end

   puts 'done'
@@ -43,11 +43,9 @@ module QA
      def create_new_file_from_template(file_name, template)
        click_element :new_file
        within_element(:template_list) do
-          begin
-            click_on file_name
-          rescue Capybara::ElementNotFound
-            raise ElementNotFound, %Q(Couldn't find file template named "#{file_name}". Please confirm that it is a valid option.)
-          end
+          click_on file_name
+        rescue Capybara::ElementNotFound
+          raise ElementNotFound, %Q(Couldn't find file template named "#{file_name}". Please confirm that it is a valid option.)
        end
        wait(reload: false) do
@@ -79,15 +79,13 @@ module GitalyTest
    socket = read_socket_path
    Integer(timeout / delay).times do
-      begin
-        UNIXSocket.new(socket)
-        puts ' OK'
-        return
-      rescue Errno::ENOENT, Errno::ECONNREFUSED
-        print '.'
-        sleep delay
-      end
+      UNIXSocket.new(socket)
+      puts ' OK'
+      return
+    rescue Errno::ENOENT, Errno::ECONNREFUSED
+      print '.'
+      sleep delay
    end
    puts ' FAILED'
@@ -12,11 +12,9 @@ module SimpleCov
    def resultset_hashes
      resultset_files.map do |path|
-        begin
-          JSON.parse(File.read(path))
-        rescue
-          {}
-        end
+        JSON.parse(File.read(path))
+      rescue
+        {}
      end
    end
@@ -172,10 +172,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :m
  let(:exception) { ActiveRecord::RecordNotFound }
  let(:perform_ignoring_exceptions) do
-    begin
-      subject.perform(start_id, stop_id)
-    rescue described_class::Error
-    end
+    subject.perform(start_id, stop_id)
+  rescue described_class::Error
  end
  before do
@@ -37,14 +37,12 @@ describe Gitlab::Git::RepositoryCleaner do
    let(:object_map) { Gitlab::HttpIO.new(url, object_map_data.size) }
    around do |example|
-      begin
-        tempfile.write(object_map_data)
-        tempfile.close
+      tempfile.write(object_map_data)
+      tempfile.close
-        example.run
-      ensure
-        tempfile.unlink
-      end
+      example.run
+    ensure
+      tempfile.unlink
    end
    it 'removes internal references' do
@@ -44,11 +44,9 @@ describe 'Puma' do
  end
  after(:all) do
-    begin
-      WebMock.disable_net_connect!(allow_localhost: true)
-      Process.kill('TERM', @puma_master_pid)
-    rescue Errno::ESRCH
-    end
+    WebMock.disable_net_connect!(allow_localhost: true)
+    Process.kill('TERM', @puma_master_pid)
+  rescue Errno::ESRCH
  end
  def wait_puma_boot!(master_pid, ready_file)
@@ -128,10 +128,8 @@ describe Projects::DestroyService do
    it 'keeps project team intact upon an error' do
      perform_enqueued_jobs do
-        begin
-          destroy_project(project, user, {})
-        rescue ::Redis::CannotConnectError
-        end
+        destroy_project(project, user, {})
+      rescue ::Redis::CannotConnectError
      end
      expect(project.team.members.count).to eq 2
@@ -18,12 +18,10 @@ module GraphqlHelpers
  # Runs a block inside a BatchLoader::Executor wrapper
  def batch(max_queries: nil, &blk)
    wrapper = proc do
-      begin
-        BatchLoader::Executor.ensure_current
-        yield
-      ensure
-        BatchLoader::Executor.clear_current
-      end
+      BatchLoader::Executor.ensure_current
+      yield
+    ensure
+      BatchLoader::Executor.clear_current
    end
    if max_queries
@@ -23,15 +23,13 @@ module StubObjectStorage
    Fog.mock!
    ::Fog::Storage.new(connection_params).tap do |connection|
-      begin
-        connection.directories.create(key: remote_directory)
+      connection.directories.create(key: remote_directory)
-        # Cleanup remaining files
-        connection.directories.each do |directory|
-          directory.files.map(&:destroy)
-        end
-      rescue Excon::Error::Conflict
-      end
+      # Cleanup remaining files
+      connection.directories.each do |directory|
+        directory.files.map(&:destroy)
+      end
+    rescue Excon::Error::Conflict
    end
  end
@@ -202,12 +202,10 @@ module TestEnv
    socket = Gitlab::GitalyClient.address('default').sub('unix:', '')
    Integer(sleep_time / sleep_interval).times do
-      begin
-        Socket.unix(socket)
-        return
-      rescue
-        sleep sleep_interval
-      end
+      Socket.unix(socket)
+      return
+    rescue
+      sleep sleep_interval
    end
    raise "could not connect to gitaly at #{socket.inspect} after #{sleep_time} seconds"