Commit 39b4ad69 authored by Bob Van Landuyt

Merge branch 'enable_GlobalStdStream' into 'master'

Enable Style/GlobalStdStream

See merge request gitlab-org/gitlab!62853
parents 21cb9755 88a1c327
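
For context, RuboCop's `Style/GlobalStdStream` cop flags the stream constants `STDIN`, `STDOUT`, and `STDERR` and auto-corrects them to the global variables `$stdin`, `$stdout`, and `$stderr`, which follow stream reassignment (for example when a test temporarily swaps `$stdout` for a `StringIO`). A minimal illustrative sketch of the pattern this commit applies throughout the codebase, not taken from the diff below:

```ruby
require 'logger'

# Flagged by Style/GlobalStdStream: STDOUT always refers to the process's
# original standard output, even after the stream has been reassigned.
legacy_logger = Logger.new(STDOUT)

# Auto-corrected form: $stdout respects reassignment, e.g. when a spec
# temporarily redirects output in order to capture it.
preferred_logger = Logger.new($stdout)
```
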
@@ -742,11 +742,6 @@ Style/ExplicitBlockArgument:
Style/FormatString:
Enabled: false
# Offense count: 67
# Cop supports --auto-correct.
Style/GlobalStdStream:
Enabled: false
# Offense count: 897
# Configuration parameters: MinBodyLength.
Style/GuardClause:
@@ -79,7 +79,7 @@ require 'json'
require 'mail'
# The incoming variables are in JSON format so we need to parse it first.
ARGS = JSON.parse(STDIN.read)
ARGS = JSON.parse($stdin.read)
# We only want to trigger this file hook on the event project_create
return unless ARGS['event_name'] == 'project_create'
@@ -26,7 +26,7 @@ You can enable output of Active Record debug logging in the Rails console
session by running:
```ruby
ActiveRecord::Base.logger = Logger.new(STDOUT)
ActiveRecord::Base.logger = Logger.new($stdout)
```
This will show information about database queries triggered by any Ruby code
@@ -100,7 +100,7 @@ Rails.cache.instance_variable_get(:@data).keys
```ruby
# Before 11.6.0
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
admin_token = User.find_by_username('ADMIN_USERNAME').personal_access_tokens.first.token
app.get("URL/?private_token=#{admin_token}")
@@ -113,7 +113,7 @@ Gitlab::Profiler.with_user(admin) { app.get(url) }
## Using the GitLab profiler inside console (used as of 10.5)
```ruby
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
admin = User.find_by_username('ADMIN_USERNAME')
Gitlab::Profiler.profile('URL', logger: logger, user: admin)
```
@@ -46,7 +46,7 @@ Let's enable debug logging for Active Record so we can see the underlying
database queries made:
```ruby
ActiveRecord::Base.logger = Logger.new(STDOUT)
ActiveRecord::Base.logger = Logger.new($stdout)
```
Now, let's try retrieving a user from the database:
@@ -49,7 +49,7 @@ ActiveRecord and ActionController log output to that logger. Further options are
documented with the method source.
```ruby
Gitlab::Profiler.profile('/gitlab-org/gitlab-test', user: User.first, logger: Logger.new(STDOUT))
Gitlab::Profiler.profile('/gitlab-org/gitlab-test', user: User.first, logger: Logger.new($stdout))
```
There is also a RubyProf printer available:
@@ -51,7 +51,7 @@ namespace :gitlab do
desc "GitLab | Elasticsearch | Index all snippets"
task index_snippets: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info("Indexing snippets...")
Snippet.es_import
......
#!/usr/bin/env ruby
x = STDIN.read
x = $stdin.read
File.write('/tmp/rb-data.txt', x)
@@ -170,7 +170,7 @@ module Gitlab
def self.print_by_total_time(result, options = {})
default_options = { sort_method: :total_time, filter_by: :total_time }
RubyProf::FlatPrinter.new(result).print(STDOUT, default_options.merge(options))
RubyProf::FlatPrinter.new(result).print($stdout, default_options.merge(options))
end
end
end
@@ -22,7 +22,7 @@ module Gitlab
CommandError = Class.new(StandardError)
def initialize(log_output = STDERR)
def initialize(log_output = $stderr)
require_relative '../../../lib/gitlab/sidekiq_logging/json_formatter'
# As recommended by https://github.com/mperham/sidekiq/wiki/Advanced-Options#concurrency
@@ -61,7 +61,7 @@ module Gitlab
def prompt(message, choices = nil)
begin
print(message)
answer = STDIN.gets.chomp
answer = $stdin.gets.chomp
end while choices.present? && !choices.include?(answer)
answer
end
@@ -70,12 +70,12 @@ module Gitlab
#
# message - custom message to display before input
def prompt_for_password(message = 'Enter password: ')
unless STDIN.tty?
unless $stdin.tty?
print(message)
return STDIN.gets.chomp
return $stdin.gets.chomp
end
STDIN.getpass(message)
$stdin.getpass(message)
end
# Runs the given command and matches the output against the given pattern
@@ -9,7 +9,7 @@ module Gitlab
attr_writer :logger
def logger
@logger ||= Logger.new(STDOUT)
@logger ||= Logger.new($stdout)
end
end
@@ -67,7 +67,7 @@ module Gitlab
def log_info(details)
details = base_log_data.merge(details)
details = details.to_yaml if ActiveSupport::Logger.logger_outputs_to?(Measuring.logger, STDOUT)
details = details.to_yaml if ActiveSupport::Logger.logger_outputs_to?(Measuring.logger, $stdout)
Measuring.logger.info(details)
end
end
@@ -7,7 +7,7 @@ desc 'GitLab | Artifacts | Migrate files for artifacts to comply with new storag
namespace :gitlab do
namespace :artifacts do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
helper = Gitlab::LocalAndRemoteStorageMigration::ArtifactMigrater.new(logger)
@@ -19,7 +19,7 @@ namespace :gitlab do
end
task migrate_to_local: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
helper = Gitlab::LocalAndRemoteStorageMigration::ArtifactMigrater.new(logger)
@@ -178,7 +178,7 @@ namespace :gitlab do
return @logger if defined?(@logger)
@logger = if Rails.env.development? || Rails.env.production?
Logger.new(STDOUT).tap do |stdout_logger|
Logger.new($stdout).tap do |stdout_logger|
stdout_logger.extend(ActiveSupport::Logger.broadcast(Rails.logger))
stdout_logger.level = debug? ? Logger::DEBUG : Logger::INFO
end
@@ -209,7 +209,7 @@ namespace :gitlab do
raise "Index not found or not supported: #{args[:index_name]}" if indexes.empty?
end
ActiveRecord::Base.logger = Logger.new(STDOUT) if Gitlab::Utils.to_boolean(ENV['LOG_QUERIES_TO_CONSOLE'], default: false)
ActiveRecord::Base.logger = Logger.new($stdout) if Gitlab::Utils.to_boolean(ENV['LOG_QUERIES_TO_CONSOLE'], default: false)
Gitlab::Database::Reindexing.perform(indexes)
rescue StandardError => e
@@ -14,14 +14,14 @@ namespace :gitlab do
old_path = args.old_path
else
puts '=> Enter the path of the OLD file:'
old_path = STDIN.gets.chomp
old_path = $stdin.gets.chomp
end
if args.new_path
new_path = args.new_path
else
puts '=> Enter the path of the NEW file:'
new_path = STDIN.gets.chomp
new_path = $stdin.gets.chomp
end
#
@@ -4,7 +4,7 @@ namespace :gitlab do
namespace :doctor do
desc "GitLab | Check if the database encrypted values can be decrypted using current secrets"
task secrets: :gitlab_environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.level = Gitlab::Utils.to_boolean(ENV['VERBOSE']) ? Logger::DEBUG : Logger::INFO
@@ -42,7 +42,7 @@ namespace :gitlab do
namespace :secret do
desc 'GitLab | LDAP | Secret | Write LDAP secrets'
task write: [:environment] do
content = STDIN.tty? ? STDIN.gets : STDIN.read
content = $stdin.tty? ? $stdin.gets : $stdin.read
Gitlab::EncryptedLdapCommand.write(content)
end
@@ -6,7 +6,7 @@ desc "GitLab | LFS | Migrate LFS objects to remote storage"
namespace :gitlab do
namespace :lfs do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Starting transfer of LFS files to object storage')
LfsObject.with_files_stored_locally
@@ -20,7 +20,7 @@ namespace :gitlab do
end
task migrate_to_local: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Starting transfer of LFS files to local storage')
LfsObject.with_files_stored_remotely
@@ -6,7 +6,7 @@ desc "GitLab | Packages | Build composer cache"
namespace :gitlab do
namespace :packages do
task build_composer_cache: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Starting to build composer cache files')
::Packages::Package.composer.find_in_batches do |packages|
@@ -14,7 +14,7 @@ namespace :gitlab do
end
task generate_counts: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Building list of package events...')
path = Gitlab::UsageDataCounters::PackageEventCounter::KNOWN_EVENTS_PATH
@@ -26,7 +26,7 @@ namespace :gitlab do
end
task generate_unique: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Building list of package events...')
path = File.join(File.dirname(Gitlab::UsageDataCounters::HLLRedisCounter::KNOWN_EVENTS_PATH), 'package_events.yml')
@@ -6,7 +6,7 @@ desc "GitLab | Packages | Migrate packages files to remote storage"
namespace :gitlab do
namespace :packages do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Starting transfer of package files to object storage')
unless ::Packages::PackageFileUploader.object_store_enabled?
@@ -35,7 +35,7 @@ namespace :gitlab do
end
def logger
@logger ||= Logger.new(STDOUT)
@logger ||= Logger.new($stdout)
end
def migration_threads
@@ -60,7 +60,7 @@ namespace :gitlab do
namespace :deployments do
task migrate_to_object_storage: :gitlab_environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
helper = Gitlab::LocalAndRemoteStorageMigration::PagesDeploymentMigrater.new(logger)
@@ -72,7 +72,7 @@ namespace :gitlab do
end
task migrate_to_local: :gitlab_environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
helper = Gitlab::LocalAndRemoteStorageMigration::PagesDeploymentMigrater.new(logger)
@@ -6,7 +6,7 @@ desc "GitLab | Terraform | Migrate Terraform states to remote storage"
namespace :gitlab do
namespace :terraform_states do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
logger.info('Starting transfer of Terraform states to object storage')
begin
@@ -16,7 +16,7 @@ namespace :gitlab do
# category to object storage
desc 'GitLab | Uploads | Migrate the uploaded files of specified type to object storage'
task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |_t, args|
Gitlab::Uploads::MigrationHelper.new(args, Logger.new(STDOUT)).migrate_to_remote_storage
Gitlab::Uploads::MigrationHelper.new(args, Logger.new($stdout)).migrate_to_remote_storage
end
namespace :migrate_to_local do
@@ -31,7 +31,7 @@ namespace :gitlab do
desc 'GitLab | Uploads | Migrate the uploaded files of specified type to local storage'
task :migrate_to_local, [:uploader_class, :model_class, :mounted_as] => :environment do |_t, args|
Gitlab::Uploads::MigrationHelper.new(args, Logger.new(STDOUT)).migrate_to_local_storage
Gitlab::Uploads::MigrationHelper.new(args, Logger.new($stdout)).migrate_to_local_storage
end
end
end
@@ -8,7 +8,7 @@ namespace :gitlab do
args.with_defaults(dry_run: 'true')
args.with_defaults(sleep_time: 0.3)
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
sanitizer = Gitlab::Sanitizers::Exif.new(logger: logger)
sanitizer.batch_clean(start_id: args.start_id, stop_id: args.stop_id,
@@ -10,7 +10,7 @@ namespace :gitlab do
end
def update_certificates
logger = Logger.new(STDOUT)
logger = Logger.new($stdout)
unless X509CommitSignature.exists?
logger.info("Unable to find any x509 commit signatures. Exiting.")
@@ -38,7 +38,7 @@ class GithubImport
puts "This will import GitHub #{@repo.full_name.bright} into GitLab #{@project_path.bright} as #{@current_user.name}"
puts "Permission checks are ignored. Press any key to continue.".color(:red)
STDIN.getch
$stdin.getch
puts 'Starting the import (this could take a while)'.color(:green)
end
@@ -131,7 +131,7 @@ class GithubRepos
end
def repo_id
@repo_id ||= STDIN.gets.chomp.to_i
@repo_id ||= $stdin.gets.chomp.to_i
end
def repos
@@ -19,7 +19,7 @@ namespace :tokens do
def reset_all_users_token(reset_token_method)
TmpUser.find_in_batches do |batch|
puts "Processing batch starting with user ID: #{batch.first.id}"
STDOUT.flush
$stdout.flush
batch.each(&reset_token_method)
end
@@ -20,7 +20,7 @@ module QA
end
def run
STDOUT.puts 'Running...'
$stdout.puts 'Running...'
# Fetch group's id
group_id = fetch_group_id
@@ -30,16 +30,16 @@ module QA
# Do not delete projects that are less than 4 days old (for debugging purposes)
project_ids = fetch_project_ids(group_id, total_project_pages)
STDOUT.puts "Number of projects to be deleted: #{project_ids.length}"
$stdout.puts "Number of projects to be deleted: #{project_ids.length}"
delete_projects(project_ids) unless project_ids.empty?
STDOUT.puts "\nDone"
$stdout.puts "\nDone"
end
private
def delete_projects(project_ids)
STDOUT.puts "Deleting #{project_ids.length} projects..."
$stdout.puts "Deleting #{project_ids.length} projects..."
project_ids.each do |project_id|
delete_response = delete Runtime::API::Request.new(@api_client, "/projects/#{project_id}").url
dot_or_f = delete_response.code.between?(200, 300) ? "\e[32m.\e[0m" : "\e[31mF\e[0m"
@@ -20,7 +20,7 @@ module QA
end
def run
STDOUT.puts 'Running...'
$stdout.puts 'Running...'
# Fetch group's id
group_id = fetch_group_id
@@ -29,16 +29,16 @@ module QA
total_sub_group_pages = sub_groups_head_response.headers[:x_total_pages]
sub_group_ids = fetch_subgroup_ids(group_id, total_sub_group_pages)
STDOUT.puts "Number of Sub Groups not already marked for deletion: #{sub_group_ids.length}"
$stdout.puts "Number of Sub Groups not already marked for deletion: #{sub_group_ids.length}"
delete_subgroups(sub_group_ids) unless sub_group_ids.empty?
STDOUT.puts "\nDone"
$stdout.puts "\nDone"
end
private
def delete_subgroups(sub_group_ids)
STDOUT.puts "Deleting #{sub_group_ids.length} subgroups..."
$stdout.puts "Deleting #{sub_group_ids.length} subgroups..."
sub_group_ids.each do |subgroup_id|
delete_response = delete Runtime::API::Request.new(@api_client, "/groups/#{subgroup_id}").url
dot_or_f = delete_response.code == 202 ? "\e[32m.\e[0m" : "\e[31mF\e[0m"
@@ -30,18 +30,18 @@ module QA
end
def run
STDOUT.puts 'Running...'
$stdout.puts 'Running...'
keys_head_response = head Runtime::API::Request.new(@api_client, "/user/keys", per_page: ITEMS_PER_PAGE).url
total_pages = keys_head_response.headers[:x_total_pages]
test_ssh_key_ids = fetch_test_ssh_key_ids(total_pages)
STDOUT.puts "Number of test ssh keys to be deleted: #{test_ssh_key_ids.length}"
$stdout.puts "Number of test ssh keys to be deleted: #{test_ssh_key_ids.length}"
return if dry_run?
delete_ssh_keys(test_ssh_key_ids) unless test_ssh_key_ids.empty?
STDOUT.puts "\nDone"
$stdout.puts "\nDone"
end
private
@@ -50,7 +50,7 @@ module QA
alias_method :dry_run?, :dry_run
def delete_ssh_keys(ssh_key_ids)
STDOUT.puts "Deleting #{ssh_key_ids.length} ssh keys..."
$stdout.puts "Deleting #{ssh_key_ids.length} ssh keys..."
ssh_key_ids.each do |key_id|
delete_response = delete Runtime::API::Request.new(@api_client, "/user/keys/#{key_id}").url
dot_or_f = delete_response.code == 204 ? "\e[32m.\e[0m" : "\e[31mF\e[0m"
@@ -26,7 +26,7 @@ module QA
end
def all
STDOUT.puts 'Running...'
$stdout.puts 'Running...'
group_id = create_group
create_project(group_id)
@@ -50,23 +50,23 @@ module QA
end
threads_arr.each(&:join)
STDOUT.puts "\nURLs: #{@urls}"
$stdout.puts "\nURLs: #{@urls}"
File.open("urls.yml", "w") { |file| file.puts @urls.stringify_keys.to_yaml }
STDOUT.puts "\nDone"
$stdout.puts "\nDone"
end
def create_group
group_search_response = create_a_group_api_req(@group_name, @visibility)
group = JSON.parse(group_search_response.body)
@urls[:group_page] = group["web_url"]
STDOUT.puts "Created a group: #{@urls[:group_page]}"
$stdout.puts "Created a group: #{@urls[:group_page]}"
group["id"]
end
def create_project(group_id)
create_project_response = create_a_project_api_req(@project_name, group_id, @visibility)
@urls[:project_page] = JSON.parse(create_project_response.body)["web_url"]
STDOUT.puts "Created a project: #{@urls[:project_page]}"
$stdout.puts "Created a project: #{@urls[:project_page]}"
end
def create_many_issues
@@ -74,7 +74,7 @@ module QA
create_an_issue_api_req("#{@group_name}%2F#{@project_name}", "issue#{i}", "desc#{i}")
end
@urls[:issues_list_page] = @urls[:project_page] + "/issues"
STDOUT.puts "Created many issues: #{@urls[:issues_list_page]}"
$stdout.puts "Created many issues: #{@urls[:issues_list_page]}"
end
def create_many_todos
@@ -82,7 +82,7 @@ module QA
create_a_todo_api_req("#{@group_name}%2F#{@project_name}", "#{i + 1}")
end
@urls[:todos_page] = ENV['GITLAB_ADDRESS'] + "/dashboard/todos"
STDOUT.puts "Created many todos: #{@urls[:todos_page]}"
$stdout.puts "Created many todos: #{@urls[:todos_page]}"
end
def create_many_labels
@@ -90,7 +90,7 @@ module QA
create_a_label_api_req("#{@group_name}%2F#{@project_name}", "label#{i}", "#{Faker::Color.hex_color}")
end
@urls[:labels_page] = @urls[:project_page] + "/labels"
STDOUT.puts "Created many labels: #{@urls[:labels_page]}"
$stdout.puts "Created many labels: #{@urls[:labels_page]}"
end
def create_many_merge_requests
@@ -98,7 +98,7 @@ module QA
create_a_merge_request_api_req("#{@group_name}%2F#{@project_name}", "branch#{i}", Runtime::Env.default_branch, "MR#{i}")
end
@urls[:mr_list_page] = @urls[:project_page] + "/merge_requests"
STDOUT.puts "Created many MRs: #{@urls[:mr_list_page]}"
$stdout.puts "Created many MRs: #{@urls[:mr_list_page]}"
end
def create_many_new_files
@@ -109,7 +109,7 @@ module QA
end
@urls[:files_page] = @urls[:project_page] + "/tree/#{Runtime::Env.default_branch}"
STDOUT.puts "Added many new files: #{@urls[:files_page]}"
$stdout.puts "Added many new files: #{@urls[:files_page]}"
end
def create_many_branches
@@ -117,7 +117,7 @@ module QA
create_a_branch_api_req("branch#{i}", "#{@group_name}%2F#{@project_name}")
end
@urls[:branches_page] = @urls[:project_page] + "/-/branches"
STDOUT.puts "Created many branches: #{@urls[:branches_page]}"
$stdout.puts "Created many branches: #{@urls[:branches_page]}"
end
def create_an_issue_with_many_discussions
@@ -130,7 +130,7 @@ module QA
# Add description and labels
update_an_issue_api_req("#{@group_name}%2F#{@project_name}", issue_id, "#{Faker::Lorem.sentences(500).join(" ")}", labels_list)
@urls[:large_issue] = @urls[:project_page] + "/issues/#{issue_id}"
STDOUT.puts "Created an issue with many discussions: #{@urls[:large_issue]}"
$stdout.puts "Created an issue with many discussions: #{@urls[:large_issue]}"
end
def create_an_mr_with_large_files_and_many_mr_discussions
@@ -178,7 +178,7 @@ module QA
create_a_discussion_on_mr_api_req("#{@group_name}%2F#{@project_name}", iid, "Let us discuss")
end
@urls[:large_mr] = JSON.parse(create_mr_response.body)["web_url"]
STDOUT.puts "Created an MR with many discussions and many very large Files: #{@urls[:large_mr]}"
$stdout.puts "Created an MR with many discussions and many very large Files: #{@urls[:large_mr]}"
end
def create_diff_note(iid, file_count, line_count, head_sha, start_sha, base_sha, line_type)
@@ -205,7 +205,7 @@ module QA
100.times do |i|
update_file_api_req(file_name, branch_name, project_path, Faker::Lorem.sentences(5).join(" "), Faker::Lorem.sentences(500).join("\n"))
end
STDOUT.puts "Using branch: #{branch_name}, created an MR with many commits: #{@urls[:mr_with_many_commits]}"
$stdout.puts "Using branch: #{branch_name}, created an MR with many commits: #{@urls[:mr_with_many_commits]}"
end
private
@@ -12,7 +12,7 @@ module QA
def run
do_run
rescue Net::ReadTimeout
STDOUT.puts 'Net::ReadTimeout during run. Trying again'
$stdout.puts 'Net::ReadTimeout during run. Trying again'
run
end
@@ -23,7 +23,7 @@ module QA
raise ArgumentError, "Please provide GITLAB_PASSWORD" unless ENV['GITLAB_PASSWORD']
raise ArgumentError, "Please provide GITLAB_ADDRESS" unless ENV['GITLAB_ADDRESS']
STDOUT.puts 'Running...'
$stdout.puts 'Running...'
Runtime::Browser.visit(ENV['GITLAB_ADDRESS'], Page::Main::Login)
Page::Main::Login.perform(&:sign_in_using_credentials)
@@ -419,7 +419,7 @@ module Trigger
raise "#{self.class.unscoped_class_name} did not succeed!"
end
STDOUT.flush
$stdout.flush
end
raise "#{self.class.unscoped_class_name} timed out after waiting for #{duration} minutes!"
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::FileHook do
let(:file_hook_source) do
<<~EOS
#!/usr/bin/env ruby
x = STDIN.read
x = $stdin.read
File.write('#{tmp_file.path}', x)
EOS
end
@@ -206,8 +206,12 @@ RSpec.describe Gitlab::Profiler do
end
end
before do
stub_const('STDOUT', stdout)
around do |example|
original_stdout = $stdout
$stdout = stdout # rubocop: disable RSpec/ExpectOutput
example.run
$stdout = original_stdout # rubocop: disable RSpec/ExpectOutput
end
it 'prints a profile result sorted by total time' do
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Utils::Measuring do
let(:result) { "result" }
before do
allow(ActiveSupport::Logger).to receive(:logger_outputs_to?).with(Gitlab::Utils::Measuring.logger, STDOUT).and_return(false)
allow(ActiveSupport::Logger).to receive(:logger_outputs_to?).with(Gitlab::Utils::Measuring.logger, $stdout).and_return(false)
end
let(:measurement) { described_class.new(base_log_data) }
@@ -15,7 +15,7 @@ module GitalySetup
default_name = ENV['CI'] ? 'DEBUG' : 'WARN'
level_name = ENV['GITLAB_TESTING_LOG_LEVEL']&.upcase
level = Logger.const_get(level_name || default_name, true) # rubocop: disable Gitlab/ConstGetInheritFalse
Logger.new(STDOUT, level: level, formatter: ->(_, _, _, msg) { msg })
Logger.new($stdout, level: level, formatter: ->(_, _, _, msg) { msg })
end
def tmp_tests_gitaly_dir
@@ -153,7 +153,7 @@ module GitalySetup
end
LOGGER.debug "Checking gitaly-ruby bundle...\n"
out = ENV['CI'] ? STDOUT : '/dev/null'
out = ENV['CI'] ? $stdout : '/dev/null'
abort 'bundle check failed' unless system(env, 'bundle', 'check', out: out, chdir: File.dirname(gemfile))
end
@@ -93,8 +93,8 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
describe 'write' do
before do
allow(STDIN).to receive(:tty?).and_return(false)
allow(STDIN).to receive(:read).and_return('testvalue')
allow($stdin).to receive(:tty?).and_return(false)
allow($stdin).to receive(:read).and_return('testvalue')
end
it 'creates encrypted file from stdin' do
@@ -13,7 +13,7 @@ RSpec.describe 'gitlab:terraform_states' do
end
before do
allow(Logger).to receive(:new).with(STDOUT).and_return(logger)
allow(Logger).to receive(:new).with($stdout).and_return(logger)
end
describe 'gitlab:terraform_states:migrate' do