Commit 5781a496 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 4d48b3cf
Please view this file on the master branch, on stable branches it's out of date.
## 12.8.6 (2020-03-11)
- No changes.
## 12.8.5
- No changes.
......
......@@ -10,7 +10,7 @@ module ConfirmEmailWarning
protected
def show_confirm_warning?
html_request? && request.get?
html_request? && request.get? && Feature.enabled?(:soft_email_confirmation)
end
def set_confirm_warning
......
......@@ -11,6 +11,8 @@ class ConfirmationsController < Devise::ConfirmationsController
protected
def after_resending_confirmation_instructions_path_for(resource)
return users_almost_there_path unless Feature.enabled?(:soft_email_confirmation)
stored_location_for(resource) || dashboard_projects_path
end
......
......@@ -54,7 +54,7 @@ class RegistrationsController < Devise::RegistrationsController
def welcome
return redirect_to new_user_registration_path unless current_user
return redirect_to stored_location_or_dashboard(current_user) if current_user.role.present? && !current_user.setup_for_company.nil?
return redirect_to path_for_signed_in_user(current_user) if current_user.role.present? && !current_user.setup_for_company.nil?
end
def update_registration
......@@ -64,7 +64,7 @@ class RegistrationsController < Devise::RegistrationsController
if result[:status] == :success
track_experiment_event(:signup_flow, 'end') # We want this event to be tracked when the user is _in_ the experimental group
set_flash_message! :notice, :signed_up
redirect_to stored_location_or_dashboard(current_user)
redirect_to path_for_signed_in_user(current_user)
else
render :welcome
end
......@@ -111,14 +111,12 @@ class RegistrationsController < Devise::RegistrationsController
return users_sign_up_welcome_path if experiment_enabled?(:signup_flow)
stored_location_or_dashboard(user)
path_for_signed_in_user(user)
end
def after_inactive_sign_up_path_for(resource)
# With the current `allow_unconfirmed_access_for` Devise setting in config/initializers/8_devise.rb,
# this method is never called. Leaving this here in case that value is set to 0.
Gitlab::AppLogger.info(user_created_message)
users_almost_there_path
Feature.enabled?(:soft_email_confirmation) ? dashboard_projects_path : users_almost_there_path
end
private
......@@ -180,9 +178,21 @@ class RegistrationsController < Devise::RegistrationsController
Gitlab::Utils.to_boolean(params[:terms_opt_in])
end
def stored_location_or_dashboard(user)
def path_for_signed_in_user(user)
if requires_confirmation?(user)
users_almost_there_path
else
stored_location_for(user) || dashboard_projects_path
end
end
def requires_confirmation?(user)
return false if user.confirmed?
return false if Feature.enabled?(:soft_email_confirmation)
return false if experiment_enabled?(:signup_flow)
true
end
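# Taken together (hypothetical examples): an unconfirmed user with the
# soft_email_confirmation flag and the signup_flow experiment both
# disabled is held on the "almost there" page; everyone else proceeds
# to their stored location or the dashboard.
#
#   path_for_signed_in_user(unconfirmed_user) # => users_almost_there_path
#   path_for_signed_in_user(confirmed_user)   # => stored location, else dashboard_projects_path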
def load_recaptcha
Gitlab::Recaptcha.load_configurations!
......
......@@ -242,14 +242,6 @@ class Commit
data
end
# Discover issues should be closed when this commit is pushed to a project's
# default branch.
def closes_issues(current_user = self.committer)
return unless repository.repo_type.project?
Gitlab::ClosingIssueExtractor.new(project, current_user).closed_by_message(safe_message)
end
def lazy_author
BatchLoader.for(author_email.downcase).batch do |emails, loader|
users = User.by_any_email(emails, confirmed: true).includes(:emails)
......@@ -299,14 +291,6 @@ class Commit
notes.includes(:author, :award_emoji)
end
def merge_requests
strong_memoize(:merge_requests) do
next MergeRequest.none unless repository.repo_type.project? && project
project.merge_requests.by_commit_sha(sha)
end
end
def method_missing(method, *args, &block)
@raw.__send__(method, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
end
......
......@@ -257,6 +257,10 @@ class MergeRequest < ApplicationRecord
with_state(:opened).where(auto_merge_enabled: true)
end
scope :including_metrics, -> do
includes(:metrics)
end
ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
after_save :keep_around_commit, unless: :importing?
......
......@@ -290,6 +290,19 @@ class Note < ApplicationRecord
@commit ||= project.commit(commit_id) if commit_id.present?
end
# Notes on merge requests and commits can be traced back to one or several
# MRs. This method returns a relation if the note is for one of these types,
# or nil if it is a note on some other object.
def merge_requests
if for_commit?
project.merge_requests.by_commit_sha(commit_id)
elsif for_merge_request?
MergeRequest.id_in(noteable_id)
else
nil
end
end
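# Hypothetical console examples of the three branches:
#
#   commit_note.merge_requests # => MRs containing that commit SHA
#   mr_note.merge_requests     # => MergeRequest.id_in(mr_note.noteable_id)
#   issue_note.merge_requests  # => nil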
# override to return commits, which are not active record
def noteable
return commit if for_commit?
......
......@@ -1787,8 +1787,10 @@ class Project < ApplicationRecord
# rubocop:enable Gitlab/RailsLogger
def after_import
repository.after_import
wiki.repository.after_import
repository.expire_content_cache
wiki.repository.expire_content_cache
DetectRepositoryLanguagesWorker.perform_async(id)
# The import assigns iid values on its own, e.g. by re-using GitHub ids.
# Flush existing InternalId records for this project for consistency reasons.
......
......@@ -437,15 +437,6 @@ class Repository
expire_all_method_caches
end
# Runs code after a repository has been forked/imported.
def after_import
expire_content_cache
return unless repo_type.project?
DetectRepositoryLanguagesWorker.perform_async(project.id)
end
# Runs code after a new commit has been pushed.
def after_push_commit(branch_name)
expire_statistics_caches
......
......@@ -1683,6 +1683,13 @@ class User < ApplicationRecord
super
end
# override from Devise::Confirmable
def confirmation_period_valid?
return false if Feature.disabled?(:soft_email_confirmation)
super
end
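# In effect (hypothetical example): with the flag disabled, an
# unconfirmed user is always outside the confirmation grace period,
# regardless of Devise's allow_unconfirmed_access_for setting.
#
#   unconfirmed_user.confirmation_period_valid? # => false when the flag is off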
private
def default_private_profile_to_false
......
......@@ -52,12 +52,12 @@ module Issues
end
def store_first_mentioned_in_commit_at(issue, merge_request)
return unless Feature.enabled?(:store_first_mentioned_in_commit_on_issue_close, issue.project)
return unless Feature.enabled?(:store_first_mentioned_in_commit_on_issue_close, issue.project, default_enabled: true)
metrics = issue.metrics
return if metrics.nil? || metrics.first_mentioned_in_commit_at
first_commit_timestamp = merge_request.commits(limit: 1).first&.date
first_commit_timestamp = merge_request.commits(limit: 1).first.try(:authored_date)
return unless first_commit_timestamp
metrics.update!(first_mentioned_in_commit_at: first_commit_timestamp)
......
......@@ -19,13 +19,12 @@ class ProcessCommitWorker # rubocop:disable Scalability/IdempotentWorker
# commit_hash - Hash containing commit details to use for constructing a
# Commit object without having to use the Git repository.
# default - The data was pushed to the default branch.
# rubocop: disable CodeReuse/ActiveRecord
def perform(project_id, user_id, commit_hash, default = false)
project = Project.find_by(id: project_id)
project = Project.id_in(project_id).first
return unless project
user = User.find_by(id: user_id)
user = User.id_in(user_id).first
return unless user
......@@ -35,12 +34,11 @@ class ProcessCommitWorker # rubocop:disable Scalability/IdempotentWorker
process_commit_message(project, commit, user, author, default)
update_issue_metrics(commit, author)
end
# rubocop: enable CodeReuse/ActiveRecord
def process_commit_message(project, commit, user, author, default = false)
# Ignore closing references from GitLab-generated commit messages.
find_closing_issues = default && !commit.merged_merge_request?(user)
closed_issues = find_closing_issues ? commit.closes_issues(user) : []
closed_issues = find_closing_issues ? issues_to_close(project, commit, user) : []
close_issues(project, user, author, commit, closed_issues) if closed_issues.any?
commit.create_cross_references!(author, closed_issues)
......@@ -56,6 +54,12 @@ class ProcessCommitWorker # rubocop:disable Scalability/IdempotentWorker
end
end
def issues_to_close(project, commit, user)
Gitlab::ClosingIssueExtractor
.new(project, user)
.closed_by_message(commit.safe_message)
end
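# Hypothetical example: for a commit whose safe_message is "Fixes #123",
# this returns the issue with iid 123, assuming the user may close it.
#
#   issues_to_close(project, commit, user) # => [issue_123]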
def update_issue_metrics(commit, author)
mentioned_issues = commit.all_references(author).issues
......
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'optparse'
require_relative '../lib/gitlab'
require_relative '../lib/gitlab/utils'
require_relative '../lib/gitlab/sidekiq_config/cli_methods'
require_relative '../lib/gitlab/sidekiq_cluster'
require_relative '../lib/gitlab/sidekiq_cluster/cli'
Thread.abort_on_exception = true
cli = Gitlab::SidekiqCluster::CLI.new
begin
cli.run
rescue Gitlab::SidekiqCluster::CLI::CommandError => error
abort error.message
end
---
title: Store first commit's authored_date for value stream calculation on merge
merge_request: 26885
author:
type: changed
---
title: Replace several temporary indexes with a single one to save time when running mentions migration
merge_request:
author:
type: performance
---
title: Move sidekiq-cluster script to Core
merge_request: 26703
author:
type: other
# frozen_string_literal: true
class AddTemporaryIndexForNotesWithMentions < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
INDEX_CONDITION = "note LIKE '%@%'::text"
INDEX_NAME = 'note_mentions_temp_index'
EPIC_MENTIONS_INDEX_NAME = 'epic_mentions_temp_index'
DESIGN_MENTIONS_INDEX_NAME = 'design_mentions_temp_index'
def up
# create temporary index for notes with mentions; this may take well over 1 hour
add_concurrent_index(:notes, [:id, :noteable_type], where: INDEX_CONDITION, name: INDEX_NAME)
# clean up previous temporary indexes, as we'll be using the single one
remove_concurrent_index(:notes, :id, name: EPIC_MENTIONS_INDEX_NAME)
remove_concurrent_index(:notes, :id, name: DESIGN_MENTIONS_INDEX_NAME)
end
def down
remove_concurrent_index(:notes, :id, name: INDEX_NAME)
add_concurrent_index(:notes, :id, where: "#{INDEX_CONDITION} AND noteable_type='Epic'", name: EPIC_MENTIONS_INDEX_NAME)
add_concurrent_index(:notes, :id, where: "#{INDEX_CONDITION} AND noteable_type='DesignManagement::Design'", name: DESIGN_MENTIONS_INDEX_NAME)
end
end
......@@ -2840,8 +2840,7 @@ ActiveRecord::Schema.define(version: 2020_03_10_135823) do
t.index ["commit_id"], name: "index_notes_on_commit_id"
t.index ["created_at"], name: "index_notes_on_created_at"
t.index ["discussion_id"], name: "index_notes_on_discussion_id"
t.index ["id"], name: "design_mentions_temp_index", where: "((note ~~ '%@%'::text) AND ((noteable_type)::text = 'DesignManagement::Design'::text))"
t.index ["id"], name: "epic_mentions_temp_index", where: "((note ~~ '%@%'::text) AND ((noteable_type)::text = 'Epic'::text))"
t.index ["id", "noteable_type"], name: "note_mentions_temp_index", where: "(note ~~ '%@%'::text)"
t.index ["line_code"], name: "index_notes_on_line_code"
t.index ["note"], name: "index_notes_on_note_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["note"], name: "tmp_idx_on_promoted_notes", where: "(((noteable_type)::text = 'Issue'::text) AND (system IS TRUE) AND (note ~~ 'promoted to epic%'::text))"
......
......@@ -139,6 +139,8 @@ do this manually.
sudo gitlab-pg-ctl promote
```
In GitLab 12.8 and earlier, see [Message: "sudo: gitlab-pg-ctl: command not found"](../replication/troubleshooting.md#message-sudo-gitlab-pg-ctl-command-not-found).
1. Edit `/etc/gitlab/gitlab.rb` on every machine in the **secondary** to
reflect its new status as **primary** by removing any lines that enabled the
`geo_secondary_role`:
......
......@@ -514,6 +514,27 @@ or `gitlab-ctl promote-to-primary-node`, either:
bug](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22021) was
fixed.
### Message: "sudo: gitlab-pg-ctl: command not found"
When
[promoting a **secondary** node with HA](../disaster_recovery/index.md#promoting-a-secondary-node-with-ha),
you need to run the `gitlab-pg-ctl` command to promote the PostgreSQL
read-replica database.
In GitLab 12.8 and earlier, this command will fail with the message:
```plaintext
sudo: gitlab-pg-ctl: command not found
```
In this case, the workaround is to use the full path to the binary, for example:
```shell
sudo /opt/gitlab/embedded/bin/gitlab-pg-ctl promote
```
GitLab 12.9 and later are [unaffected by this error](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5147).
## Fixing Foreign Data Wrapper errors
This section documents ways to fix potential Foreign Data Wrapper errors.
......
......@@ -311,11 +311,11 @@ If you experience a problem, you should contact GitLab support. Use the command
line at your own risk.
For debugging purposes, you can start extra Sidekiq processes by using the command
`/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster`. This command
`/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster`. This command
takes arguments using the following syntax:
```shell
/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster [QUEUE,QUEUE,...] [QUEUE, ...]
/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster [QUEUE,QUEUE,...] [QUEUE, ...]
```
Each separate argument denotes a group of queues that have to be processed by a
......@@ -333,14 +333,14 @@ For example, say you want to start 2 extra processes: one to process the
done as follows:
```shell
/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster process_commit post_receive
/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster process_commit post_receive
```
If you instead want to start one process processing both queues, you'd use the
following syntax:
```shell
/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster process_commit,post_receive
/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster process_commit,post_receive
```
If you want to have one Sidekiq process dealing with the `process_commit` and
......@@ -348,7 +348,7 @@ If you want to have one Sidekiq process dealing with the `process_commit` and
you'd use the following:
```shell
/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster process_commit,post_receive gitlab_shell
/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster process_commit,post_receive gitlab_shell
```
### Monitoring the `sidekiq-cluster` command
......@@ -380,7 +380,7 @@ file is written, but this can be changed by passing the `--pidfile` option to
`sidekiq-cluster`. For example:
```shell
/opt/gitlab/embedded/service/gitlab-rails/ee/bin/sidekiq-cluster --pidfile /var/run/gitlab/sidekiq_cluster.pid process_commit
/opt/gitlab/embedded/service/gitlab-rails/bin/sidekiq-cluster --pidfile /var/run/gitlab/sidekiq_cluster.pid process_commit
```
Keep in mind that the PID file will contain the PID of the `sidekiq-cluster`
......
......@@ -5,12 +5,8 @@ type: howto
# User email confirmation at sign-up
GitLab can be configured to require confirmation of a user's email address when
the user signs up. When this setting is enabled:
- For GitLab 12.7 and earlier, the user is unable to sign in until they confirm their
email address.
- For GitLab 12.8 and later, the user [has 30 days to confirm their email address](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31245).
After 30 days, they will be unable to log in and access GitLab features.
the user signs up. When this setting is enabled, the user is unable to sign in until
they confirm their email address.
In **Admin Area > Settings** (`/admin/application_settings/general`), go to the section
**Sign-up Restrictions** and look for the **Send confirmation email on sign-up** option.
......
......@@ -666,6 +666,10 @@ To use Auto Deploy on a Kubernetes 1.16+ cluster, you must:
This will opt-in to using a version of the PostgreSQL chart that supports Kubernetes
1.16 and higher.
CAUTION: **Caution:** Opting into `AUTO_DEVOPS_POSTGRES_CHANNEL` version `2` deletes
the version `1` PostgreSQL database. Back up the contents of the version `1` database
before opting into version `2`, so that you can restore them into the version `2` database.
#### Migrations
> [Introduced][ce-21955] in GitLab 11.4
......
......@@ -37,12 +37,7 @@ email domains to prevent malicious users from creating accounts.
## Require email confirmation
You can send confirmation emails during sign-up and require that users confirm
their email address. If this setting is selected:
- For GitLab 12.7 and earlier, the user is unable to sign in until they confirm their
email address.
- For GitLab 12.8 and later, the user [has 30 days to confirm their email address](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31245).
After 30 days, they will be unable to log in and access GitLab features.
their email address before they are allowed to sign in.
![Email confirmation](img/email_confirmation_v12_7.png)
......
......@@ -62,6 +62,8 @@ However, users will not be prompted to log in via SSO on each visit. GitLab will ch
We intend to add a similar SSO requirement for [Git and API activity](https://gitlab.com/gitlab-org/gitlab/issues/9152) in the future.
When SSO enforcement is enabled for a group, users cannot share a project in the group outside the top-level group, even if the project is forked.
#### Group-managed accounts
> [Introduced in GitLab 12.1](https://gitlab.com/groups/gitlab-org/-/epics/709).
......@@ -74,6 +76,7 @@ When this option is enabled:
- All existing and new users in the group will be required to log in via the SSO URL associated with the group.
- After the group-managed account has been created, group activity will require the use of this user account.
- Users can't share a project in the group outside the top-level group (also applies to forked projects).
Upon successful authentication, GitLab prompts the user with options, based on the email address received from the configured identity provider:
......@@ -107,6 +110,16 @@ Groups with enabled group-managed accounts can allow or disallow forking of proj
by using a separate toggle. If forking is disallowed, any project of a given root group or its
subgroups can be forked only to a subgroup of the same root group.
##### Other restrictions for Group-managed accounts
> [Introduced in GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/issues/12420)
Projects within groups with group-managed accounts enabled cannot be shared with:
- Groups outside of the parent group
- Members who are not users managed by this group
This restriction also applies to projects forked from or to those groups.
#### Assertions
When using group-managed accounts, the following user details need to be passed to GitLab as SAML
......
# frozen_string_literal: true
module Gitlab
module SidekiqCluster
# The signals that should terminate both the master and workers.
TERMINATE_SIGNALS = %i(INT TERM).freeze
# The signals that should simply be forwarded to the workers.
FORWARD_SIGNALS = %i(TTIN USR1 USR2 HUP).freeze
# Traps the given signals and yields the block whenever these signals are
# received.
#
# The block is passed the name of the signal.
#
# Example:
#
# trap_signals(%i(HUP TERM)) do |signal|
# ...
# end
def self.trap_signals(signals)
signals.each do |signal|
trap(signal) do
yield signal
end
end
end
def self.trap_terminate(&block)
trap_signals(TERMINATE_SIGNALS, &block)
end
def self.trap_forward(&block)
trap_signals(FORWARD_SIGNALS, &block)
end
def self.signal(pid, signal)
Process.kill(signal, pid)
true
rescue Errno::ESRCH
false
end
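# For example:
#
#   signal(Process.pid, 0) # => true, the current process exists
#   signal(-2, 0)          # => false, rescues Errno::ESRCH for a non-existent process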
def self.signal_processes(pids, signal)
pids.each { |pid| signal(pid, signal) }
end
# Starts Sidekiq workers for the pairs of processes.
#
# Example:
#
# start([ ['foo'], ['bar', 'baz'] ], :production)
#
# This would start two Sidekiq processes: one processing "foo", and one
# processing "bar" and "baz". Each one is placed in its own process group.
#
# queues - An Array containing Arrays. Each sub Array should specify the
# queues to use for a single process.
#
# directory - The directory of the Rails application.
#
# Returns an Array containing the PIDs of the started processes.
def self.start(queues, env: :development, directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false)
queues.map.with_index do |pair, index|
start_sidekiq(pair, env: env, directory: directory, max_concurrency: max_concurrency, min_concurrency: min_concurrency, worker_id: index, dryrun: dryrun)
end
end
# Starts a Sidekiq process that processes _only_ the given queues.
#
# Returns the PID of the started process.
def self.start_sidekiq(queues, env:, directory:, max_concurrency:, min_concurrency:, worker_id:, dryrun:)
counts = count_by_queue(queues)
cmd = %w[bundle exec sidekiq]
cmd << "-c #{self.concurrency(queues, min_concurrency, max_concurrency)}"
cmd << "-e#{env}"
cmd << "-gqueues: #{proc_details(counts)}"
cmd << "-r#{directory}"
counts.each do |queue, count|
cmd << "-q#{queue},#{count}"
end
if dryrun
puts "Sidekiq command: #{cmd}" # rubocop:disable Rails/Output
return
end
pid = Process.spawn(
{ 'ENABLE_SIDEKIQ_CLUSTER' => '1',
'SIDEKIQ_WORKER_ID' => worker_id.to_s },
*cmd,
pgroup: true,
err: $stderr,
out: $stdout
)
wait_async(pid)
pid
end
def self.count_by_queue(queues)
queues.each_with_object(Hash.new(0)) { |element, hash| hash[element] += 1 }
end
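# For example:
#
#   count_by_queue(%w(foo foo bar)) # => { 'foo' => 2, 'bar' => 1 }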
def self.proc_details(counts)
counts.map do |queue, count|
if count == 1
queue
else
"#{queue} (#{count})"
end
end.join(', ')
end
def self.concurrency(queues, min_concurrency, max_concurrency)
concurrency_from_queues = queues.length + 1
max = max_concurrency.positive? ? max_concurrency : concurrency_from_queues
min = [min_concurrency, max].min
concurrency_from_queues.clamp(min, max)
end
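# Worked examples, mirroring the spec table for this method:
#
#   concurrency(%w(a b), 0, 0)       # => 3 (queues + 1, no explicit limits)
#   concurrency(%w(a b), 4, 5)       # => 4 (clamped up to the minimum)
#   concurrency(%w(a b c d e), 2, 3) # => 3 (clamped down to the maximum)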
# Waits for the given process to complete using a separate thread.
def self.wait_async(pid)
Thread.new do
Process.wait(pid) rescue Errno::ECHILD
end
end
# Returns true if all the processes are alive.
def self.all_alive?(pids)
pids.each do |pid|
return false unless process_alive?(pid)
end
true
end
def self.any_alive?(pids)
pids_alive(pids).any?
end
def self.pids_alive(pids)
pids.select { |pid| process_alive?(pid) }
end
def self.process_alive?(pid)
# Signal 0 tests whether the process exists and we have access to send signals
# but is otherwise a noop (doesn't actually send a signal to the process)
signal(pid, 0)
end
def self.write_pid(path)
File.open(path, 'w') do |handle|
handle.write(Process.pid.to_s)
end
end
end
end
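The module above can also be driven without the CLI wrapper. A minimal sketch of the same start/monitor/terminate cycle the CLI performs, assuming it is run from a Rails root and that the queue names are placeholders:

```ruby
require_relative 'lib/gitlab/sidekiq_cluster'

# Start two Sidekiq processes: one for "foo", one for "bar" and "baz".
pids = Gitlab::SidekiqCluster.start([%w(foo), %w(bar baz)], env: :production)

# Poll until any worker dies, then terminate the survivors.
sleep(5) while Gitlab::SidekiqCluster.all_alive?(pids)
Gitlab::SidekiqCluster.signal_processes(Gitlab::SidekiqCluster.pids_alive(pids), :TERM)
```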
# frozen_string_literal: true
require 'optparse'
require 'logger'
require 'time'
module Gitlab
module SidekiqCluster
class CLI
CHECK_TERMINATE_INTERVAL_SECONDS = 1
# How long to wait in total when asking for a clean termination
# Sidekiq's own default for self-termination is 25s
TERMINATE_TIMEOUT_SECONDS = 30
CommandError = Class.new(StandardError)
def initialize(log_output = STDERR)
require_relative '../../../lib/gitlab/sidekiq_logging/json_formatter'
# As recommended by https://github.com/mperham/sidekiq/wiki/Advanced-Options#concurrency
@max_concurrency = 50
@min_concurrency = 0
@environment = ENV['RAILS_ENV'] || 'development'
@pid = nil
@interval = 5
@alive = true
@processes = []
@logger = Logger.new(log_output)
@logger.formatter = ::Gitlab::SidekiqLogging::JSONFormatter.new
@rails_path = Dir.pwd
@dryrun = false
end
def run(argv = ARGV)
if argv.empty?
raise CommandError,
'You must specify at least one queue to start a worker for'
end
option_parser.parse!(argv)
all_queues = SidekiqConfig::CliMethods.all_queues(@rails_path)
queue_names = SidekiqConfig::CliMethods.worker_queues(@rails_path)
queue_groups = argv.map do |queues|
next queue_names if queues == '*'
# When using the experimental queue query syntax, we treat
# each queue group as a worker attribute query, and resolve
# the queues for the queue group using this query.
if @experimental_queue_selector
SidekiqConfig::CliMethods.query_workers(queues, all_queues)
else
SidekiqConfig::CliMethods.expand_queues(queues.split(','), queue_names)
end
end
if @negate_queues
queue_groups.map! { |queues| queue_names - queues }
end
if queue_groups.all?(&:empty?)
raise CommandError,
'No queues found, you must select at least one queue'
end
@logger.info("Starting cluster with #{queue_groups.length} processes")
@processes = SidekiqCluster.start(
queue_groups,
env: @environment,
directory: @rails_path,
max_concurrency: @max_concurrency,
min_concurrency: @min_concurrency,
dryrun: @dryrun
)
return if @dryrun
write_pid
trap_signals
start_loop
end
def write_pid
SidekiqCluster.write_pid(@pid) if @pid
end
def monotonic_time
Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
end
def continue_waiting?(deadline)
SidekiqCluster.any_alive?(@processes) && monotonic_time < deadline
end
def hard_stop_stuck_pids
SidekiqCluster.signal_processes(SidekiqCluster.pids_alive(@processes), :KILL)
end
def wait_for_termination
deadline = monotonic_time + TERMINATE_TIMEOUT_SECONDS
sleep(CHECK_TERMINATE_INTERVAL_SECONDS) while continue_waiting?(deadline)
hard_stop_stuck_pids
end
def trap_signals
SidekiqCluster.trap_terminate do |signal|
@alive = false
SidekiqCluster.signal_processes(@processes, signal)
wait_for_termination
end
SidekiqCluster.trap_forward do |signal|
SidekiqCluster.signal_processes(@processes, signal)
end
end
def start_loop
while @alive
sleep(@interval)
unless SidekiqCluster.all_alive?(@processes)
# If a child process died we'll just terminate the whole cluster. It's up to
# runit and such to then restart the cluster.
@logger.info('A worker terminated, shutting down the cluster')
SidekiqCluster.signal_processes(@processes, :TERM)
break
end
end
end
def option_parser
OptionParser.new do |opt|
opt.banner = "#{File.basename(__FILE__)} [QUEUE,QUEUE] [QUEUE] ... [OPTIONS]"
opt.separator "\nOptions:\n"
opt.on('-h', '--help', 'Shows this help message') do
abort opt.to_s
end
opt.on('-m', '--max-concurrency INT', 'Maximum threads to use with Sidekiq (default: 50, 0 to disable)') do |int|
@max_concurrency = int.to_i
end
opt.on('--min-concurrency INT', 'Minimum threads to use with Sidekiq (default: 0)') do |int|
@min_concurrency = int.to_i
end
opt.on('-e', '--environment ENV', 'The application environment') do |env|
@environment = env
end
opt.on('-P', '--pidfile PATH', 'Path to the PID file') do |pid|
@pid = pid
end
opt.on('-r', '--require PATH', 'Location of the Rails application') do |path|
@rails_path = path
end
opt.on('--experimental-queue-selector', 'EXPERIMENTAL: Run workers based on the provided selector') do |experimental_queue_selector|
@experimental_queue_selector = experimental_queue_selector
end
opt.on('-n', '--negate', 'Run workers for all queues in sidekiq_queues.yml except the given ones') do
@negate_queues = true
end
opt.on('-i', '--interval INT', 'The number of seconds to wait between worker checks') do |int|
@interval = int.to_i
end
opt.on('-d', '--dryrun', 'Print commands that would be run without this flag, and quit') do |int|
@dryrun = true
end
end
end
end
end
end
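As the spec below exercises, the CLI accepts `--dryrun` to print the Sidekiq commands it would run without spawning workers. A hedged sketch, assuming invocation from the Rails root so the queue configuration can be loaded:

```ruby
require_relative 'lib/gitlab/sidekiq_cluster/cli'

cli = Gitlab::SidekiqCluster::CLI.new
# Prints two `bundle exec sidekiq` command lines (one per queue group)
# and returns without starting any processes.
cli.run(%w(process_commit process_commit,post_receive --dryrun))
```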
# frozen_string_literal: true
require 'spec_helper'
describe 'bin/sidekiq-cluster' do
using RSpec::Parameterized::TableSyntax
context 'when selecting some queues and excluding others' do
where(:args, :included, :excluded) do
%w[--negate cronjob] | '-qdefault,1' | '-qcronjob,1'
%w[--experimental-queue-selector resource_boundary=cpu] | '-qupdate_merge_requests,1' | '-qdefault,1'
end
with_them do
it 'runs successfully', :aggregate_failures do
cmd = %w[bin/sidekiq-cluster --dryrun] + args
output, status = Gitlab::Popen.popen(cmd, Rails.root.to_s)
expect(status).to be(0)
expect(output).to include('"bundle", "exec", "sidekiq"')
expect(output).to include(included)
expect(output).not_to include(excluded)
end
end
end
context 'when selecting all queues' do
[
%w[*],
%w[--experimental-queue-selector *]
].each do |args|
it "runs successfully with `#{args}`", :aggregate_failures do
cmd = %w[bin/sidekiq-cluster --dryrun] + args
output, status = Gitlab::Popen.popen(cmd, Rails.root.to_s)
expect(status).to be(0)
expect(output).to include('"bundle", "exec", "sidekiq"')
expect(output).to include('-qdefault,1')
expect(output).to include('-qcronjob:ci_archive_traces_cron,1')
end
end
end
end
......@@ -3,6 +3,10 @@
require 'spec_helper'
describe ConfirmEmailWarning do
before do
stub_feature_flags(soft_email_confirmation: true)
end
controller(ApplicationController) do
# `described_class` is not available in this context
include ConfirmEmailWarning
......
......@@ -79,29 +79,31 @@ describe RegistrationsController do
stub_application_setting(send_user_confirmation_email: true)
end
context 'when a grace period is active for confirming the email address' do
context 'when soft email confirmation is not enabled' do
before do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
stub_feature_flags(soft_email_confirmation: false)
allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
end
it 'sends a confirmation email and redirects to the dashboard' do
it 'does not authenticate the user and sends a confirmation email' do
post(:create, params: user_params)
expect(ActionMailer::Base.deliveries.last.to.first).to eq(user_params[:user][:email])
expect(response).to redirect_to(dashboard_projects_path)
expect(subject.current_user).to be_nil
end
end
context 'when no grace period is active for confirming the email address' do
context 'when soft email confirmation is enabled' do
before do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
stub_feature_flags(soft_email_confirmation: true)
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
end
it 'sends a confirmation email and redirects to the almost there page' do
it 'authenticates the user and sends a confirmation email' do
post(:create, params: user_params)
expect(ActionMailer::Base.deliveries.last.to.first).to eq(user_params[:user][:email])
expect(response).to redirect_to(users_almost_there_path)
expect(response).to redirect_to(dashboard_projects_path)
end
end
end
......
......@@ -135,7 +135,9 @@ describe 'Invites' do
expect(current_path).to eq(dashboard_projects_path)
expect(page).to have_content(project.full_name)
visit group_path(group)
expect(page).to have_content(group.full_name)
end
......@@ -153,6 +155,25 @@ describe 'Invites' do
context 'email confirmation enabled' do
let(:send_email_confirmation) { true }
context 'when soft email confirmation is not enabled' do
before do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
end
it 'signs up and redirects to root page with all the project/groups invitation automatically accepted' do
fill_in_sign_up_form(new_user)
confirm_email(new_user)
fill_in_sign_in_form(new_user)
expect(current_path).to eq(root_path)
expect(page).to have_content(project.full_name)
visit group_path(group)
expect(page).to have_content(group.full_name)
end
end
context 'when soft email confirmation is enabled' do
before do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
......@@ -164,7 +185,9 @@ describe 'Invites' do
expect(current_path).to eq(root_path)
expect(page).to have_content(project.full_name)
visit group_path(group)
expect(page).to have_content(group.full_name)
end
end
......@@ -180,7 +203,24 @@ describe 'Invites' do
context 'the user sign-up using a different email address' do
let(:invite_email) { build_stubbed(:user).email }
context 'when soft email confirmation is not enabled' do
before do
stub_feature_flags(soft_email_confirmation: false)
allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
end
it 'signs up and redirects to the invitation page' do
fill_in_sign_up_form(new_user)
confirm_email(new_user)
fill_in_sign_in_form(new_user)
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
end
end
context 'when soft email confirmation is enabled' do
before do
stub_feature_flags(soft_email_confirmation: true)
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
end
......@@ -192,4 +232,5 @@ describe 'Invites' do
end
end
end
end
end
......@@ -61,10 +61,6 @@ describe 'Merge request > User sees diff', :js do
let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) }
let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") }
before do
forked_project.repository.after_import
end
context 'as author' do
it 'shows direct edit link', :sidekiq_might_not_need_inline do
sign_in(author_user)
......
......@@ -797,6 +797,7 @@ describe 'Login' do
before do
stub_application_setting(send_user_confirmation_email: true)
stub_feature_flags(soft_email_confirmation: true)
allow(User).to receive(:allow_unconfirmed_access_for).and_return grace_period
end
......
......@@ -129,6 +129,39 @@ shared_examples 'Signup' do
stub_application_setting(send_user_confirmation_email: true)
end
context 'when soft email confirmation is not enabled' do
before do
stub_feature_flags(soft_email_confirmation: false)
end
it 'creates the user account and sends a confirmation email' do
visit new_user_registration_path
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
if Gitlab::Experimentation.enabled?(:signup_flow)
fill_in 'new_user_first_name', with: new_user.first_name
fill_in 'new_user_last_name', with: new_user.last_name
else
fill_in 'new_user_name', with: new_user.name
fill_in 'new_user_email_confirmation', with: new_user.email
end
fill_in 'new_user_password', with: new_user.password
expect { click_button 'Register' }.to change { User.count }.by(1)
expect(current_path).to eq users_almost_there_path
expect(page).to have_content('Please check your email to confirm your account')
end
end
context 'when soft email confirmation is enabled' do
before do
stub_feature_flags(soft_email_confirmation: true)
end
it 'creates the user account and sends a confirmation email' do
visit new_user_registration_path
......@@ -155,6 +188,7 @@ shared_examples 'Signup' do
end
end
end
end
context "when sigining up with different cased emails" do
it "creates the user successfully" do
......
# frozen_string_literal: true
require 'fast_spec_helper'
require 'rspec-parameterized'
describe Gitlab::SidekiqCluster::CLI do
let(:cli) { described_class.new('/dev/null') }
let(:default_options) do
{ env: 'test', directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false }
end
before do
stub_env('RAILS_ENV', 'test')
end
describe '#run' do
context 'without any arguments' do
it 'raises CommandError' do
expect { cli.run([]) }.to raise_error(described_class::CommandError)
end
end
context 'with arguments' do
before do
allow(cli).to receive(:write_pid)
allow(cli).to receive(:trap_signals)
allow(cli).to receive(:start_loop)
end
it 'starts the Sidekiq workers' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options)
.and_return([])
cli.run(%w(foo))
end
it 'allows the special * selector' do
expect(Gitlab::SidekiqCluster)
.to receive(:start).with(
[Gitlab::SidekiqConfig::CliMethods.worker_queues],
default_options
)
cli.run(%w(*))
end
context 'with --negate flag' do
it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['baz']], default_options)
.and_return([])
cli.run(%w(foo -n))
end
end
context 'with --max-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a max concurrency' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([%w(foo bar baz), %w(solo)], default_options.merge(max_concurrency: 2))
.and_return([])
cli.run(%w(foo,bar,baz solo -m 2))
end
end
context 'with --min-concurrency flag' do
it 'starts Sidekiq workers for specified queues with a min concurrency' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz))
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([%w(foo bar baz), %w(solo)], default_options.merge(min_concurrency: 2))
.and_return([])
cli.run(%w(foo,bar,baz solo --min-concurrency 2))
end
end
context 'queue namespace expansion' do
it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['cronjob', 'cronjob:foo', 'cronjob:bar']], default_options)
.and_return([])
cli.run(%w(cronjob))
end
end
context 'with --experimental-queue-selector' do
where do
{
'memory-bound queues' => {
query: 'resource_boundary=memory',
included_queues: %w(project_export),
excluded_queues: %w(merge)
},
'memory- or CPU-bound queues' => {
query: 'resource_boundary=memory,cpu',
included_queues: %w(auto_merge:auto_merge_process project_export),
excluded_queues: %w(merge)
},
'high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high',
included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache),
excluded_queues: %w(merge)
},
'CPU-bound high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w(pipeline_cache:expire_pipeline_cache),
excluded_queues: %w(pipeline_cache:expire_job_cache merge)
},
'CPU-bound high urgency non-CI queues' => {
query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w(new_issue),
excluded_queues: %w(pipeline_cache:expire_pipeline_cache)
},
'CI and SCM queues' => {
query: 'feature_category=continuous_integration|feature_category=source_code_management',
included_queues: %w(pipeline_cache:expire_job_cache merge),
excluded_queues: %w(mailers)
}
}
end
with_them do
it 'expands queues by attributes' do
expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
expect(opts).to eq(default_options)
expect(queues.first).to include(*included_queues)
expect(queues.first).not_to include(*excluded_queues)
[]
end
cli.run(%W(--experimental-queue-selector #{query}))
end
it 'works when negated' do
expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
expect(opts).to eq(default_options)
expect(queues.first).not_to include(*included_queues)
expect(queues.first).to include(*excluded_queues)
[]
end
cli.run(%W(--negate --experimental-queue-selector #{query}))
end
end
it 'expands multiple queue groups correctly' do
expect(Gitlab::SidekiqCluster)
.to receive(:start)
.with([['chat_notification'], ['project_export']], default_options)
.and_return([])
cli.run(%w(--experimental-queue-selector feature_category=chatops&urgency=high resource_boundary=memory&feature_category=importers))
end
it 'allows the special * selector' do
expect(Gitlab::SidekiqCluster)
.to receive(:start).with(
[Gitlab::SidekiqConfig::CliMethods.worker_queues],
default_options
)
cli.run(%w(--experimental-queue-selector *))
end
it 'errors when the selector matches no queues' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
expect { cli.run(%w(--experimental-queue-selector has_external_dependencies=true&has_external_dependencies=false)) }
.to raise_error(described_class::CommandError)
end
it 'errors on an invalid query' do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
expect { cli.run(%w(--experimental-queue-selector unknown_field=chatops)) }
.to raise_error(Gitlab::SidekiqConfig::CliMethods::QueryError)
end
end
end
end
describe '#write_pid' do
context 'when a PID is specified' do
it 'writes the PID to a file' do
expect(Gitlab::SidekiqCluster).to receive(:write_pid).with('/dev/null')
cli.option_parser.parse!(%w(-P /dev/null))
cli.write_pid
end
end
context 'when no PID is specified' do
it 'does not write a PID' do
expect(Gitlab::SidekiqCluster).not_to receive(:write_pid)
cli.write_pid
end
end
end
describe '#wait_for_termination' do
it 'waits for termination of all sub-processes and succeeds after 3 checks' do
expect(Gitlab::SidekiqCluster).to receive(:any_alive?)
.with(an_instance_of(Array)).and_return(true, true, true, false)
expect(Gitlab::SidekiqCluster).to receive(:pids_alive)
.with([]).and_return([])
expect(Gitlab::SidekiqCluster).to receive(:signal_processes)
.with([], :KILL)
stub_const("Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
stub_const("Gitlab::SidekiqCluster::CLI::TERMINATE_TIMEOUT_SECONDS", 1)
cli.wait_for_termination
end
context 'with hanging workers' do
before do
expect(cli).to receive(:write_pid)
expect(cli).to receive(:trap_signals)
expect(cli).to receive(:start_loop)
end
it 'hard kills workers after timeout expires' do
worker_pids = [101, 102, 103]
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options)
.and_return(worker_pids)
expect(Gitlab::SidekiqCluster).to receive(:any_alive?)
.with(worker_pids).and_return(true).at_least(10).times
expect(Gitlab::SidekiqCluster).to receive(:pids_alive)
.with(worker_pids).and_return([102])
expect(Gitlab::SidekiqCluster).to receive(:signal_processes)
.with([102], :KILL)
cli.run(%w(foo))
stub_const("Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
stub_const("Gitlab::SidekiqCluster::CLI::TERMINATE_TIMEOUT_SECONDS", 1)
cli.wait_for_termination
end
end
end
describe '#trap_signals' do
it 'traps the termination and forwarding signals' do
expect(Gitlab::SidekiqCluster).to receive(:trap_terminate)
expect(Gitlab::SidekiqCluster).to receive(:trap_forward)
cli.trap_signals
end
end
describe '#start_loop' do
it 'runs until one of the processes has been terminated' do
allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
expect(Gitlab::SidekiqCluster).to receive(:all_alive?)
.with(an_instance_of(Array)).and_return(false)
expect(Gitlab::SidekiqCluster).to receive(:signal_processes)
.with(an_instance_of(Array), :TERM)
cli.start_loop
end
end
end
# frozen_string_literal: true
require 'fast_spec_helper'
require 'rspec-parameterized'
describe Gitlab::SidekiqCluster do
describe '.trap_signals' do
it 'traps the given signals' do
expect(described_class).to receive(:trap).ordered.with(:INT)
expect(described_class).to receive(:trap).ordered.with(:HUP)
described_class.trap_signals(%i(INT HUP))
end
end
describe '.trap_terminate' do
it 'traps the termination signals' do
expect(described_class).to receive(:trap_signals)
.with(described_class::TERMINATE_SIGNALS)
described_class.trap_terminate { }
end
end
describe '.trap_forward' do
it 'traps the signals to forward' do
expect(described_class).to receive(:trap_signals)
.with(described_class::FORWARD_SIGNALS)
described_class.trap_forward { }
end
end
describe '.signal' do
it 'sends a signal to the given process' do
allow(Process).to receive(:kill).with(:INT, 4)
expect(described_class.signal(4, :INT)).to eq(true)
end
it 'returns false when the process does not exist' do
allow(Process).to receive(:kill).with(:INT, 4).and_raise(Errno::ESRCH)
expect(described_class.signal(4, :INT)).to eq(false)
end
end
describe '.signal_processes' do
it 'sends a signal to every thread' do
expect(described_class).to receive(:signal).with(1, :INT)
described_class.signal_processes([1], :INT)
end
end
describe '.start' do
it 'starts Sidekiq with the given queues, environment and options' do
expected_options = {
env: :production,
directory: 'foo/bar',
max_concurrency: 20,
min_concurrency: 10,
dryrun: true
}
expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo), expected_options.merge(worker_id: 0))
expect(described_class).to receive(:start_sidekiq).ordered.with(%w(bar baz), expected_options.merge(worker_id: 1))
described_class.start([%w(foo), %w(bar baz)], env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 10, dryrun: true)
end
it 'starts Sidekiq with the given queues and sensible default options' do
expected_options = {
env: :development,
directory: an_instance_of(String),
max_concurrency: 50,
min_concurrency: 0,
worker_id: an_instance_of(Integer),
dryrun: false
}
expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo bar baz), expected_options)
expect(described_class).to receive(:start_sidekiq).ordered.with(%w(solo), expected_options)
described_class.start([%w(foo bar baz), %w(solo)])
end
end
describe '.start_sidekiq' do
let(:first_worker_id) { 0 }
let(:options) do
{ env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 0, worker_id: first_worker_id, dryrun: false }
end
let(:env) { { "ENABLE_SIDEKIQ_CLUSTER" => "1", "SIDEKIQ_WORKER_ID" => first_worker_id.to_s } }
let(:args) { ['bundle', 'exec', 'sidekiq', anything, '-eproduction', *([anything] * 5)] }
it 'starts a Sidekiq process' do
allow(Process).to receive(:spawn).and_return(1)
expect(described_class).to receive(:wait_async).with(1)
expect(described_class.start_sidekiq(%w(foo), options)).to eq(1)
end
it 'handles duplicate queue names' do
allow(Process)
.to receive(:spawn)
.with(env, *args, anything)
.and_return(1)
expect(described_class).to receive(:wait_async).with(1)
expect(described_class.start_sidekiq(%w(foo foo bar baz), options)).to eq(1)
end
it 'runs the sidekiq process in a new process group' do
expect(Process)
.to receive(:spawn)
.with(anything, *args, a_hash_including(pgroup: true))
.and_return(1)
allow(described_class).to receive(:wait_async)
expect(described_class.start_sidekiq(%w(foo bar baz), options)).to eq(1)
end
end
describe '.concurrency' do
using RSpec::Parameterized::TableSyntax
where(:queue_count, :min, :max, :expected) do
2 | 0 | 0 | 3 # No min or max specified
2 | 0 | 9 | 3 # No min specified, value < max
2 | 1 | 4 | 3 # Value between min and max
2 | 4 | 5 | 4 # Value below range
5 | 2 | 3 | 3 # Value above range
2 | 1 | 1 | 1 # Value above explicit setting (min == max)
0 | 3 | 3 | 3 # Value below explicit setting (min == max)
1 | 4 | 3 | 3 # Min greater than max
end
with_them do
let(:queues) { Array.new(queue_count) }
it { expect(described_class.concurrency(queues, min, max)).to eq(expected) }
end
end
describe '.wait_async' do
it 'waits for a process in a separate thread' do
thread = described_class.wait_async(Process.spawn('true'))
# Upon success Process.wait just returns the PID.
expect(thread.value).to be_a_kind_of(Numeric)
end
end
# In the X_alive? checks, we check negative PIDs sometimes as a simple way
# to be sure the pids are definitely for non-existent processes.
# Note that -1 is special, and sends the signal to every process we have permission
# for, so we use -2, -3 etc
describe '.all_alive?' do
it 'returns true if all processes are alive' do
processes = [Process.pid]
expect(described_class.all_alive?(processes)).to eq(true)
end
it 'returns false when a thread was not alive' do
processes = [-2]
expect(described_class.all_alive?(processes)).to eq(false)
end
end
describe '.any_alive?' do
it 'returns true if at least one process is alive' do
processes = [Process.pid, -2]
expect(described_class.any_alive?(processes)).to eq(true)
end
it 'returns false when all threads are dead' do
processes = [-2, -3]
expect(described_class.any_alive?(processes)).to eq(false)
end
end
describe '.write_pid' do
it 'writes the PID of the current process to the given file' do
handle = double(:handle)
allow(File).to receive(:open).with('/dev/null', 'w').and_yield(handle)
expect(handle).to receive(:write).with(Process.pid.to_s)
described_class.write_pid('/dev/null')
end
end
end
......@@ -444,61 +444,6 @@ eos
it { is_expected.to respond_to(:id) }
end
describe '#closes_issues' do
let(:issue) { create :issue, project: project }
let(:other_project) { create(:project, :public) }
let(:other_issue) { create :issue, project: other_project }
let(:committer) { create :user }
before do
project.add_developer(committer)
other_project.add_developer(committer)
end
it 'detects issues that this commit is marked as closing' do
ext_ref = "#{other_project.full_path}##{other_issue.iid}"
allow(commit).to receive_messages(
safe_message: "Fixes ##{issue.iid} and #{ext_ref}",
committer_email: committer.email
)
expect(commit.closes_issues).to include(issue)
expect(commit.closes_issues).to include(other_issue)
end
it 'ignores referenced issues when auto-close is disabled' do
project.update!(autoclose_referenced_issues: false)
allow(commit).to receive_messages(
safe_message: "Fixes ##{issue.iid}",
committer_email: committer.email
)
expect(commit.closes_issues).to be_empty
end
context 'with personal snippet' do
let(:commit) { personal_snippet.commit }
it 'does not call Gitlab::ClosingIssueExtractor' do
expect(Gitlab::ClosingIssueExtractor).not_to receive(:new)
commit.closes_issues
end
end
context 'with project snippet' do
let(:commit) { project_snippet.commit }
it 'does not call Gitlab::ClosingIssueExtractor' do
expect(Gitlab::ClosingIssueExtractor).not_to receive(:new)
commit.closes_issues
end
end
end
it_behaves_like 'a mentionable' do
subject { create(:project, :repository).commit }
......@@ -775,32 +720,6 @@ eos
end
end
describe '#merge_requests' do
let!(:project) { create(:project, :repository) }
let!(:merge_request1) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
let!(:merge_request2) { create(:merge_request, source_project: project, source_branch: 'merged-target', target_branch: 'feature') }
let(:commit1) { merge_request1.merge_request_diff.commits.last }
let(:commit2) { merge_request1.merge_request_diff.commits.first }
it 'returns merge_requests that introduced that commit' do
expect(commit1.merge_requests).to contain_exactly(merge_request1, merge_request2)
expect(commit2.merge_requests).to contain_exactly(merge_request1)
end
context 'with personal snippet' do
it 'returns empty relation' do
expect(personal_snippet.repository.commit.merge_requests).to eq MergeRequest.none
end
end
context 'with project snippet' do
it 'returns empty relation' do
expect(project_snippet.project).not_to receive(:merge_requests)
expect(project_snippet.repository.commit.merge_requests).to eq MergeRequest.none
end
end
end
describe 'signed commits' do
let(:gpg_signed_commit) { project.commit_by(oid: '0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
let(:x509_signed_commit) { project.commit_by(oid: '189a6c924013fc3fe40d6f1ec1dc20214183bc97') }
......
......@@ -23,8 +23,7 @@ describe ProjectImportState, type: :model do
# Works around https://github.com/rspec/rspec-mocks/issues/910
allow(Project).to receive(:find).with(project.id).and_return(project)
expect(project.repository).to receive(:after_import).and_call_original
expect(project.wiki.repository).to receive(:after_import).and_call_original
expect(project).to receive(:after_import).and_call_original
end
it 'imports a project', :sidekiq_might_not_need_inline do
......
......@@ -4618,13 +4618,14 @@ describe Project do
let(:import_state) { create(:import_state, project: project) }
it 'runs the correct hooks' do
expect(project.repository).to receive(:after_import)
expect(project.wiki.repository).to receive(:after_import)
expect(project.repository).to receive(:expire_content_cache)
expect(project.wiki.repository).to receive(:expire_content_cache)
expect(import_state).to receive(:finish)
expect(project).to receive(:update_project_counter_caches)
expect(project).to receive(:after_create_default_branch)
expect(project).to receive(:refresh_markdown_cache!)
expect(InternalId).to receive(:flush_records!).with(project: project)
expect(DetectRepositoryLanguagesWorker).to receive(:perform_async).with(project.id)
project.after_import
end
......
......@@ -1929,32 +1929,6 @@ describe Repository do
end
end
describe '#after_import' do
subject { repository.after_import }
it 'flushes and builds the cache' do
expect(repository).to receive(:expire_content_cache)
subject
end
it 'calls DetectRepositoryLanguagesWorker' do
expect(DetectRepositoryLanguagesWorker).to receive(:perform_async)
subject
end
context 'with a wiki repository' do
let(:repository) { project.wiki.repository }
it 'does not call DetectRepositoryLanguagesWorker' do
expect(DetectRepositoryLanguagesWorker).not_to receive(:perform_async)
subject
end
end
end
describe '#after_push_commit' do
it 'expires statistics caches' do
expect(repository).to receive(:expire_statistics_caches)
......
......@@ -103,8 +103,8 @@ describe Issues::CloseService do
subject { described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request) }
context 'when `metrics.first_mentioned_in_commit_at` is not set' do
it 'uses the first commit timestamp' do
expected = closing_merge_request.commits.first.date
it 'uses the first commit authored timestamp' do
expected = closing_merge_request.commits.first.authored_date
subject
......
......@@ -118,7 +118,7 @@ describe MergeRequests::MergeService do
it 'closes GitLab issue tracker issues' do
issue = create :issue, project: project
commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.now)
commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.now, authored_date: Time.now)
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
......
......@@ -59,7 +59,7 @@ module ProjectForksHelper
bare_repo: TestEnv.forked_repo_path_bare,
refs: TestEnv::FORKED_BRANCH_SHA
)
forked_project.repository.after_import
forked_project.repository.expire_content_cache
forked_project
end
......