Commit 5366964a authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 733befe9
...@@ -87,7 +87,7 @@ gem 'grape-entity', '~> 0.7.1' ...@@ -87,7 +87,7 @@ gem 'grape-entity', '~> 0.7.1'
gem 'rack-cors', '~> 1.0.6', require: 'rack/cors' gem 'rack-cors', '~> 1.0.6', require: 'rack/cors'
# GraphQL API # GraphQL API
gem 'graphql', '~> 1.9.11' gem 'graphql', '~> 1.9.12'
# NOTE: graphiql-rails v1.5+ doesn't work: https://gitlab.com/gitlab-org/gitlab/issues/31771 # NOTE: graphiql-rails v1.5+ doesn't work: https://gitlab.com/gitlab-org/gitlab/issues/31771
# TODO: remove app/views/graphiql/rails/editors/show.html.erb when https://github.com/rmosolgo/graphiql-rails/pull/71 is released: # TODO: remove app/views/graphiql/rails/editors/show.html.erb when https://github.com/rmosolgo/graphiql-rails/pull/71 is released:
# https://gitlab.com/gitlab-org/gitlab/issues/31747 # https://gitlab.com/gitlab-org/gitlab/issues/31747
......
...@@ -455,7 +455,7 @@ GEM ...@@ -455,7 +455,7 @@ GEM
graphiql-rails (1.4.10) graphiql-rails (1.4.10)
railties railties
sprockets-rails sprockets-rails
graphql (1.9.11) graphql (1.9.12)
graphql-docs (1.6.0) graphql-docs (1.6.0)
commonmarker (~> 0.16) commonmarker (~> 0.16)
escape_utils (~> 1.2) escape_utils (~> 1.2)
...@@ -1252,7 +1252,7 @@ DEPENDENCIES ...@@ -1252,7 +1252,7 @@ DEPENDENCIES
grape-path-helpers (~> 1.2) grape-path-helpers (~> 1.2)
grape_logging (~> 1.7) grape_logging (~> 1.7)
graphiql-rails (~> 1.4.10) graphiql-rails (~> 1.4.10)
graphql (~> 1.9.11) graphql (~> 1.9.12)
graphql-docs (~> 1.6.0) graphql-docs (~> 1.6.0)
grpc (~> 1.24.0) grpc (~> 1.24.0)
gssapi gssapi
......
...@@ -26,7 +26,7 @@ export default class NotificationsForm { ...@@ -26,7 +26,7 @@ export default class NotificationsForm {
.addClass('is-loading') .addClass('is-loading')
.find('.custom-notification-event-loading') .find('.custom-notification-event-loading')
.removeClass('fa-check') .removeClass('fa-check')
.addClass('fa-spin fa-spinner') .addClass('spinner align-middle')
.removeClass('is-done'); .removeClass('is-done');
} }
...@@ -41,12 +41,12 @@ export default class NotificationsForm { ...@@ -41,12 +41,12 @@ export default class NotificationsForm {
if (data.saved) { if (data.saved) {
$parent $parent
.find('.custom-notification-event-loading') .find('.custom-notification-event-loading')
.toggleClass('fa-spin fa-spinner fa-check is-done'); .toggleClass('spinner fa-check is-done align-middle');
setTimeout(() => { setTimeout(() => {
$parent $parent
.removeClass('is-loading') .removeClass('is-loading')
.find('.custom-notification-event-loading') .find('.custom-notification-event-loading')
.toggleClass('fa-spin fa-spinner fa-check is-done'); .toggleClass('spinner fa-check is-done align-middle');
}, 2000); }, 2000);
} }
}) })
......
...@@ -818,7 +818,7 @@ module Ci ...@@ -818,7 +818,7 @@ module Ci
depended_jobs = depends_on_builds depended_jobs = depends_on_builds
# find all jobs that are needed # find all jobs that are needed
if Feature.enabled?(:ci_dag_support, project, default_enabled: true) && needs.exists? if Feature.enabled?(:ci_dag_support, project, default_enabled: true) && scheduling_type_dag?
depended_jobs = depended_jobs.where(name: needs.artifacts.select(:name)) depended_jobs = depended_jobs.where(name: needs.artifacts.select(:name))
end end
......
...@@ -12,6 +12,18 @@ module Ci ...@@ -12,6 +12,18 @@ module Ci
scope :preload_needs, -> { preload(:needs) } scope :preload_needs, -> { preload(:needs) }
scope :with_needs, -> (names = nil) do
needs = Ci::BuildNeed.scoped_build.select(1)
needs = needs.where(name: names) if names
where('EXISTS (?)', needs).preload(:needs)
end
scope :without_needs, -> (names = nil) do
needs = Ci::BuildNeed.scoped_build.select(1)
needs = needs.where(name: names) if names
where('NOT EXISTS (?)', needs)
end
def self.select_with_aggregated_needs(project) def self.select_with_aggregated_needs(project)
return all unless Feature.enabled?(:ci_dag_support, project, default_enabled: true) return all unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
...@@ -26,6 +38,18 @@ module Ci ...@@ -26,6 +38,18 @@ module Ci
) )
end end
# Old processables may have scheduling_type as nil,
# so we need to ensure the data exists before using it.
def self.populate_scheduling_type!
needs = Ci::BuildNeed.scoped_build.select(1)
where(scheduling_type: nil).update_all(
"scheduling_type = CASE WHEN (EXISTS (#{needs.to_sql}))
THEN #{scheduling_types[:dag]}
ELSE #{scheduling_types[:stage]}
END"
)
end
validates :type, presence: true validates :type, presence: true
validates :scheduling_type, presence: true, on: :create, if: :validate_scheduling_type? validates :scheduling_type, presence: true, on: :create, if: :validate_scheduling_type?
...@@ -53,6 +77,11 @@ module Ci ...@@ -53,6 +77,11 @@ module Ci
raise NotImplementedError raise NotImplementedError
end end
# Overriding scheduling_type enum's method for nil `scheduling_type`s
def scheduling_type_dag?
super || find_legacy_scheduling_type == :dag
end
# scheduling_type column of previous builds/bridges have not been populated, # scheduling_type column of previous builds/bridges have not been populated,
# so we calculate this value on runtime when we need it. # so we calculate this value on runtime when we need it.
def find_legacy_scheduling_type def find_legacy_scheduling_type
......
...@@ -62,18 +62,6 @@ class CommitStatus < ApplicationRecord ...@@ -62,18 +62,6 @@ class CommitStatus < ApplicationRecord
preload(project: :namespace) preload(project: :namespace)
end end
scope :with_needs, -> (names = nil) do
needs = Ci::BuildNeed.scoped_build.select(1)
needs = needs.where(name: names) if names
where('EXISTS (?)', needs).preload(:needs)
end
scope :without_needs, -> (names = nil) do
needs = Ci::BuildNeed.scoped_build.select(1)
needs = needs.where(name: names) if names
where('NOT EXISTS (?)', needs)
end
scope :match_id_and_lock_version, -> (slice) do scope :match_id_and_lock_version, -> (slice) do
# it expects that items are an array of attributes to match # it expects that items are an array of attributes to match
# each hash needs to have `id` and `lock_version` # each hash needs to have `id` and `lock_version`
......
...@@ -6,9 +6,9 @@ class YoutrackService < IssueTrackerService ...@@ -6,9 +6,9 @@ class YoutrackService < IssueTrackerService
# {PROJECT-KEY}-{NUMBER} Examples: YT-1, PRJ-1, gl-030 # {PROJECT-KEY}-{NUMBER} Examples: YT-1, PRJ-1, gl-030
def self.reference_pattern(only_long: false) def self.reference_pattern(only_long: false)
if only_long if only_long
/(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+)/ /(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+\b)/
else else
/(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+)|(#{Issue.reference_prefix}(?<issue>\d+))/ /(?<issue>\b[A-Za-z][A-Za-z0-9_]*-\d+\b)|(#{Issue.reference_prefix}(?<issue>\d+))/
end end
end end
......
...@@ -61,7 +61,7 @@ module Ci ...@@ -61,7 +61,7 @@ module Ci
Ci::ProcessPipelineService Ci::ProcessPipelineService
.new(pipeline) .new(pipeline)
.execute .execute(nil, initial_process: true)
end end
end end
......
...@@ -93,9 +93,9 @@ module Ci ...@@ -93,9 +93,9 @@ module Ci
end end
def processable_status(processable) def processable_status(processable)
if needs_names = processable.aggregated_needs_names if Feature.enabled?(:ci_dag_support, project, default_enabled: true) && processable.scheduling_type_dag?
# Processable uses DAG, get status of all dependent needs # Processable uses DAG, get status of all dependent needs
@collection.status_for_names(needs_names) @collection.status_for_names(processable.aggregated_needs_names.to_a)
else else
# Processable uses Stages, get status of prior stage # Processable uses Stages, get status of prior stage
@collection.status_for_prior_stage_position(processable.stage_idx.to_i) @collection.status_for_prior_stage_position(processable.stage_idx.to_i)
......
...@@ -11,12 +11,13 @@ module Ci ...@@ -11,12 +11,13 @@ module Ci
@pipeline = pipeline @pipeline = pipeline
end end
def execute(trigger_build_ids = nil) def execute(trigger_build_ids = nil, initial_process: false)
success = process_stages_without_needs success = process_stages_for_stage_scheduling
# we evaluate dependent needs, # we evaluate dependent needs,
# only when the another job has finished # only when the another job has finished
success = process_builds_with_needs(trigger_build_ids) || success success = process_dag_builds_without_needs || success if initial_process
success = process_dag_builds_with_needs(trigger_build_ids) || success
@pipeline.update_legacy_status @pipeline.update_legacy_status
...@@ -25,23 +26,31 @@ module Ci ...@@ -25,23 +26,31 @@ module Ci
private private
def process_stages_without_needs def process_stages_for_stage_scheduling
stage_indexes_of_created_processables_without_needs.flat_map do |index| stage_indexes_of_created_stage_scheduled_processables.flat_map do |index|
process_stage_without_needs(index) process_stage_for_stage_scheduling(index)
end.any? end.any?
end end
def process_stage_without_needs(index) def process_stage_for_stage_scheduling(index)
current_status = status_for_prior_stages(index) current_status = status_for_prior_stages(index)
return unless HasStatus::COMPLETED_STATUSES.include?(current_status) return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
created_processables_in_stage_without_needs(index).find_each.select do |build| created_stage_scheduled_processables_in_stage(index).find_each.select do |build|
process_build(build, current_status) process_build(build, current_status)
end.any? end.any?
end end
def process_builds_with_needs(trigger_build_ids) def process_dag_builds_without_needs
return false unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
created_processables.scheduling_type_dag.without_needs.each do |build|
process_build(build, 'success')
end
end
def process_dag_builds_with_needs(trigger_build_ids)
return false unless trigger_build_ids.present? return false unless trigger_build_ids.present?
return false unless Feature.enabled?(:ci_dag_support, project, default_enabled: true) return false unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
...@@ -56,14 +65,15 @@ module Ci ...@@ -56,14 +65,15 @@ module Ci
# Each found processable is guaranteed here to have completed status # Each found processable is guaranteed here to have completed status
created_processables created_processables
.scheduling_type_dag
.with_needs(trigger_build_names) .with_needs(trigger_build_names)
.without_needs(incomplete_build_names) .without_needs(incomplete_build_names)
.find_each .find_each
.map(&method(:process_build_with_needs)) .map(&method(:process_dag_build_with_needs))
.any? .any?
end end
def process_build_with_needs(build) def process_dag_build_with_needs(build)
current_status = status_for_build_needs(build.needs.map(&:name)) current_status = status_for_build_needs(build.needs.map(&:name))
return unless HasStatus::COMPLETED_STATUSES.include?(current_status) return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
...@@ -87,23 +97,23 @@ module Ci ...@@ -87,23 +97,23 @@ module Ci
end end
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def stage_indexes_of_created_processables_without_needs def stage_indexes_of_created_stage_scheduled_processables
created_processables_without_needs.order(:stage_idx) created_stage_scheduled_processables.order(:stage_idx)
.pluck(Arel.sql('DISTINCT stage_idx')) .pluck(Arel.sql('DISTINCT stage_idx'))
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
def created_processables_in_stage_without_needs(index) def created_stage_scheduled_processables_in_stage(index)
created_processables_without_needs created_stage_scheduled_processables
.with_preloads .with_preloads
.for_stage(index) .for_stage(index)
end end
def created_processables_without_needs def created_stage_scheduled_processables
if Feature.enabled?(:ci_dag_support, project, default_enabled: true) if Feature.enabled?(:ci_dag_support, project, default_enabled: true)
pipeline.processables.created.without_needs created_processables.scheduling_type_stage
else else
pipeline.processables.created created_processables
end end
end end
......
...@@ -8,8 +8,9 @@ module Ci ...@@ -8,8 +8,9 @@ module Ci
@pipeline = pipeline @pipeline = pipeline
end end
def execute(trigger_build_ids = nil) def execute(trigger_build_ids = nil, initial_process: false)
update_retried update_retried
ensure_scheduling_type_for_processables
if Feature.enabled?(:ci_atomic_processing, pipeline.project) if Feature.enabled?(:ci_atomic_processing, pipeline.project)
Ci::PipelineProcessing::AtomicProcessingService Ci::PipelineProcessing::AtomicProcessingService
...@@ -18,7 +19,7 @@ module Ci ...@@ -18,7 +19,7 @@ module Ci
else else
Ci::PipelineProcessing::LegacyProcessingService Ci::PipelineProcessing::LegacyProcessingService
.new(pipeline) .new(pipeline)
.execute(trigger_build_ids) .execute(trigger_build_ids, initial_process: initial_process)
end end
end end
...@@ -43,5 +44,17 @@ module Ci ...@@ -43,5 +44,17 @@ module Ci
.update_all(retried: true) if latest_statuses.any? .update_all(retried: true) if latest_statuses.any?
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
# Set scheduling type of processables if they were created before scheduling_type
# data was deployed (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22246).
# Given that this service runs multiple times during the pipeline
# life cycle we need to ensure we populate the data once.
# See more: https://gitlab.com/gitlab-org/gitlab/issues/205426
def ensure_scheduling_type_for_processables
lease = Gitlab::ExclusiveLease.new("set-scheduling-types:#{pipeline.id}", timeout: 1.hour.to_i)
return unless lease.try_obtain
pipeline.processables.populate_scheduling_type!
end
end end
end end
...@@ -36,7 +36,7 @@ module Ci ...@@ -36,7 +36,7 @@ module Ci
Ci::ProcessPipelineService Ci::ProcessPipelineService
.new(pipeline) .new(pipeline)
.execute(completed_build_ids) .execute(completed_build_ids, initial_process: true)
end end
end end
end end
...@@ -27,6 +27,6 @@ ...@@ -27,6 +27,6 @@
- if build.has_trace? - if build.has_trace?
%td{ colspan: "2", style: "font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; padding: 0 0 16px;" } %td{ colspan: "2", style: "font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; padding: 0 0 16px;" }
%pre{ style: "font-family: Monaco,'Lucida Console','Courier New',Courier,monospace; background-color: #fafafa; border-radius: 4px; overflow: hidden; white-space: pre-wrap; word-break: break-all; font-size:13px; line-height: 1.4; padding: 16px 8px; color: #333333; margin: 0;" } %pre{ style: "font-family: Monaco,'Lucida Console','Courier New',Courier,monospace; background-color: #fafafa; border-radius: 4px; overflow: hidden; white-space: pre-wrap; word-break: break-all; font-size:13px; line-height: 1.4; padding: 16px 8px; color: #333333; margin: 0;" }
= build.trace.html(last_lines: 10).html_safe = build.trace.html(last_lines: 30).html_safe
- else - else
%td{ colspan: "2" } %td{ colspan: "2" }
...@@ -15,6 +15,6 @@ had <%= failed.size %> failed <%= 'build'.pluralize(failed.size) %>. ...@@ -15,6 +15,6 @@ had <%= failed.size %> failed <%= 'build'.pluralize(failed.size) %>.
Stage: <%= build.stage %> Stage: <%= build.stage %>
Name: <%= build.name %> Name: <%= build.name %>
<% if build.has_trace? -%> <% if build.has_trace? -%>
Trace: <%= build.trace.raw(last_lines: 10) %> Trace: <%= build.trace.raw(last_lines: 30) %>
<% end -%> <% end -%>
<% end -%> <% end -%>
...@@ -35,7 +35,7 @@ had <%= failed.size %> failed <%= 'build'.pluralize(failed.size) %>. ...@@ -35,7 +35,7 @@ had <%= failed.size %> failed <%= 'build'.pluralize(failed.size) %>.
Stage: <%= build.stage %> Stage: <%= build.stage %>
Name: <%= build.name %> Name: <%= build.name %>
<% if build.has_trace? -%> <% if build.has_trace? -%>
Trace: <%= build.trace.raw(last_lines: 10) %> Trace: <%= build.trace.raw(last_lines: 30) %>
<% end -%> <% end -%>
<% end -%> <% end -%>
...@@ -30,4 +30,4 @@ ...@@ -30,4 +30,4 @@
%label.form-check-label{ for: field_id } %label.form-check-label{ for: field_id }
%strong %strong
= notification_event_name(event) = notification_event_name(event)
= icon("spinner spin", class: "custom-notification-event-loading") .fa.custom-notification-event-loading.spinner
---
title: Add GET endpoint to LDAP group link API
merge_request: 24216
author:
type: added
---
title: Add delete identity endpoint on the users API
merge_request: 24122
author:
type: added
---
title: Implement allowing empty needs for jobs in DAG pipelines
merge_request: 22246
author:
type: added
---
title: Extend the list of excluded_attributes for group on Group Import
merge_request: 25031
author:
type: fixed
---
title: Increase pipeline email notification from 10 to 30 lines
merge_request: 21728
author: Philipp Hasper
type: changed
---
title: Avoid autolinking YouTrack issue numbers followed by letters
merge_request: 24770
author: Konrad Borowski
type: fixed
...@@ -68,11 +68,9 @@ categories = changes.keys - [:unknown] ...@@ -68,11 +68,9 @@ categories = changes.keys - [:unknown]
# Ensure to spin for database reviewer/maintainer when ~database is applied (e.g. to review SQL queries) # Ensure to spin for database reviewer/maintainer when ~database is applied (e.g. to review SQL queries)
categories << :database if gitlab.mr_labels.include?('database') && !categories.include?(:database) categories << :database if gitlab.mr_labels.include?('database') && !categories.include?(:database)
# Single codebase MRs are reviewed using a slightly different process, so we
# disable the review roulette for such MRs.
# CSS Clean up MRs are reviewed using a slightly different process, so we # CSS Clean up MRs are reviewed using a slightly different process, so we
# disable the review roulette for such MRs. # disable the review roulette for such MRs.
if changes.any? && !gitlab.mr_labels.include?('single codebase') && !gitlab.mr_labels.include?('CSS cleanup') if changes.any? && !gitlab.mr_labels.include?('CSS cleanup')
# Strip leading and trailing CE/EE markers # Strip leading and trailing CE/EE markers
canonical_branch_name = canonical_branch_name =
roulette.canonical_branch_name(gitlab.mr_json['source_branch']) roulette.canonical_branch_name(gitlab.mr_json['source_branch'])
......
FRONTEND_MAINTAINERS = %w[filipa iamphill].freeze
BACKEND_MAINTAINERS = %w[rspeicher rymai yorickpeterse godfat].freeze
NO_REVIEWER = 'No reviewer available'.freeze
def mention_single_codebase_approvers
canonical_branch_name =
roulette.canonical_branch_name(gitlab.mr_json['source_branch'])
random = roulette.new_random(canonical_branch_name)
frontend_maintainers = helper.new_teammates(FRONTEND_MAINTAINERS)
backend_maintainers = helper.new_teammates(BACKEND_MAINTAINERS)
rows = []
if gitlab.mr_labels.include?('frontend')
frontend_maintainer =
roulette.spin_for_person(frontend_maintainers, random: random)
rows << "| ~frontend | #{frontend_maintainer&.markdown_name || NO_REVIEWER}"
end
if gitlab.mr_labels.include?('backend')
backend_maintainer =
roulette.spin_for_person(backend_maintainers, random: random)
rows << "| ~backend | #{backend_maintainer&.markdown_name || NO_REVIEWER}"
end
if rows.empty?
backup_maintainer = backend_maintainers.sample
rows << "| ~frontend / ~backend | #{backup_maintainer.markdown_name}"
end
markdown(<<~MARKDOWN.strip)
## Single codebase changes
This merge request contains changes related to the work of moving towards a
[single codebase](https://gitlab.com/groups/gitlab-org/-/epics/802) for
Community Edition and Enterprise Edition. These changes will need to be
reviewed and approved by the following engineers:
| Category | Reviewer
|----------|---------
#{rows.join("\n")}
To make sure this happens, please follow these steps:
1. Add all of the mentioned users to the list of merge request approvals.
2. Assign the merge request to the first person in the above list.
If you are a reviewer, please follow these steps:
1. Review the merge request. If it is good to go, approve it.
2. Once approved, assign to the next person in the above list. If you are
the last person in the list, merge the merge request.
MARKDOWN
end
if gitlab.mr_labels.include?('single codebase')
mention_single_codebase_approvers
end
# frozen_string_literal: true
class AddFieldsToApplicationSettingsForMergeRequestsApprovals < ActiveRecord::Migration[5.2]
DOWNTIME = false
def up
add_column(:application_settings,
:disable_overriding_approvers_per_merge_request,
:boolean,
default: false,
null: false)
add_column(:application_settings,
:prevent_merge_requests_author_approval,
:boolean,
default: false,
null: false)
add_column(:application_settings,
:prevent_merge_requests_committers_approval,
:boolean,
default: false,
null: false)
end
def down
remove_column(:application_settings, :disable_overriding_approvers_per_merge_request)
remove_column(:application_settings, :prevent_merge_requests_author_approval)
remove_column(:application_settings, :prevent_merge_requests_committers_approval)
end
end
# frozen_string_literal: true
class RemovePackagesDeprecatedDependencies < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
execute('DELETE FROM packages_dependency_links WHERE dependency_type = 5')
end
def down
# There is nothing to do to reverse this migration
end
end
...@@ -346,6 +346,9 @@ ActiveRecord::Schema.define(version: 2020_02_13_204737) do ...@@ -346,6 +346,9 @@ ActiveRecord::Schema.define(version: 2020_02_13_204737) do
t.integer "elasticsearch_indexed_field_length_limit", default: 0, null: false t.integer "elasticsearch_indexed_field_length_limit", default: 0, null: false
t.integer "elasticsearch_max_bulk_size_mb", limit: 2, default: 10, null: false t.integer "elasticsearch_max_bulk_size_mb", limit: 2, default: 10, null: false
t.integer "elasticsearch_max_bulk_concurrency", limit: 2, default: 10, null: false t.integer "elasticsearch_max_bulk_concurrency", limit: 2, default: 10, null: false
t.boolean "disable_overriding_approvers_per_merge_request", default: false, null: false
t.boolean "prevent_merge_requests_author_approval", default: false, null: false
t.boolean "prevent_merge_requests_committers_approval", default: false, null: false
t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id" t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id"
t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id" t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id"
t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id" t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id"
......
...@@ -798,7 +798,7 @@ DELETE /groups/:id/hooks/:hook_id ...@@ -798,7 +798,7 @@ DELETE /groups/:id/hooks/:hook_id
Group audit events can be accessed via the [Group Audit Events API](audit_events.md#group-audit-events-starter) Group audit events can be accessed via the [Group Audit Events API](audit_events.md#group-audit-events-starter)
## Sync group with LDAP **(CORE ONLY)** ## Sync group with LDAP **(STARTER)**
Syncs the group with its linked LDAP group. Only available to group owners and administrators. Syncs the group with its linked LDAP group. Only available to group owners and administrators.
...@@ -814,7 +814,23 @@ Parameters: ...@@ -814,7 +814,23 @@ Parameters:
Please consult the [Group Members](members.md) documentation. Please consult the [Group Members](members.md) documentation.
### Add LDAP group link **(CORE ONLY)** ## LDAP Group Links
List, add, and delete LDAP group links.
### List LDAP group links **(STARTER)**
Lists LDAP group links.
```
GET /groups/:id/ldap_group_links
```
Parameters:
- `id` (required) - The ID of a group
### Add LDAP group link **(STARTER)**
Adds an LDAP group link. Adds an LDAP group link.
...@@ -829,7 +845,7 @@ Parameters: ...@@ -829,7 +845,7 @@ Parameters:
- `group_access` (required) - Minimum access level for members of the LDAP group - `group_access` (required) - Minimum access level for members of the LDAP group
- `provider` (required) - LDAP provider for the LDAP group - `provider` (required) - LDAP provider for the LDAP group
### Delete LDAP group link **(CORE ONLY)** ### Delete LDAP group link **(STARTER)**
Deletes an LDAP group link. Deletes an LDAP group link.
......
...@@ -437,6 +437,19 @@ Note, at the moment this method does only return a `404` error, ...@@ -437,6 +437,19 @@ Note, at the moment this method does only return a `404` error,
even in cases where a `409` (Conflict) would be more appropriate, even in cases where a `409` (Conflict) would be more appropriate,
e.g. when renaming the email address to some existing one. e.g. when renaming the email address to some existing one.
## Delete authentication identity from user
Deletes a user's authentication identity using the provider name associated with that identity. Available only for administrators.
```
DELETE /users/:id/identities/:provider
```
Parameters:
- `id` (required) - The ID of the user
- `provider` (required) - External provider name
## User deletion ## User deletion
Deletes a user. Available only for administrators. Deletes a user. Available only for administrators.
......
...@@ -2248,6 +2248,7 @@ and bring back the old behavior. ...@@ -2248,6 +2248,7 @@ and bring back the old behavior.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/47063) in GitLab 12.2. > - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/47063) in GitLab 12.2.
> - In GitLab 12.3, maximum number of jobs in `needs` array raised from five to 50. > - In GitLab 12.3, maximum number of jobs in `needs` array raised from five to 50.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/30631) in GitLab 12.8, `needs: []` lets jobs start immediately.
The `needs:` keyword enables executing jobs out-of-order, allowing you to implement The `needs:` keyword enables executing jobs out-of-order, allowing you to implement
a [directed acyclic graph](../directed_acyclic_graph/index.md) in your `.gitlab-ci.yml`. a [directed acyclic graph](../directed_acyclic_graph/index.md) in your `.gitlab-ci.yml`.
...@@ -2264,6 +2265,10 @@ linux:build: ...@@ -2264,6 +2265,10 @@ linux:build:
mac:build: mac:build:
stage: build stage: build
lint:
stage: test
needs: []
linux:rspec: linux:rspec:
stage: test stage: test
needs: ["linux:build"] needs: ["linux:build"]
...@@ -2284,7 +2289,9 @@ production: ...@@ -2284,7 +2289,9 @@ production:
stage: deploy stage: deploy
``` ```
This example creates three paths of execution: This example creates four paths of execution:
- Linter: the `lint` job will run immediately without waiting for the `build` stage to complete because it has no needs (`needs: []`).
- Linux path: the `linux:rspec` and `linux:rubocop` jobs will be run as soon - Linux path: the `linux:rspec` and `linux:rubocop` jobs will be run as soon
as the `linux:build` job finishes without waiting for `mac:build` to finish. as the `linux:build` job finishes without waiting for `mac:build` to finish.
...@@ -2308,9 +2315,6 @@ This example creates three paths of execution: ...@@ -2308,9 +2315,6 @@ This example creates three paths of execution:
- For self-managed instances, the limit is: - For self-managed instances, the limit is:
- 10, if the `ci_dag_limit_needs` feature flag is enabled (default). - 10, if the `ci_dag_limit_needs` feature flag is enabled (default).
- 50, if the `ci_dag_limit_needs` feature flag is disabled. - 50, if the `ci_dag_limit_needs` feature flag is disabled.
- It is impossible for now to have `needs: []` (empty needs), the job always needs to
depend on something, unless this is the job in the first stage. However, support for
an empty needs array [is planned](https://gitlab.com/gitlab-org/gitlab/issues/30631).
- If `needs:` refers to a job that is marked as `parallel:`. - If `needs:` refers to a job that is marked as `parallel:`.
the current job will depend on all parallel jobs created. the current job will depend on all parallel jobs created.
- `needs:` is similar to `dependencies:` in that it needs to use jobs from prior stages, - `needs:` is similar to `dependencies:` in that it needs to use jobs from prior stages,
......
...@@ -28,7 +28,7 @@ The Admin Area is made up of the following sections: ...@@ -28,7 +28,7 @@ The Admin Area is made up of the following sections:
| Abuse Reports | Manage [abuse reports](abuse_reports.md) submitted by your users. | | Abuse Reports | Manage [abuse reports](abuse_reports.md) submitted by your users. |
| License **(STARTER ONLY)** | Upload, display, and remove [licenses](license.md). | | License **(STARTER ONLY)** | Upload, display, and remove [licenses](license.md). |
| Kubernetes | Create and manage instance-level [Kubernetes clusters](../instance/clusters/index.md). | | Kubernetes | Create and manage instance-level [Kubernetes clusters](../instance/clusters/index.md). |
| Push Rules **(STARTER)** | Configure pre-defined Git [push rules](../../push_rules/push_rules.md) for projects. | Push Rules **(STARTER ONLY)** | Configure pre-defined Git [push rules](../../push_rules/push_rules.md) for projects. Also, configure [merge requests approvers rules](merge_requests_approvals.md). **(PREMIUM ONLY)** |
| Geo **(PREMIUM ONLY)** | Configure and maintain [Geo nodes](geo_nodes.md). | | Geo **(PREMIUM ONLY)** | Configure and maintain [Geo nodes](geo_nodes.md). |
| Deploy Keys | Create instance-wide [SSH deploy keys](../../ssh/README.md#deploy-keys). | | Deploy Keys | Create instance-wide [SSH deploy keys](../../ssh/README.md#deploy-keys). |
| Credentials **(ULTIMATE ONLY)** | View [credentials](credentials_inventory.md) that can be used to access your instance. | | Credentials **(ULTIMATE ONLY)** | View [credentials](credentials_inventory.md) that can be used to access your instance. |
......
---
type: reference, concepts
---
# Instance-level merge request approval rules **(PREMIUM ONLY)**
> Introduced in [GitLab Premium](https://gitlab.com/gitlab-org/gitlab/issues/39060) 12.8.
Merge request approvals rules prevent users overriding certain settings on a project
level. When configured, only administrators can change these settings on a project level
if they are enabled at an instance level.
To enable merge request approval rules for an instance:
1. Navigate to **{admin}** **Admin Area >** **{push-rules}** **Push Rules** and expand **Merge
requests approvals**.
1. Set the required rule.
1. Click **Save changes**.
GitLab administrators can later override these settings in a project’s settings.
## Available rules
Merge request approval rules that can be set at an instance level are:
- **Prevent approval of merge requests by merge request author**. Prevents non-admins
from allowing merge request authors to merge their own merge requests in individual
projects.
- **Prevent approval of merge requests by merge request committers**. Prevents
non-admins from allowing merge request committers to merge merge requests they were
committing to in individual projects.
- **Prevent users from modifying merge request approvers list**. Prevents non-admins
from modifying approvers list in project settings and in individual merge requests.
...@@ -225,6 +225,27 @@ module API ...@@ -225,6 +225,27 @@ module API
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
# Removes a single external identity (e.g. an LDAP or SAML link) from a user.
desc "Delete a user's identity. Available only for admins" do
  success Entities::UserWithAdmin
end
params do
  requires :id, type: Integer, desc: 'The ID of the user'
  requires :provider, type: String, desc: 'The external provider'
end
# rubocop: disable CodeReuse/ActiveRecord
delete ":id/identities/:provider" do
  authenticated_as_admin!
  # Look up the target user; respond 404 when the ID does not exist.
  user = User.find_by(id: params[:id])
  not_found!('User') unless user
  # Only the identity matching the requested provider is removed.
  identity = user.identities.find_by(provider: params[:provider])
  not_found!('Identity') unless identity
  # destroy_conditionally! performs the delete and supports conditional
  # requests (the spec exercises a 412 Precondition Failed response).
  destroy_conditionally!(identity)
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Add an SSH key to a specified user. Available only for admins.' do desc 'Add an SSH key to a specified user. Available only for admins.' do
success Entities::SSHKey success Entities::SSHKey
end end
......
...@@ -11,12 +11,14 @@ module Gitlab ...@@ -11,12 +11,14 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable include ::Gitlab::Config::Entry::Validatable
validations do validations do
validates :config, presence: true
validate do validate do
unless config.is_a?(Hash) || config.is_a?(Array) unless config.is_a?(Hash) || config.is_a?(Array)
errors.add(:config, 'can only be a Hash or an Array') errors.add(:config, 'can only be a Hash or an Array')
end end
if config.is_a?(Hash) && config.empty?
errors.add(:config, 'can not be an empty Hash')
end
end end
validate on: :composed do validate on: :composed do
......
...@@ -28,8 +28,15 @@ included_attributes: ...@@ -28,8 +28,15 @@ included_attributes:
excluded_attributes: excluded_attributes:
group: group:
- :id - :id
- :name
- :path
- :owner_id
- :parent_id
- :created_at
- :updated_at
- :runners_token - :runners_token
- :runners_token_encrypted - :runners_token_encrypted
- :saml_discovery_token
methods: methods:
labels: labels:
......
...@@ -18,7 +18,6 @@ class GitlabDanger ...@@ -18,7 +18,6 @@ class GitlabDanger
changelog changelog
specs specs
roulette roulette
single_codebase
gitlab_ui_wg gitlab_ui_wg
ce_ee_vue_templates ce_ee_vue_templates
].freeze ].freeze
......
...@@ -11917,6 +11917,9 @@ msgstr "" ...@@ -11917,6 +11917,9 @@ msgstr ""
msgid "Merge requests" msgid "Merge requests"
msgstr "" msgstr ""
msgid "Merge requests approvals"
msgstr ""
msgid "Merge requests are a place to propose changes you've made to a project and discuss those changes with others" msgid "Merge requests are a place to propose changes you've made to a project and discuss those changes with others"
msgstr "" msgstr ""
...@@ -14161,6 +14164,9 @@ msgstr "" ...@@ -14161,6 +14164,9 @@ msgstr ""
msgid "Prevent users from changing their profile name" msgid "Prevent users from changing their profile name"
msgstr "" msgstr ""
msgid "Prevent users from modifing merge request approvers list"
msgstr ""
msgid "Preview" msgid "Preview"
msgstr "" msgstr ""
...@@ -16385,6 +16391,9 @@ msgstr "" ...@@ -16385,6 +16391,9 @@ msgstr ""
msgid "Rook" msgid "Rook"
msgstr "" msgstr ""
msgid "Rules that define what git pushes are accepted for a project. All newly created projects will use these settings."
msgstr ""
msgid "Run CI/CD pipelines for external repositories" msgid "Run CI/CD pipelines for external repositories"
msgstr "" msgstr ""
...@@ -17427,6 +17436,9 @@ msgstr "" ...@@ -17427,6 +17436,9 @@ msgstr ""
msgid "Settings" msgid "Settings"
msgstr "" msgstr ""
msgid "Settings to prevent self-approval across all projects in the instance. Only an administrator can modify these settings."
msgstr ""
msgid "Severity: %{severity}" msgid "Severity: %{severity}"
msgstr "" msgstr ""
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
FactoryBot.define do FactoryBot.define do
factory :ci_build_need, class: 'Ci::BuildNeed' do factory :ci_build_need, class: 'Ci::BuildNeed' do
build factory: :ci_build build factory: :ci_build, scheduling_type: :dag
sequence(:name) { |n| "build_#{n}" } sequence(:name) { |n| "build_#{n}" }
end end
end end
{ {
"name": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf", "name": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf",
"path": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf", "path": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf",
"owner_id": null, "owner_id": 123,
"created_at": "2019-11-20 17:01:53 UTC", "created_at": "2019-11-20 17:01:53 UTC",
"updated_at": "2019-11-20 17:05:44 UTC", "updated_at": "2019-11-20 17:05:44 UTC",
"description": "Group Description", "description": "Group Description",
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
"ldap_sync_last_successful_update_at": null, "ldap_sync_last_successful_update_at": null,
"ldap_sync_last_sync_at": null, "ldap_sync_last_sync_at": null,
"lfs_enabled": null, "lfs_enabled": null,
"parent_id": null, "parent_id": 7,
"shared_runners_minutes_limit": null, "shared_runners_minutes_limit": null,
"repository_size_limit": null, "repository_size_limit": null,
"require_two_factor_authentication": false, "require_two_factor_authentication": false,
...@@ -33,6 +33,8 @@ ...@@ -33,6 +33,8 @@
"extra_shared_runners_minutes_limit": null, "extra_shared_runners_minutes_limit": null,
"last_ci_minutes_notification_at": null, "last_ci_minutes_notification_at": null,
"last_ci_minutes_usage_notification_level": null, "last_ci_minutes_usage_notification_level": null,
"runners_token": "token",
"runners_token_encrypted": "encrypted",
"subgroup_creation_level": 1, "subgroup_creation_level": 1,
"emails_disabled": null, "emails_disabled": null,
"max_pages_size": null, "max_pages_size": null,
......
...@@ -71,6 +71,44 @@ describe Gitlab::ImportExport::GroupTreeRestorer do ...@@ -71,6 +71,44 @@ describe Gitlab::ImportExport::GroupTreeRestorer do
end end
end end
context 'excluded attributes' do
  # The complex fixture's group.json carries owner_id 123, so a user with
  # that ID must exist; a separate user performs the import.
  let!(:source_user) { create(:user, id: 123) }
  let!(:importer_user) { create(:user) }
  let(:group) { create(:group) }
  let(:shared) { Gitlab::ImportExport::Shared.new(group) }
  let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group, group_hash: nil) }
  let(:group_json) { ActiveSupport::JSON.decode(IO.read(File.join(shared.export_path, 'group.json'))) }

  shared_examples 'excluded attributes' do
    excluded_attributes = %w[
      id
      name
      path
      owner_id
      parent_id
      created_at
      updated_at
      runners_token
      runners_token_encrypted
      saml_discovery_token
    ]

    before do
      group.add_owner(importer_user)
      setup_import_export_config('group_exports/complex')
    end

    excluded_attributes.each do |excluded_attribute|
      # Interpolate the attribute into the description so each generated
      # example has a unique, self-describing name instead of ten
      # identical ones.
      it "does not allow override of #{excluded_attribute}" do
        expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
      end
    end
  end

  include_examples 'excluded attributes'
end
context 'group.json file access check' do context 'group.json file access check' do
let(:user) { create(:user) } let(:user) { create(:user) }
let!(:group) { create(:group, name: 'group2', path: 'group2') } let!(:group) { create(:group, name: 'group2', path: 'group2') }
......
...@@ -157,9 +157,28 @@ describe Gitlab::ImportExport::GroupTreeSaver do ...@@ -157,9 +157,28 @@ describe Gitlab::ImportExport::GroupTreeSaver do
end end
context 'group attributes' do context 'group attributes' do
it 'does not contain the runners token' do shared_examples 'excluded attributes' do
expect(saved_group_json).not_to include("runners_token" => 'token') excluded_attributes = %w[
id
name
path
owner_id
parent_id
created_at
updated_at
runners_token
runners_token_encrypted
saml_discovery_token
]
excluded_attributes.each do |excluded_attribute|
it 'does not contain excluded attribute' do
expect(saved_group_json).not_to include(excluded_attribute => group.public_send(excluded_attribute))
end
end
end end
include_examples 'excluded attributes'
end end
end end
end end
......
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200210135504_remove_packages_deprecated_dependencies.rb')

describe RemovePackagesDeprecatedDependencies, :migration do
  let(:projects) { table(:projects) }
  let(:packages) { table(:packages_packages) }
  let(:dependencies) { table(:packages_dependencies) }
  let(:dependency_links) { table(:packages_dependency_links) }

  before do
    projects.create!(id: 123, name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
    packages.create!(id: 1, name: 'package', version: '1.0.0', package_type: 4, project_id: 123)

    # Seed five deprecated links (dependency_type 5) and a single valid
    # one (dependency_type 1) on the same package.
    (0...5).each do |index|
      dependencies.create!(id: index, name: "pkg_dependency_#{index}", version_pattern: '~1.0.0')
      dependency_links.create!(package_id: 1, dependency_id: index, dependency_type: 5)
    end

    dependencies.create!(id: 10, name: 'valid_pkg_dependency', version_pattern: '~2.5.0')
    dependency_links.create!(package_id: 1, dependency_id: 10, dependency_type: 1)
  end

  it 'removes all dependency links with type 5' do
    # Only the single valid (type 1) link should survive the migration.
    expect(dependency_links.count).to eq 6
    migrate!
    expect(dependency_links.count).to eq 1
  end
end
...@@ -762,8 +762,10 @@ describe Ci::Build do ...@@ -762,8 +762,10 @@ describe Ci::Build do
let(:needs) { } let(:needs) { }
let!(:final) do let!(:final) do
scheduling_type = needs.present? ? :dag : :stage
create(:ci_build, create(:ci_build,
pipeline: pipeline, name: 'final', pipeline: pipeline, name: 'final', scheduling_type: scheduling_type,
stage_idx: 3, stage: 'deploy', options: { stage_idx: 3, stage: 'deploy', options: {
dependencies: dependencies dependencies: dependencies
} }
......
...@@ -120,4 +120,29 @@ describe Ci::Processable do ...@@ -120,4 +120,29 @@ describe Ci::Processable do
end end
end end
end end
describe '.populate_scheduling_type!' do
  # Factories assign a scheduling_type; the `before` block nils it via
  # update_all to simulate legacy rows created before the column existed.
  let!(:build_without_needs) { create(:ci_build, project: project, pipeline: pipeline) }
  let!(:build_with_needs) { create(:ci_build, project: project, pipeline: pipeline) }
  let!(:needs_relation) { create(:ci_build_need, build: build_with_needs) }
  # Belongs to a different pipeline; must be left untouched by the scoped call.
  let!(:another_build) { create(:ci_build, project: project) }
  before do
    Ci::Processable.update_all(scheduling_type: nil)
  end
  it 'populates scheduling_type of processables' do
    expect do
      pipeline.processables.populate_scheduling_type!
    end.to change(pipeline.processables.where(scheduling_type: nil), :count).from(2).to(0)
    # Reload: update_all/populate write to the DB without refreshing
    # the in-memory objects.
    expect(build_without_needs.reload.scheduling_type).to eq('stage')
    expect(build_with_needs.reload.scheduling_type).to eq('dag')
  end
  it 'does not affect processables from other pipelines' do
    pipeline.processables.populate_scheduling_type!
    expect(another_build.reload.scheduling_type).to be_nil
  end
end
end end
...@@ -37,6 +37,10 @@ describe YoutrackService do ...@@ -37,6 +37,10 @@ describe YoutrackService do
it 'does allow project prefix on the reference' do it 'does allow project prefix on the reference' do
expect(described_class.reference_pattern.match('YT-123')[:issue]).to eq('YT-123') expect(described_class.reference_pattern.match('YT-123')[:issue]).to eq('YT-123')
end end
it 'does not allow issue number to be followed by a letter' do
  # `be_nil` is the idiomatic RSpec matcher and produces a clearer
  # failure message than `eq(nil)`.
  expect(described_class.reference_pattern.match('YT-123A')).to be_nil
end
end end
context 'overriding properties' do context 'overriding properties' do
......
...@@ -949,6 +949,45 @@ describe API::Users do ...@@ -949,6 +949,45 @@ describe API::Users do
end end
end end
# API spec for removing a user's external identity by provider name.
describe "DELETE /users/:id/identities/:provider" do
  let(:test_user) { create(:omniauth_user, provider: 'ldapmain') }
  context 'when unauthenticated' do
    it 'returns authentication error' do
      delete api("/users/#{test_user.id}/identities/ldapmain")
      expect(response).to have_gitlab_http_status(:unauthorized)
    end
  end
  context 'when authenticated' do
    it 'deletes identity of given provider' do
      # Exactly one identity row should be removed.
      expect do
        delete api("/users/#{test_user.id}/identities/ldapmain", admin)
      end.to change { test_user.identities.count }.by(-1)
      expect(response).to have_gitlab_http_status(:no_content)
    end
    # Conditional-request support: stale If-Unmodified-Since yields 412.
    it_behaves_like '412 response' do
      let(:request) { api("/users/#{test_user.id}/identities/ldapmain", admin) }
    end
    it 'returns 404 error if user not found' do
      delete api("/users/0/identities/ldapmain", admin)
      expect(response).to have_gitlab_http_status(:not_found)
      expect(json_response['message']).to eq('404 User Not Found')
    end
    it 'returns 404 error if identity not found' do
      # test_user only has an 'ldapmain' identity, so 'saml' is missing.
      delete api("/users/#{test_user.id}/identities/saml", admin)
      expect(response).to have_gitlab_http_status(:not_found)
      expect(json_response['message']).to eq('404 Identity Not Found')
    end
  end
end
describe "POST /users/:id/keys" do describe "POST /users/:id/keys" do
before do before do
admin admin
......
...@@ -175,5 +175,67 @@ describe Ci::CreatePipelineService do ...@@ -175,5 +175,67 @@ describe Ci::CreatePipelineService do
.to eq('jobs:test_a:needs:need artifacts should be a boolean value') .to eq('jobs:test_a:needs:need artifacts should be a boolean value')
end end
end end
context 'when needs is empty array' do
  let(:config) do
    <<~YAML
      build_a:
        stage: build
        script: ls
      test_a:
        stage: test
        script: ls
      test_b:
        stage: test
        script: ls
        needs: []
      deploy_a:
        stage: deploy
        script: ls
        needs: [test_a]
      deploy_b:
        stage: deploy
        script: ls
        when: manual
        needs: []
    YAML
  end

  it 'creates a pipeline with build_a and test_b pending; deploy_b manual' do
    expect(pipeline).to be_persisted

    # Index the processables once by name instead of scanning the
    # collection with five separate `find` calls.
    processables_by_name = pipeline.processables.index_by(&:name)

    # Jobs declaring `needs: []` start right away (or go manual), while
    # stage-sequenced jobs and jobs with real needs stay 'created'.
    expect(processables_by_name.fetch('build_a').status).to eq('pending')
    expect(processables_by_name.fetch('test_a').status).to eq('created')
    expect(processables_by_name.fetch('test_b').status).to eq('pending')
    expect(processables_by_name.fetch('deploy_a').status).to eq('created')
    expect(processables_by_name.fetch('deploy_b').status).to eq('manual')
  end
end
# An empty hash for `needs:` is invalid config, unlike an empty array.
context 'when needs is empty hash' do
  let(:config) do
    <<~YAML
      regular_job:
        stage: build
        script: echo 'hello'
      invalid_dag_job:
        stage: test
        script: ls
        needs: {}
    YAML
  end
  it 'raises error' do
    # The pipeline records a YAML validation error rather than raising.
    expect(pipeline.yaml_errors)
      .to eq('jobs:invalid_dag_job:needs config can not be an empty hash')
  end
end
end end
end end
...@@ -9,4 +9,10 @@ describe Ci::PipelineProcessing::AtomicProcessingService do ...@@ -9,4 +9,10 @@ describe Ci::PipelineProcessing::AtomicProcessingService do
end end
it_behaves_like 'Pipeline Processing Service' it_behaves_like 'Pipeline Processing Service'
private
# Entry point required by the 'Pipeline Processing Service' shared examples.
# The atomic service has no notion of an initial pass, so `initial_process`
# exists only for interface parity with the legacy service and is
# intentionally ignored here.
def process_pipeline(initial_process: false)
  described_class.new(pipeline).execute
end
end end
...@@ -9,4 +9,10 @@ describe Ci::PipelineProcessing::LegacyProcessingService do ...@@ -9,4 +9,10 @@ describe Ci::PipelineProcessing::LegacyProcessingService do
end end
it_behaves_like 'Pipeline Processing Service' it_behaves_like 'Pipeline Processing Service'
private
# Entry point required by the 'Pipeline Processing Service' shared examples.
# The legacy service distinguishes the first processing pass, so the flag
# is forwarded to #execute.
def process_pipeline(initial_process: false)
  described_class.new(pipeline).execute(initial_process: initial_process)
end
end end
...@@ -710,10 +710,10 @@ shared_examples 'Pipeline Processing Service' do ...@@ -710,10 +710,10 @@ shared_examples 'Pipeline Processing Service' do
context 'when pipeline with needs is created', :sidekiq_inline do context 'when pipeline with needs is created', :sidekiq_inline do
let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) } let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) } let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) } let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) } let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) } let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) } let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) } let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') } let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
...@@ -795,7 +795,7 @@ shared_examples 'Pipeline Processing Service' do ...@@ -795,7 +795,7 @@ shared_examples 'Pipeline Processing Service' do
end end
context 'when one of the jobs is run on a failure' do context 'when one of the jobs is run on a failure' do
let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') } let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) }
let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') } let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
...@@ -837,10 +837,47 @@ shared_examples 'Pipeline Processing Service' do ...@@ -837,10 +837,47 @@ shared_examples 'Pipeline Processing Service' do
end end
end end
end end
end
def process_pipeline context 'when there is a job scheduled with dag but no need (needs: [])' do
described_class.new(pipeline).execute let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
it 'runs deploy_pages without waiting prior stages' do
# Ci::PipelineProcessing::LegacyProcessingService requires :initial_process parameter
expect(process_pipeline(initial_process: true)).to be_truthy
expect(stages).to eq(%w(pending created pending))
expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
linux_build.reset.success!
deploy_pages.reset.success!
expect(stages).to eq(%w(running pending running))
expect(builds.success).to contain_exactly(linux_build, deploy_pages)
expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
linux_rspec.reset.success!
linux_rubocop.reset.success!
mac_build.reset.success!
mac_rspec.reset.success!
mac_rubocop.reset.success!
expect(stages).to eq(%w(success success running))
expect(builds.pending).to contain_exactly(deploy)
end
context 'when ci_dag_support is disabled' do
before do
stub_feature_flags(ci_dag_support: false)
end
it 'does run deploy_pages at the start' do
expect(process_pipeline).to be_truthy
expect(stages).to eq(%w(pending created created))
expect(builds.pending).to contain_exactly(linux_build, mac_build)
end
end
end
end end
def all_builds def all_builds
......
...@@ -33,6 +33,25 @@ describe Ci::ProcessPipelineService do ...@@ -33,6 +33,25 @@ describe Ci::ProcessPipelineService do
end end
end end
context 'with a pipeline which has processables with nil scheduling_type', :clean_gitlab_redis_shared_state do
  let!(:build1) { create_build('build1') }
  let!(:build2) { create_build('build2') }
  let!(:build3) { create_build('build3', scheduling_type: :dag) }
  let!(:build3_on_build2) { create(:ci_build_need, build: build3, name: 'build2') }

  before do
    # Simulate legacy rows: update_all writes NULL straight to the DB
    # without touching the in-memory build objects.
    pipeline.processables.update_all(scheduling_type: nil)
  end

  it 'populates scheduling_type before processing' do
    process_pipeline

    # Reload is required: both the nil-ing above and the repopulation by
    # the service happen in the database only, so reading the in-memory
    # objects would return the stale factory values and pass vacuously.
    expect(build1.reload.scheduling_type).to eq('stage')
    expect(build2.reload.scheduling_type).to eq('stage')
    expect(build3.reload.scheduling_type).to eq('dag')
  end
end
def process_pipeline def process_pipeline
described_class.new(pipeline).execute described_class.new(pipeline).execute
end end
......
...@@ -95,7 +95,7 @@ describe Ci::RetryPipelineService, '#execute' do ...@@ -95,7 +95,7 @@ describe Ci::RetryPipelineService, '#execute' do
before do before do
create_build('build', :success, 0) create_build('build', :success, 0)
create_build('build2', :success, 0) create_build('build2', :success, 0)
test_build = create_build('test', :failed, 1) test_build = create_build('test', :failed, 1, scheduling_type: :dag)
create(:ci_build_need, build: test_build, name: 'build') create(:ci_build_need, build: test_build, name: 'build')
create(:ci_build_need, build: test_build, name: 'build2') create(:ci_build_need, build: test_build, name: 'build2')
end end
...@@ -108,6 +108,21 @@ describe Ci::RetryPipelineService, '#execute' do ...@@ -108,6 +108,21 @@ describe Ci::RetryPipelineService, '#execute' do
expect(build('test')).to be_pending expect(build('test')).to be_pending
expect(build('test').needs.map(&:name)).to match_array(%w(build build2)) expect(build('test').needs.map(&:name)).to match_array(%w(build build2))
end end
# A build scheduled as :dag but with no ci_build_need rows (i.e. `needs: []`)
# must still be retried alongside builds with explicit needs.
context 'when there is a failed DAG test without needs' do
  before do
    create_build('deploy', :failed, 2, scheduling_type: :dag)
  end
  it 'retries the test' do
    service.execute(pipeline)
    expect(build('build')).to be_success
    expect(build('build2')).to be_success
    expect(build('test')).to be_pending
    # The needs-less DAG build is re-enqueued as well.
    expect(build('deploy')).to be_pending
  end
end
end end
context 'when the last stage was skipepd' do context 'when the last stage was skipepd' do
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment