Commit c6b3ec3f authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 903ccf7c
...@@ -14,7 +14,7 @@ module Projects ...@@ -14,7 +14,7 @@ module Projects
token = extract_alert_manager_token(request) token = extract_alert_manager_token(request)
result = notify_service.execute(token) result = notify_service.execute(token)
head(response_status(result)) head result.http_status
end end
private private
...@@ -33,12 +33,6 @@ module Projects ...@@ -33,12 +33,6 @@ module Projects
.new(project, current_user, notification_payload) .new(project, current_user, notification_payload)
end end
def response_status(result)
return :ok if result.success?
result.http_status
end
def notification_payload def notification_payload
params.permit![:notification] params.permit![:notification]
end end
......
...@@ -26,12 +26,9 @@ module Projects ...@@ -26,12 +26,9 @@ module Projects
def notify def notify
token = extract_alert_manager_token(request) token = extract_alert_manager_token(request)
result = notify_service.execute(token)
if notify_service.execute(token) head result.http_status
head :ok
else
head :unprocessable_entity
end
end end
def create def create
......
...@@ -72,7 +72,7 @@ module SubmoduleHelper ...@@ -72,7 +72,7 @@ module SubmoduleHelper
project].join('') project].join('')
url_with_dotgit = url_no_dotgit + '.git' url_with_dotgit = url_no_dotgit + '.git'
url_with_dotgit == Gitlab::Shell.url_to_repo([namespace, '/', project].join('')) url_with_dotgit == Gitlab::RepositoryUrlBuilder.build([namespace, '/', project].join(''))
end end
def relative_self_url?(url) def relative_self_url?(url)
......
...@@ -87,26 +87,15 @@ module HasRepository ...@@ -87,26 +87,15 @@ module HasRepository
end end
def url_to_repo def url_to_repo
Gitlab::Shell.url_to_repo(full_path) ssh_url_to_repo
end end
def ssh_url_to_repo def ssh_url_to_repo
url_to_repo Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :ssh)
end end
def http_url_to_repo def http_url_to_repo
custom_root = Gitlab::CurrentSettings.custom_http_clone_url_root Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :http)
url = if custom_root.present?
Gitlab::Utils.append_path(
custom_root,
web_url(only_path: true)
)
else
web_url
end
"#{url}.git"
end end
def web_url(only_path: nil) def web_url(only_path: nil)
......
# frozen_string_literal: true

# Mixin that schedules a background recalculation of a user's highest
# role whenever the including model is committed.
#
# Including classes must implement:
#   - #update_highest_role?          -> whether a refresh is needed
#   - #update_highest_role_attribute -> the user id to refresh for
module UpdateHighestRole
  extend ActiveSupport::Concern

  HIGHEST_ROLE_LEASE_TIMEOUT = 10.minutes.to_i
  HIGHEST_ROLE_JOB_DELAY = 10.minutes

  included do
    after_commit :update_highest_role
  end

  private

  # Schedule a Sidekiq job to update the highest role for a User.
  #
  # The job is enqueued outside of the surrounding transaction to ensure
  # the changes are committed before the highest role is recalculated.
  # The exclusive lease is deliberately not released on completion, which
  # prevents more than one job from running within the timeout window.
  def update_highest_role
    return unless update_highest_role?

    run_after_commit_or_now do
      if obtain_update_highest_role_lease
        UpdateHighestRoleWorker.perform_in(HIGHEST_ROLE_JOB_DELAY, update_highest_role_attribute)
      else
        # use same logging as ExclusiveLeaseGuard
        # rubocop:disable Gitlab/RailsLogger
        Rails.logger.error('Cannot obtain an exclusive lease. There must be another instance already in execution.')
        # rubocop:enable Gitlab/RailsLogger
      end
    end
  end

  # Attempt to take the per-user exclusive lease; truthy on success.
  def obtain_update_highest_role_lease
    key = "update_highest_role:#{update_highest_role_attribute}"

    Gitlab::ExclusiveLease
      .new(key, timeout: HIGHEST_ROLE_LEASE_TIMEOUT)
      .try_obtain
  end
end
...@@ -16,7 +16,6 @@ module VersionedDescription ...@@ -16,7 +16,6 @@ module VersionedDescription
def save_description_version def save_description_version
self.saved_description_version = nil self.saved_description_version = nil
return unless Feature.enabled?(:save_description_versions, issuing_parent, default_enabled: true)
return unless saved_change_to_description? return unless saved_change_to_description?
unless description_versions.exists? unless description_versions.exists?
......
...@@ -9,6 +9,7 @@ class Member < ApplicationRecord ...@@ -9,6 +9,7 @@ class Member < ApplicationRecord
include Presentable include Presentable
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
include FromUnion include FromUnion
include UpdateHighestRole
attr_accessor :raw_invite_token attr_accessor :raw_invite_token
...@@ -100,7 +101,6 @@ class Member < ApplicationRecord ...@@ -100,7 +101,6 @@ class Member < ApplicationRecord
after_destroy :destroy_notification_setting after_destroy :destroy_notification_setting
after_destroy :post_destroy_hook, unless: :pending? after_destroy :post_destroy_hook, unless: :pending?
after_commit :refresh_member_authorized_projects after_commit :refresh_member_authorized_projects
after_commit :update_highest_role
default_value_for :notification_level, NotificationSetting.levels[:global] default_value_for :notification_level, NotificationSetting.levels[:global]
...@@ -463,21 +463,15 @@ class Member < ApplicationRecord ...@@ -463,21 +463,15 @@ class Member < ApplicationRecord
end end
end end
# Triggers the service to schedule a Sidekiq job to update the highest role def update_highest_role?
# for a User
#
# The job will be called outside of a transaction in order to ensure the changes
# for a Member to be commited before attempting to update the highest role.
# rubocop: disable CodeReuse/ServiceClass
def update_highest_role
return unless user_id.present? return unless user_id.present?
return unless previous_changes[:access_level].present?
run_after_commit_or_now do previous_changes[:access_level].present?
Members::UpdateHighestRoleService.new(user_id).execute end
end
def update_highest_role_attribute
user_id
end end
# rubocop: enable CodeReuse/ServiceClass
end end
Member.prepend_if_ee('EE::Member') Member.prepend_if_ee('EE::Member')
...@@ -49,15 +49,15 @@ class ProjectWiki ...@@ -49,15 +49,15 @@ class ProjectWiki
end end
def url_to_repo def url_to_repo
Gitlab::Shell.url_to_repo(full_path) ssh_url_to_repo
end end
def ssh_url_to_repo def ssh_url_to_repo
url_to_repo Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :ssh)
end end
def http_url_to_repo def http_url_to_repo
@project.http_url_to_repo.sub(%r{git\z}, 'wiki.git') Gitlab::RepositoryUrlBuilder.build(repository.full_path, protocol: :http)
end end
def wiki_base_path def wiki_base_path
......
...@@ -258,10 +258,12 @@ class Snippet < ApplicationRecord ...@@ -258,10 +258,12 @@ class Snippet < ApplicationRecord
super super
end end
override :repository
def repository def repository
@repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::SNIPPET) @repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::SNIPPET)
end end
override :repository_size_checker
def repository_size_checker def repository_size_checker
strong_memoize(:repository_size_checker) do strong_memoize(:repository_size_checker) do
::Gitlab::RepositorySizeChecker.new( ::Gitlab::RepositorySizeChecker.new(
...@@ -271,6 +273,7 @@ class Snippet < ApplicationRecord ...@@ -271,6 +273,7 @@ class Snippet < ApplicationRecord
end end
end end
override :storage
def storage def storage
@storage ||= Storage::Hashed.new(self, prefix: Storage::Hashed::SNIPPET_REPOSITORY_PATH_PREFIX) @storage ||= Storage::Hashed.new(self, prefix: Storage::Hashed::SNIPPET_REPOSITORY_PATH_PREFIX)
end end
...@@ -278,6 +281,7 @@ class Snippet < ApplicationRecord ...@@ -278,6 +281,7 @@ class Snippet < ApplicationRecord
# This is the full_path used to identify the # This is the full_path used to identify the
# the snippet repository. It will be used mostly # the snippet repository. It will be used mostly
# for logging purposes. # for logging purposes.
override :full_path
def full_path def full_path
return unless persisted? return unless persisted?
...@@ -290,10 +294,6 @@ class Snippet < ApplicationRecord ...@@ -290,10 +294,6 @@ class Snippet < ApplicationRecord
end end
end end
def url_to_repo
Gitlab::Shell.url_to_repo(full_path.delete('@'))
end
def repository_storage def repository_storage
snippet_repository&.shard_name || self.class.pick_repository_storage snippet_repository&.shard_name || self.class.pick_repository_storage
end end
......
...@@ -23,6 +23,7 @@ class User < ApplicationRecord ...@@ -23,6 +23,7 @@ class User < ApplicationRecord
include BatchDestroyDependentAssociations include BatchDestroyDependentAssociations
include HasUniqueInternalUsers include HasUniqueInternalUsers
include IgnorableColumns include IgnorableColumns
include UpdateHighestRole
DEFAULT_NOTIFICATION_LEVEL = :participating DEFAULT_NOTIFICATION_LEVEL = :participating
...@@ -238,7 +239,6 @@ class User < ApplicationRecord ...@@ -238,7 +239,6 @@ class User < ApplicationRecord
end end
end end
end end
after_commit :update_highest_role, on: [:create, :update]
after_initialize :set_projects_limit after_initialize :set_projects_limit
...@@ -1854,20 +1854,15 @@ class User < ApplicationRecord ...@@ -1854,20 +1854,15 @@ class User < ApplicationRecord
last_active_at.to_i <= MINIMUM_INACTIVE_DAYS.days.ago.to_i last_active_at.to_i <= MINIMUM_INACTIVE_DAYS.days.ago.to_i
end end
# Triggers the service to schedule a Sidekiq job to update the highest role def update_highest_role?
# for a User return false unless persisted?
#
# The job will be called outside of a transaction in order to ensure the changes
# for a Member to be commited before attempting to update the highest role.
# rubocop: disable CodeReuse/ServiceClass
def update_highest_role
return unless (previous_changes.keys & %w(state user_type ghost)).any?
run_after_commit_or_now do (previous_changes.keys & %w(state user_type ghost)).any?
Members::UpdateHighestRoleService.new(id).execute end
end
def update_highest_role_attribute
id
end end
# rubocop: enable CodeReuse/ServiceClass
end end
User.prepend_if_ee('EE::User') User.prepend_if_ee('EE::User')
...@@ -96,7 +96,7 @@ class MergeRequestWidgetEntity < Grape::Entity ...@@ -96,7 +96,7 @@ class MergeRequestWidgetEntity < Grape::Entity
def can_add_ci_config_path?(merge_request) def can_add_ci_config_path?(merge_request)
merge_request.source_project&.uses_default_ci_config? && merge_request.source_project&.uses_default_ci_config? &&
merge_request.all_pipelines.none? && !merge_request.source_project.has_ci? &&
merge_request.commits_count.positive? && merge_request.commits_count.positive? &&
can?(current_user, :read_build, merge_request.source_project) && can?(current_user, :read_build, merge_request.source_project) &&
can?(current_user, :create_pipeline, merge_request.source_project) can?(current_user, :create_pipeline, merge_request.source_project)
......
# frozen_string_literal: true

module Members
  # Schedules a deferred Sidekiq job that recalculates the highest role
  # held by the given user, guarded by an exclusive lease so that
  # concurrent callers enqueue at most one job per timeout window.
  class UpdateHighestRoleService < ::BaseService
    include ExclusiveLeaseGuard

    LEASE_TIMEOUT = 10.minutes.to_i
    DELAY = 10.minutes

    attr_reader :user_id

    def initialize(user_id)
      @user_id = user_id
    end

    # Enqueues UpdateHighestRoleWorker after DELAY while the lease is held.
    def execute
      try_obtain_lease do
        UpdateHighestRoleWorker.perform_in(DELAY, user_id)
      end
    end

    private

    # Do not release the lease before the timeout to
    # prevent multiple jobs being executed during the
    # defined timeout
    def lease_release?
      false
    end

    def lease_key
      format('update_highest_role:%s', user_id)
    end

    def lease_timeout
      LEASE_TIMEOUT
    end
  end
end
...@@ -46,15 +46,15 @@ module Projects ...@@ -46,15 +46,15 @@ module Projects
end end
def bad_request def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: 400) ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end end
def unauthorized def unauthorized
ServiceResponse.error(message: 'Unauthorized', http_status: 401) ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized)
end end
def forbidden def forbidden
ServiceResponse.error(message: 'Forbidden', http_status: 403) ServiceResponse.error(message: 'Forbidden', http_status: :forbidden)
end end
end end
end end
......
...@@ -58,12 +58,7 @@ module Projects ...@@ -58,12 +58,7 @@ module Projects
end end
def tree_saver_class def tree_saver_class
if ::Feature.enabled?(:streaming_serializer, project, default_enabled: true) Gitlab::ImportExport::Project::TreeSaver
Gitlab::ImportExport::Project::TreeSaver
else
# Once we remove :streaming_serializer feature flag, Project::LegacyTreeSaver should be removed as well
Gitlab::ImportExport::Project::LegacyTreeSaver
end
end end
def uploads_saver def uploads_saver
......
...@@ -8,15 +8,15 @@ module Projects ...@@ -8,15 +8,15 @@ module Projects
include IncidentManagement::Settings include IncidentManagement::Settings
def execute(token) def execute(token)
return false unless valid_payload_size? return bad_request unless valid_payload_size?
return false unless valid_version? return unprocessable_entity unless valid_version?
return false unless valid_alert_manager_token?(token) return unauthorized unless valid_alert_manager_token?(token)
persist_events persist_events
send_alert_email if send_email? send_alert_email if send_email?
process_incident_issues if process_issues? process_incident_issues if process_issues?
true ServiceResponse.success
end end
private private
...@@ -118,6 +118,18 @@ module Projects ...@@ -118,6 +118,18 @@ module Projects
def persist_events def persist_events
CreateEventsService.new(project, nil, params).execute CreateEventsService.new(project, nil, params).execute
end end
def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end
def unauthorized
ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized)
end
def unprocessable_entity
ServiceResponse.error(message: 'Unprocessable Entity', http_status: :unprocessable_entity)
end
end end
end end
end end
......
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
.form-group .form-group
.collapse.js-well-changing-auth .collapse.js-well-changing-auth
.changing-auth-method= icon('spinner spin lg') .changing-auth-method
.well-password-auth.collapse.js-well-password-auth .well-password-auth.collapse.js-well-password-auth
= f.label :password, _("Password"), class: "label-bold" = f.label :password, _("Password"), class: "label-bold"
= f.password_field :password, value: mirror.password, class: 'form-control qa-password', autocomplete: 'new-password' = f.password_field :password, value: mirror.password, class: 'form-control qa-password', autocomplete: 'new-password'
...@@ -4,7 +4,7 @@ ...@@ -4,7 +4,7 @@
.form-group.js-ssh-host-keys-section{ class: ('collapse' unless mirror.ssh_mirror_url?) } .form-group.js-ssh-host-keys-section{ class: ('collapse' unless mirror.ssh_mirror_url?) }
%button.btn.btn-inverted.btn-secondary.inline.js-detect-host-keys.append-right-10{ type: 'button', data: { qa_selector: 'detect_host_keys' } } %button.btn.btn-inverted.btn-secondary.inline.js-detect-host-keys.append-right-10{ type: 'button', data: { qa_selector: 'detect_host_keys' } }
= icon('spinner spin', class: 'js-spinner d-none') .js-spinner.d-none.spinner.mr-1
= _('Detect host keys') = _('Detect host keys')
.fingerprint-ssh-info.js-fingerprint-ssh-info.prepend-top-10.append-bottom-10{ class: ('collapse' unless mirror.ssh_mirror_url?) } .fingerprint-ssh-info.js-fingerprint-ssh-info.prepend-top-10.append-bottom-10{ class: ('collapse' unless mirror.ssh_mirror_url?) }
%label.label-bold %label.label-bold
......
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
.row .row
.form-group.col-md-6 .form-group.col-md-6
= f.label :name, _('Name'), class: 'label-bold' = f.label :name, _('Name'), class: 'label-bold'
= f.text_field :name, class: "form-control qa-personal-access-token-name-field", required: true = f.text_field :name, class: "form-control", required: true, data: { qa_selector: 'personal_access_token_name_field' }
.row .row
.form-group.col-md-6 .form-group.col-md-6
...@@ -21,11 +21,11 @@ ...@@ -21,11 +21,11 @@
= render_if_exists 'personal_access_tokens/callout_max_personal_access_token_lifetime' = render_if_exists 'personal_access_tokens/callout_max_personal_access_token_lifetime'
= f.text_field :expires_at, class: "datepicker form-control", placeholder: 'YYYY-MM-DD' = f.text_field :expires_at, class: "datepicker form-control", placeholder: 'YYYY-MM-DD', data: { qa_selector: 'expiry_date_field' }
.form-group .form-group
= f.label :scopes, _('Scopes'), class: 'label-bold' = f.label :scopes, _('Scopes'), class: 'label-bold'
= render 'shared/tokens/scopes_form', prefix: 'personal_access_token', token: token, scopes: scopes = render 'shared/tokens/scopes_form', prefix: 'personal_access_token', token: token, scopes: scopes
.prepend-top-default .prepend-top-default
= f.submit _('Create %{type} token') % { type: type }, class: "btn btn-success qa-create-token-button" = f.submit _('Create %{type} token') % { type: type }, class: "btn btn-success", data: { qa_selector: 'create_token_button' }
---
title: Add added_lines and removed_lines columns to merge_request_metrics table
merge_request: 28658
author:
type: added
---
title: Use faster streaming serializer for project exports
merge_request: 28925
author:
type: performance
---
title: Migrate .fa-spinner to .spinner for app/views/projects/mirrors
merge_request: 25041
author: nuwe1
type: other
---
title: Use concern instead of service to update highest role
merge_request: 28791
author:
type: other
# frozen_string_literal: true

# Adds added_lines / removed_lines counters to merge_request_metrics.
class AddLineMetricsToMrMetrics < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  # The two columns are managed as a pair; both are plain nullable integers.
  LINE_METRIC_COLUMNS = %i[added_lines removed_lines].freeze

  def up
    with_lock_retries do
      LINE_METRIC_COLUMNS.each do |column|
        add_column :merge_request_metrics, column, :integer
      end
    end
  end

  def down
    with_lock_retries do
      LINE_METRIC_COLUMNS.each do |column|
        remove_column :merge_request_metrics, column, :integer
      end
    end
  end
end
...@@ -3749,7 +3749,9 @@ CREATE TABLE public.merge_request_metrics ( ...@@ -3749,7 +3749,9 @@ CREATE TABLE public.merge_request_metrics (
modified_paths_size integer, modified_paths_size integer,
commits_count integer, commits_count integer,
first_approved_at timestamp with time zone, first_approved_at timestamp with time zone,
first_reassigned_at timestamp with time zone first_reassigned_at timestamp with time zone,
added_lines integer,
removed_lines integer
); );
CREATE SEQUENCE public.merge_request_metrics_id_seq CREATE SEQUENCE public.merge_request_metrics_id_seq
...@@ -12936,6 +12938,7 @@ COPY "schema_migrations" (version) FROM STDIN; ...@@ -12936,6 +12938,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200330123739 20200330123739
20200330132913 20200330132913
20200331220930 20200331220930
20200402123926
20200402135250 20200402135250
20200403184110 20200403184110
20200403185127 20200403185127
......
--- ---
type: reference redirect_to: '../object_storage.md'
--- ---
# Cloud Object Storage This document was moved to [another location](../object_storage.md).
GitLab supports utilizing a Cloud Object Storage service rather than [NFS](nfs.md) for holding
numerous types of data. This is recommended in larger setups as object storage is
typically much more performant, reliable, and scalable.
For configuring GitLab to use Object Storage refer to the following guides:
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
1. Configure [database lookup of SSH keys](../operations/fast_ssh_key_lookup.md)
to eliminate the need for a shared `authorized_keys` file.
1. Configure [object storage for backups](../../raketasks/backup_restore.md#uploading-backups-to-a-remote-cloud-storage).
1. Configure [object storage for job artifacts](../job_artifacts.md#using-object-storage)
including [incremental logging](../job_logs.md#new-incremental-logging-architecture).
1. Configure [object storage for LFS objects](../lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
1. Configure [object storage for uploads](../uploads.md#using-object-storage-core-only).
1. Configure [object storage for merge request diffs](../merge_request_diffs.md#using-object-storage).
1. Configure [object storage for container registry](../packages/container_registry.md#container-registry-storage-driver) (optional feature).
1. Configure [object storage for Mattermost](https://docs.mattermost.com/administration/config-settings.html#file-storage) (optional feature).
1. Configure [object storage for packages](../packages/index.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
1. Configure [object storage for dependency proxy](../packages/dependency_proxy.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
1. Configure [object storage for Pseudonymizer](../pseudonymizer.md#configuration) (optional feature). **(ULTIMATE ONLY)**
NOTE: **Note:**
One current feature of GitLab that still requires a shared directory (NFS) is
[GitLab Pages](../../user/project/pages/index.md).
There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
to eliminate the need for NFS to support GitLab Pages.
...@@ -90,7 +90,7 @@ This configuration relies on valid AWS credentials to be configured already. ...@@ -90,7 +90,7 @@ This configuration relies on valid AWS credentials to be configured already.
Use an object storage option like AWS S3 to store job artifacts. Use an object storage option like AWS S3 to store job artifacts.
DANGER: **Danger:** DANGER: **Danger:**
If you're enabling S3 in [GitLab HA](high_availability/README.md), you will need to have an [NFS mount set up for CI logs and artifacts](high_availability/nfs.md#a-single-nfs-mount) or enable [incremental logging](job_logs.md#new-incremental-logging-architecture). If these settings are not set, you will risk job logs disappearing or not being saved. If you configure GitLab to store CI logs and artifacts on object storage, you must also enable [incremental logging](job_logs.md#new-incremental-logging-architecture). Otherwise, job logs will disappear or not be saved.
#### Object Storage Settings #### Object Storage Settings
......
---
type: reference
---
# Object Storage
GitLab supports using an object storage service for holding numerous types of data.
In a high availability setup, it's recommended over [NFS](high_availability/nfs.md) and
in general it's better in larger setups as object storage is
typically much more performant, reliable, and scalable.
## Options
Object storage options that GitLab has tested, or is aware of customers using include:
- SaaS/Cloud solutions such as [Amazon S3](https://aws.amazon.com/s3/), [Google cloud storage](https://cloud.google.com/storage).
- On-premises hardware and appliances from various storage vendors.
- MinIO. We have [a guide to deploying this](https://docs.gitlab.com/charts/advanced/external-object-storage/minio.html) within our Helm Chart documentation.
## Configuration guides
For configuring GitLab to use Object Storage refer to the following guides:
1. Make sure the [`git` user home directory](https://docs.gitlab.com/omnibus/settings/configuration.html#moving-the-home-directory-for-a-user) is on local disk.
1. Configure [database lookup of SSH keys](operations/fast_ssh_key_lookup.md)
to eliminate the need for a shared `authorized_keys` file.
1. Configure [object storage for backups](../raketasks/backup_restore.md#uploading-backups-to-a-remote-cloud-storage).
1. Configure [object storage for job artifacts](job_artifacts.md#using-object-storage)
including [incremental logging](job_logs.md#new-incremental-logging-architecture).
1. Configure [object storage for LFS objects](lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
1. Configure [object storage for uploads](uploads.md#using-object-storage-core-only).
1. Configure [object storage for merge request diffs](merge_request_diffs.md#using-object-storage).
1. Configure [object storage for Container Registry](packages/container_registry.md#container-registry-storage-driver) (optional feature).
1. Configure [object storage for Mattermost](https://docs.mattermost.com/administration/config-settings.html#file-storage) (optional feature).
1. Configure [object storage for packages](packages/index.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
1. Configure [object storage for Dependency Proxy](packages/dependency_proxy.md#using-object-storage) (optional feature). **(PREMIUM ONLY)**
1. Configure [object storage for Pseudonymizer](pseudonymizer.md#configuration) (optional feature). **(ULTIMATE ONLY)**
1. Configure [object storage for autoscale Runner caching](https://docs.gitlab.com/runner/configuration/autoscale.html#distributed-runners-caching) (optional - for improved performance).
## Warnings, limitations, and known issues
### Use separate buckets
Using separate buckets for each data type is the recommended approach for GitLab.
A limitation of our configuration is that each use of object storage is separately configured.
[We have an issue for improving this](https://gitlab.com/gitlab-org/gitlab/-/issues/23345)
and easily using one bucket with separate folders is one improvement that this might bring.
There is at least one specific issue with using the same bucket:
when GitLab is deployed with the Helm chart restore from backup
[will not properly function](https://docs.gitlab.com/charts/advanced/external-object-storage/#lfs-artifacts-uploads-packages-external-diffs-pseudonymizer)
unless separate buckets are used.
One risk of using a single bucket arises if your organization decides to
migrate GitLab to the Helm deployment in the future. GitLab would run, but the problem with
backups might not be realized until the organization had a critical requirement for the backups to work.
### S3 API compatibility issues
Not all S3 providers [are fully compatible](../raketasks/backup_restore.md#other-s3-providers)
with the Fog library that GitLab uses. Symptoms include:
```plaintext
411 Length Required
```
### GitLab Pages requires NFS
If you're working to [scale out](high_availability/README.md) your GitLab implementation and
one of your requirements is [GitLab Pages](../user/project/pages/index.md) this currently requires
NFS. There is [work in progress](https://gitlab.com/gitlab-org/gitlab-pages/issues/196)
to remove this dependency. In the future, GitLab Pages may use
[object storage](https://gitlab.com/gitlab-org/gitlab/-/issues/208135).
The dependency on disk storage also prevents Pages being deployed using the
[GitLab Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/37).
### Incremental logging is required for CI to use object storage
If you configure GitLab to use object storage for CI logs and artifacts,
[you must also enable incremental logging](job_artifacts.md#using-object-storage).
### Proxy Download
A number of the use cases for object storage allow client traffic to be redirected to the
object storage back end, like when Git clients request large files via LFS or when
downloading CI artifacts and logs.
When the files are stored on local block storage or NFS, GitLab has to act as a proxy.
This is not the default behaviour with object storage.
The `proxy_download` setting controls this behaviour: the default is generally `false`.
Verify this in the documentation for each use case. Set it to `true` so that GitLab proxies
the files.
When not proxying files, GitLab returns an
[HTTP 302 redirect with a pre-signed, time-limited object storage URL](https://gitlab.com/gitlab-org/gitlab/-/issues/32117#note_218532298).
This can result in some of the following problems:
- If GitLab is using non-secure HTTP to access the object storage, clients may generate
`https->http` downgrade errors and refuse to process the redirect. The solution to this
is for GitLab to use HTTPS. LFS, for example, will generate this error:
```plaintext
LFS: lfsapi/client: refusing insecure redirect, https->http
```
- Clients will need to trust the certificate authority that issued the object storage
certificate, or may return common TLS errors such as:
```plaintext
x509: certificate signed by unknown authority
```
- Clients will need network access to the object storage. Errors that might result
if this access is not in place include:
```plaintext
Received status code 403 from server: Forbidden
```
Getting a `403 Forbidden` response is specifically called out on the
[package repository documentation](packages/index.md#using-object-storage)
as a side effect of how some build tools work.
### ETag mismatch
Using the default GitLab settings, some object storage back-ends such as
[MinIO](https://gitlab.com/gitlab-org/gitlab/-/issues/23188)
and [Alibaba](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/1564)
might generate `ETag mismatch` errors.
When using GitLab direct upload, the
[workaround for MinIO](https://gitlab.com/gitlab-org/charts/gitlab/-/issues/1564#note_244497658)
is to use the `--compat` parameter on the server.
We are working on a fix to the GitLab Workhorse component and, in the meantime,
on a workaround to
[allow ETag verification to be disabled](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18175).
...@@ -915,7 +915,7 @@ nicely on different mobile devices. ...@@ -915,7 +915,7 @@ nicely on different mobile devices.
- When providing a command without output, don't prefix the shell command with `$`. - When providing a command without output, don't prefix the shell command with `$`.
- If you need to include triple backticks inside a code block, use four backticks - If you need to include triple backticks inside a code block, use four backticks
for the codeblock fences instead of three. for the codeblock fences instead of three.
- For regular code blocks, always use a highlighting class corresponding to the - For regular fenced code blocks, always use a highlighting class corresponding to the
language for better readability. Examples: language for better readability. Examples:
````markdown ````markdown
...@@ -936,7 +936,7 @@ nicely on different mobile devices. ...@@ -936,7 +936,7 @@ nicely on different mobile devices.
``` ```
```` ````
Syntax highlighting is required for code blocks added to the GitLab documentation. Syntax highlighting is required for fenced code blocks added to the GitLab documentation.
Refer to the table below for the most common language classes, or check the Refer to the table below for the most common language classes, or check the
[complete list](https://github.com/rouge-ruby/rouge/wiki/List-of-supported-languages-and-lexers) [complete list](https://github.com/rouge-ruby/rouge/wiki/List-of-supported-languages-and-lexers)
of language classes available. of language classes available.
......
...@@ -73,7 +73,7 @@ The following items will be exported: ...@@ -73,7 +73,7 @@ The following items will be exported:
- Project and wiki repositories - Project and wiki repositories
- Project uploads - Project uploads
- Project configuration, including services - Project configuration, including services
- Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, - Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, time tracking,
and other project entities and other project entities
- Design Management files and data - Design Management files and data
- LFS objects - LFS objects
......
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Project
      # Serializes a project and its permitted relations into the legacy
      # single-JSON-file export format. Kept for the :streaming_serializer
      # rollout; presumably superseded by Project::TreeSaver — TODO confirm.
      class LegacyTreeSaver
        # Absolute path of the JSON file the project tree is written to.
        attr_reader :full_path

        def initialize(project:, current_user:, shared:, params: {})
          @params = params
          @project = project
          @current_user = current_user
          @shared = shared
          @full_path = File.join(@shared.export_path, ImportExport.project_filename)
        end

        # Serializes the project tree, applies post-processing, and writes it
        # to the shared export path.
        #
        # Returns true on success. Any error is recorded on the shared export
        # state via @shared.error and false is returned instead of raising.
        def save
          project_tree = tree_saver.serialize(@project, reader.project_tree)
          fix_project_tree(project_tree)
          tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
          true
        rescue => e
          @shared.error(e)
          false
        end

        private

        # Aware that the resulting hash needs to be pure-hash and
        # does not include any AR objects anymore, only objects that run `.to_json`
        def fix_project_tree(project_tree)
          # Optional description override supplied by the caller via params.
          if @params[:description].present?
            project_tree['description'] = @params[:description]
          end

          project_tree['project_members'] += group_members_array
        end

        def reader
          @reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
        end

        # Group members serialized as hashes, re-labelled so the importer
        # treats them as project members.
        def group_members_array
          group_members.as_json(reader.group_members_tree).each do |group_member|
            group_member['source_type'] = 'Project' # Make group members project members of the future import
          end
        end

        # Group members not already represented among the project's members;
        # returns [] unless the current user can administer the group.
        def group_members
          return [] unless @current_user.can?(:admin_group, @project.group)

          # We need `.where.not(user_id: nil)` here otherwise when a group has an
          # invitee, it would make the following query return 0 rows since a NULL
          # user_id would be present in the subquery
          # See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
          non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)

          GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
        end

        def tree_saver
          @tree_saver ||= Gitlab::ImportExport::LegacyRelationTreeSaver.new
        end
      end
    end
  end
end
# frozen_string_literal: true
module Gitlab
  # Builds clone URLs for a bare repository path, independent of any model.
  module RepositoryUrlBuilder
    class << self
      # Returns the clone URL for the repository at +path+.
      #
      # @param path [String] repository path relative to the storage root (no ".git")
      # @param protocol [Symbol] :ssh (default) or :http
      # @return [String] the clone URL
      # @raise [NotImplementedError] when the protocol is not supported
      def build(path, protocol: :ssh)
        # Personal snippet repositories live under "@snippets/..." on disk but
        # are exposed as "snippets/..." in URLs.
        # TODO: See https://gitlab.com/gitlab-org/gitlab/-/issues/213021
        url_path = path.sub('@snippets', 'snippets')

        if protocol == :ssh
          ssh_url(url_path)
        elsif protocol == :http
          http_url(url_path)
        else
          raise NotImplementedError.new("No URL builder defined for protocol #{protocol}")
        end
      end

      private

      def ssh_url(path)
        "#{Gitlab.config.gitlab_shell.ssh_path_prefix}#{path}.git"
      end

      def http_url(path)
        # A custom clone root (instance setting) takes precedence over the
        # instance's routed root URL.
        base = Gitlab::CurrentSettings.custom_http_clone_url_root.presence || Gitlab::Routing.url_helpers.root_url
        Gitlab::Utils.append_path(base, "#{path}.git")
      end
    end
  end
end
...@@ -47,14 +47,6 @@ module Gitlab ...@@ -47,14 +47,6 @@ module Gitlab
@version ||= File.read(gitlab_shell_version_file).chomp if File.readable?(gitlab_shell_version_file) @version ||= File.read(gitlab_shell_version_file).chomp if File.readable?(gitlab_shell_version_file)
end end
# Return a SSH url for a given project path
#
# @param [String] full_path project path (URL)
# @return [String] SSH URL
def url_to_repo(full_path)
Gitlab.config.gitlab_shell.ssh_path_prefix + "#{full_path}.git"
end
private private
def gitlab_shell_path def gitlab_shell_path
......
# frozen_string_literal: true # frozen_string_literal: true
require 'date'
module QA module QA
module Page module Page
module Profile module Profile
class PersonalAccessTokens < Page::Base class PersonalAccessTokens < Page::Base
view 'app/views/shared/_personal_access_tokens_form.html.haml' do view 'app/views/shared/_personal_access_tokens_form.html.haml' do
element :expiry_date_field
element :personal_access_token_name_field element :personal_access_token_name_field
element :create_token_button element :create_token_button
end end
...@@ -36,6 +39,13 @@ module QA ...@@ -36,6 +39,13 @@ module QA
find_element(:created_personal_access_token, wait: 30).value find_element(:created_personal_access_token, wait: 30).value
end end
# Fills the personal-access-token expiry date field.
#
# Accepts a Date (converted to its ISO string form) or a 'YYYY-MM-DD'
# string; raises a RuntimeError when the string is not in that format.
# NOTE: the original line collapsed a begin/rescue into
# `expr rescue ArgumentError raise "..."`, which is not valid Ruby — a
# modifier rescue cannot name an exception class.
def fill_expiry_date(date)
  date = date.to_s if date.is_a?(Date)

  begin
    # Validation only: strptime raises ArgumentError on a malformed date.
    Date.strptime(date, '%Y-%m-%d')
  rescue ArgumentError
    raise "Expiry date must be in YYYY-MM-DD format"
  end

  fill_element(:expiry_date_field, date)
end
def has_token_row_for_name?(token_name) def has_token_row_for_name?(token_name)
page.has_css?('tr', text: token_name, wait: 1.0) page.has_css?('tr', text: token_name, wait: 1.0)
end end
......
# frozen_string_literal: true # frozen_string_literal: true
require 'date'
module QA module QA
module Resource module Resource
## ##
...@@ -19,6 +21,8 @@ module QA ...@@ -19,6 +21,8 @@ module QA
Page::Profile::PersonalAccessTokens.perform do |token_page| Page::Profile::PersonalAccessTokens.perform do |token_page|
token_page.fill_token_name(name || 'api-test-token') token_page.fill_token_name(name || 'api-test-token')
token_page.check_api token_page.check_api
# Expire in 2 days just in case the token is created just before midnight
token_page.fill_expiry_date(Date.today + 2)
token_page.click_create_token_button token_page.click_create_token_button
end end
end end
......
...@@ -48,7 +48,7 @@ describe Projects::Alerting::NotificationsController do ...@@ -48,7 +48,7 @@ describe Projects::Alerting::NotificationsController do
end end
context 'when notification service fails' do context 'when notification service fails' do
let(:service_response) { ServiceResponse.error(message: 'Unauthorized', http_status: 401) } let(:service_response) { ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized) }
it 'responds with the service response' do it 'responds with the service response' do
make_request make_request
......
...@@ -158,7 +158,8 @@ describe Projects::Prometheus::AlertsController do ...@@ -158,7 +158,8 @@ describe Projects::Prometheus::AlertsController do
end end
describe 'POST #notify' do describe 'POST #notify' do
let(:notify_service) { spy } let(:service_response) { ServiceResponse.success }
let(:notify_service) { instance_double(Projects::Prometheus::Alerts::NotifyService, execute: service_response) }
before do before do
sign_out(user) sign_out(user)
...@@ -170,7 +171,7 @@ describe Projects::Prometheus::AlertsController do ...@@ -170,7 +171,7 @@ describe Projects::Prometheus::AlertsController do
end end
it 'returns ok if notification succeeds' do it 'returns ok if notification succeeds' do
expect(notify_service).to receive(:execute).and_return(true) expect(notify_service).to receive(:execute).and_return(ServiceResponse.success)
post :notify, params: project_params, session: { as: :json } post :notify, params: project_params, session: { as: :json }
...@@ -178,7 +179,9 @@ describe Projects::Prometheus::AlertsController do ...@@ -178,7 +179,9 @@ describe Projects::Prometheus::AlertsController do
end end
it 'returns unprocessable entity if notification fails' do it 'returns unprocessable entity if notification fails' do
expect(notify_service).to receive(:execute).and_return(false) expect(notify_service).to receive(:execute).and_return(
ServiceResponse.error(message: 'Unprocessable Entity', http_status: :unprocessable_entity)
)
post :notify, params: project_params, session: { as: :json } post :notify, params: project_params, session: { as: :json }
......
...@@ -38,7 +38,11 @@ describe 'Import/Export - project export integration test', :js do ...@@ -38,7 +38,11 @@ describe 'Import/Export - project export integration test', :js do
sign_in(user) sign_in(user)
end end
shared_examples 'export file without sensitive words' do context "with streaming serializer" do
before do
stub_feature_flags(project_export_as_ndjson: false)
end
it 'exports a project successfully', :sidekiq_inline do it 'exports a project successfully', :sidekiq_inline do
export_project_and_download_file(page, project) export_project_and_download_file(page, project)
...@@ -59,27 +63,8 @@ describe 'Import/Export - project export integration test', :js do ...@@ -59,27 +63,8 @@ describe 'Import/Export - project export integration test', :js do
end end
end end
context "with legacy export" do
before do
stub_feature_flags(streaming_serializer: false)
stub_feature_flags(project_export_as_ndjson: false)
end
it_behaves_like "export file without sensitive words"
end
context "with streaming serializer" do
before do
stub_feature_flags(streaming_serializer: true)
stub_feature_flags(project_export_as_ndjson: false)
end
it_behaves_like "export file without sensitive words"
end
context "with ndjson" do context "with ndjson" do
before do before do
stub_feature_flags(streaming_serializer: true)
stub_feature_flags(project_export_as_ndjson: true) stub_feature_flags(project_export_as_ndjson: true)
end end
......
...@@ -283,6 +283,8 @@ MergeRequest::Metrics: ...@@ -283,6 +283,8 @@ MergeRequest::Metrics:
- commits_count - commits_count
- first_approved_at - first_approved_at
- first_reassigned_at - first_reassigned_at
- added_lines
- removed_lines
Ci::Pipeline: Ci::Pipeline:
- id - id
- project_id - project_id
......
# frozen_string_literal: true
require 'spec_helper'
# Specs for Gitlab::RepositoryUrlBuilder.build, which turns a bare
# repository path into a clone URL for a given protocol.
describe Gitlab::RepositoryUrlBuilder do
  describe '.build' do
    using RSpec::Parameterized::TableSyntax

    # Each row pairs a repository-holding factory with a lambda deriving the
    # expected repository path for that kind of container.
    where(:factory, :path_generator) do
      :project | ->(project) { project.full_path }
      :project_snippet | ->(snippet) { "#{snippet.project.full_path}/snippets/#{snippet.id}" }
      :project_wiki | ->(wiki) { "#{wiki.project.full_path}.wiki" }
      :personal_snippet | ->(snippet) { "snippets/#{snippet.id}" }
    end

    with_them do
      # build_stubbed: no database writes are needed to compute URLs.
      let(:container) { build_stubbed(factory) }
      let(:repository) { container.repository }
      let(:path) { path_generator.call(container) }
      # `subject` is the described module itself (RSpec default for modules).
      let(:url) { subject.build(repository.full_path, protocol: protocol) }

      context 'when passing SSH protocol' do
        let(:protocol) { :ssh }

        it 'returns the SSH URL to the repository' do
          expect(url).to eq("#{Gitlab.config.gitlab_shell.ssh_path_prefix}#{path}.git")
        end
      end

      context 'when passing HTTP protocol' do
        let(:protocol) { :http }

        it 'returns the HTTP URL to the repo without a username' do
          expect(url).to eq("#{Gitlab.config.gitlab.url}/#{path}.git")
          expect(url).not_to include('@')
        end

        it 'includes the custom HTTP clone root if set' do
          clone_root = 'https://git.example.com:51234/mygitlab'
          stub_application_setting(custom_http_clone_url_root: clone_root)

          expect(url).to eq("#{clone_root}/#{path}.git")
        end
      end

      context 'when passing an unsupported protocol' do
        let(:protocol) { :ftp }

        it 'raises an exception' do
          expect { url }.to raise_error(NotImplementedError)
        end
      end
    end
  end
end
...@@ -10,14 +10,6 @@ describe Gitlab::Shell do ...@@ -10,14 +10,6 @@ describe Gitlab::Shell do
it { is_expected.to respond_to :remove_repository } it { is_expected.to respond_to :remove_repository }
describe '.url_to_repo' do
let(:full_path) { 'diaspora/disaspora-rails' }
subject { described_class.url_to_repo(full_path) }
it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + full_path + '.git') }
end
describe 'memoized secret_token' do describe 'memoized secret_token' do
let(:secret_file) { 'tmp/tests/.secret_shell_test' } let(:secret_file) { 'tmp/tests/.secret_shell_test' }
let(:link_file) { 'tmp/tests/shell-secret-test/.gitlab_shell_secret' } let(:link_file) { 'tmp/tests/shell-secret-test/.gitlab_shell_secret' }
......
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
require 'spec_helper' require 'spec_helper'
describe Member do describe Member do
include ExclusiveLeaseHelpers
using RSpec::Parameterized::TableSyntax using RSpec::Parameterized::TableSyntax
describe "Associations" do describe "Associations" do
...@@ -593,6 +595,9 @@ describe Member do ...@@ -593,6 +595,9 @@ describe Member do
end end
context 'when after_commit :update_highest_role' do context 'when after_commit :update_highest_role' do
let!(:user) { create(:user) }
let(:user_id) { user.id }
where(:member_type, :source_type) do where(:member_type, :source_type) do
:project_member | :project :project_member | :project
:group_member | :group :group_member | :group
...@@ -600,43 +605,34 @@ describe Member do ...@@ -600,43 +605,34 @@ describe Member do
with_them do with_them do
describe 'create member' do describe 'create member' do
it 'initializes a new Members::UpdateHighestRoleService object' do let!(:source) { create(source_type) }
source = create(source_type) # source owner initializes a new service object too
user = create(:user)
expect(Members::UpdateHighestRoleService).to receive(:new).with(user.id).and_call_original subject { create(member_type, :guest, user: user, source_type => source) }
create(member_type, :guest, user: user, source_type => source) include_examples 'update highest role with exclusive lease'
end
end end
context 'when member exists' do context 'when member exists' do
let!(:member) { create(member_type) } let!(:member) { create(member_type, user: user) }
describe 'update member' do describe 'update member' do
context 'when access level was changed' do context 'when access level was changed' do
it 'initializes a new Members::UpdateHighestRoleService object' do subject { member.update(access_level: Gitlab::Access::GUEST) }
expect(Members::UpdateHighestRoleService).to receive(:new).with(member.user_id).and_call_original
member.update(access_level: Gitlab::Access::GUEST) include_examples 'update highest role with exclusive lease'
end
end end
context 'when access level was not changed' do context 'when access level was not changed' do
it 'does not initialize a new Members::UpdateHighestRoleService object' do subject { member.update(notification_level: NotificationSetting.levels[:disabled]) }
expect(Members::UpdateHighestRoleService).not_to receive(:new).with(member.user_id)
member.update(notification_level: NotificationSetting.levels[:disabled]) include_examples 'does not update the highest role'
end
end end
end end
describe 'destroy member' do describe 'destroy member' do
it 'initializes a new Members::UpdateHighestRoleService object' do subject { member.destroy }
expect(Members::UpdateHighestRoleService).to receive(:new).with(member.user_id).and_call_original
member.destroy include_examples 'update highest role with exclusive lease'
end
end end
end end
end end
......
...@@ -34,7 +34,7 @@ describe ProjectWiki do ...@@ -34,7 +34,7 @@ describe ProjectWiki do
describe "#url_to_repo" do describe "#url_to_repo" do
it "returns the correct ssh url to the repo" do it "returns the correct ssh url to the repo" do
expect(subject.url_to_repo).to eq(Gitlab::Shell.url_to_repo(subject.full_path)) expect(subject.url_to_repo).to eq(Gitlab::RepositoryUrlBuilder.build(subject.repository.full_path, protocol: :ssh))
end end
end end
...@@ -45,27 +45,8 @@ describe ProjectWiki do ...@@ -45,27 +45,8 @@ describe ProjectWiki do
end end
describe "#http_url_to_repo" do describe "#http_url_to_repo" do
let(:project) { create :project } it "returns the correct http url to the repo" do
expect(subject.http_url_to_repo).to eq(Gitlab::RepositoryUrlBuilder.build(subject.repository.full_path, protocol: :http))
context 'when a custom HTTP clone URL root is not set' do
it 'returns the full http url to the repo' do
expected_url = "#{Gitlab.config.gitlab.url}/#{subject.full_path}.git"
expect(project_wiki.http_url_to_repo).to eq(expected_url)
expect(project_wiki.http_url_to_repo).not_to include('@')
end
end
context 'when a custom HTTP clone URL root is set' do
before do
stub_application_setting(custom_http_clone_url_root: 'https://git.example.com:51234')
end
it 'returns the full http url to the repo, with the root replaced with the custom one' do
expected_url = "https://git.example.com:51234/#{subject.full_path}.git"
expect(project_wiki.http_url_to_repo).to eq(expected_url)
end
end end
end end
......
...@@ -735,22 +735,6 @@ describe Snippet do ...@@ -735,22 +735,6 @@ describe Snippet do
end end
end end
describe '#url_to_repo' do
subject { snippet.url_to_repo }
context 'with personal snippet' do
let(:snippet) { create(:personal_snippet) }
it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "snippets/#{snippet.id}.git") }
end
context 'with project snippet' do
let(:snippet) { create(:project_snippet) }
it { is_expected.to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "#{snippet.project.full_path}/snippets/#{snippet.id}.git") }
end
end
describe '#versioned_enabled_for?' do describe '#versioned_enabled_for?' do
let_it_be(:user) { create(:user) } let_it_be(:user) { create(:user) }
......
...@@ -5,6 +5,7 @@ require 'spec_helper' ...@@ -5,6 +5,7 @@ require 'spec_helper'
describe User, :do_not_mock_admin_mode do describe User, :do_not_mock_admin_mode do
include ProjectForksHelper include ProjectForksHelper
include TermsHelper include TermsHelper
include ExclusiveLeaseHelpers
it_behaves_like 'having unique enum values' it_behaves_like 'having unique enum values'
...@@ -4535,17 +4536,22 @@ describe User, :do_not_mock_admin_mode do ...@@ -4535,17 +4536,22 @@ describe User, :do_not_mock_admin_mode do
context 'when after_commit :update_highest_role' do context 'when after_commit :update_highest_role' do
describe 'create user' do describe 'create user' do
it 'initializes a new Members::UpdateHighestRoleService object' do subject { create(:user) }
expect_next_instance_of(Members::UpdateHighestRoleService) do |service|
expect(service).to receive(:execute) it 'schedules a job in the future', :aggregate_failures, :clean_gitlab_redis_shared_state do
allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
allow(instance).to receive(:try_obtain).and_return('uuid')
end end
create(:user) expect(UpdateHighestRoleWorker).to receive(:perform_in).and_call_original
expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
end end
end end
context 'when user already exists' do context 'when user already exists' do
let!(:user) { create(:user) } let!(:user) { create(:user) }
let(:user_id) { user.id }
describe 'update user' do describe 'update user' do
using RSpec::Parameterized::TableSyntax using RSpec::Parameterized::TableSyntax
...@@ -4560,24 +4566,24 @@ describe User, :do_not_mock_admin_mode do ...@@ -4560,24 +4566,24 @@ describe User, :do_not_mock_admin_mode do
with_them do with_them do
context 'when state was changed' do context 'when state was changed' do
it 'initializes a new Members::UpdateHighestRoleService object' do subject { user.update(attributes) }
expect_next_instance_of(Members::UpdateHighestRoleService) do |service|
expect(service).to receive(:execute)
end
user.update(attributes) include_examples 'update highest role with exclusive lease'
end
end end
end end
context 'when state was not changed' do context 'when state was not changed' do
it 'does not initialize a new Members::UpdateHighestRoleService object' do subject { user.update(email: 'newmail@example.com') }
expect(Members::UpdateHighestRoleService).not_to receive(:new)
user.update(email: 'newmail@example.com') include_examples 'does not update the highest role'
end
end end
end end
describe 'destroy user' do
subject { user.destroy }
include_examples 'does not update the highest role'
end
end end
end end
......
...@@ -5,9 +5,9 @@ require 'spec_helper' ...@@ -5,9 +5,9 @@ require 'spec_helper'
describe MergeRequestWidgetEntity do describe MergeRequestWidgetEntity do
include ProjectForksHelper include ProjectForksHelper
let(:project) { create :project, :repository } let(:project) { create :project, :repository }
let(:resource) { create(:merge_request, source_project: project, target_project: project) } let(:resource) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) } let(:request) { double('request', current_user: user, project: project) }
...@@ -54,15 +54,17 @@ describe MergeRequestWidgetEntity do ...@@ -54,15 +54,17 @@ describe MergeRequestWidgetEntity do
end end
describe 'merge_request_add_ci_config_path' do describe 'merge_request_add_ci_config_path' do
let!(:project_auto_devops) { create(:project_auto_devops, :disabled, project: project) }
before do before do
project.add_role(user, role) project.add_role(user, role)
end end
context 'when there are pipelines' do context 'when there is a standard ci config file in the source project' do
let(:role) { :developer } let(:role) { :developer }
before do before do
create(:ci_empty_pipeline, project: project, sha: resource.all_commit_shas.first, ref: resource.source_branch) project.repository.create_file(user, Gitlab::FileDetector::PATTERNS[:gitlab_ci], 'CONTENT', message: 'Add .gitlab-ci.yml', branch_name: 'master')
end end
it 'no ci config path' do it 'no ci config path' do
...@@ -70,7 +72,7 @@ describe MergeRequestWidgetEntity do ...@@ -70,7 +72,7 @@ describe MergeRequestWidgetEntity do
end end
end end
context 'when there are no pipelines' do context 'when there is no standard ci config file in the source project' do
context 'when user has permissions' do context 'when user has permissions' do
let(:role) { :developer } let(:role) { :developer }
...@@ -80,6 +82,16 @@ describe MergeRequestWidgetEntity do ...@@ -80,6 +82,16 @@ describe MergeRequestWidgetEntity do
expect(subject[:merge_request_add_ci_config_path]).to eq(expected_path) expect(subject[:merge_request_add_ci_config_path]).to eq(expected_path)
end end
context 'when auto devops is enabled' do
before do
project_auto_devops.enabled = true
end
it 'returns a blank ci config path' do
expect(subject[:merge_request_add_ci_config_path]).to be_nil
end
end
context 'when source project is missing' do context 'when source project is missing' do
before do before do
resource.source_project = nil resource.source_project = nil
......
# frozen_string_literal: true
require 'spec_helper'
require 'sidekiq/testing'
# Specs for Members::UpdateHighestRoleService: the service debounces
# highest-role recalculation for a user behind an exclusive lease.
describe Members::UpdateHighestRoleService, :clean_gitlab_redis_shared_state do
  include ExclusiveLeaseHelpers

  let_it_be(:user) { create(:user) }
  # The lease is keyed per user id so concurrent updates for the same user collapse.
  let_it_be(:lease_key) { "update_highest_role:#{user.id}" }
  let(:service) { described_class.new(user.id) }

  # NOTE(review): the describe string says '#perform' but the subject calls
  # #execute — confirm which name is intended.
  describe '#perform' do
    subject { service.execute }

    context 'when lease is obtained' do
      # The lease is intentionally NOT released: keeping it until timeout is
      # what debounces repeated updates.
      it 'takes the lease but does not release it', :aggregate_failures do
        expect_to_obtain_exclusive_lease(lease_key, 'uuid', timeout: described_class::LEASE_TIMEOUT)

        subject

        expect(service.exclusive_lease.exists?).to be_truthy
      end

      it 'schedules a job in the future', :aggregate_failures do
        expect(UpdateHighestRoleWorker).to receive(:perform_in).with(described_class::DELAY, user.id).and_call_original

        # Sidekiq fake mode enqueues in memory so queue size can be asserted.
        Sidekiq::Testing.fake! do
          expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
        end
      end
    end

    context 'when lease cannot be obtained' do
      it 'only schedules one job' do
        Sidekiq::Testing.fake! do
          stub_exclusive_lease_taken(lease_key, timeout: described_class::LEASE_TIMEOUT)

          expect { subject }.not_to change(UpdateHighestRoleWorker.jobs, :size)
        end
      end
    end
  end
end
...@@ -20,7 +20,7 @@ describe Projects::Alerting::NotifyService do ...@@ -20,7 +20,7 @@ describe Projects::Alerting::NotifyService do
.exactly(amount).times .exactly(amount).times
Sidekiq::Testing.inline! do Sidekiq::Testing.inline! do
expect(subject.status).to eq(:success) expect(subject).to be_success
end end
end end
end end
...@@ -36,7 +36,7 @@ describe Projects::Alerting::NotifyService do ...@@ -36,7 +36,7 @@ describe Projects::Alerting::NotifyService do
expect(notification_service) expect(notification_service)
.to receive_message_chain(:async, :prometheus_alerts_fired) .to receive_message_chain(:async, :prometheus_alerts_fired)
expect(subject.status).to eq(:success) expect(subject).to be_success
end end
end end
...@@ -45,7 +45,7 @@ describe Projects::Alerting::NotifyService do ...@@ -45,7 +45,7 @@ describe Projects::Alerting::NotifyService do
expect(IncidentManagement::ProcessAlertWorker) expect(IncidentManagement::ProcessAlertWorker)
.not_to receive(:perform_async) .not_to receive(:perform_async)
expect(subject.status).to eq(:success) expect(subject).to be_success
end end
end end
...@@ -54,7 +54,7 @@ describe Projects::Alerting::NotifyService do ...@@ -54,7 +54,7 @@ describe Projects::Alerting::NotifyService do
expect(IncidentManagement::ProcessAlertWorker) expect(IncidentManagement::ProcessAlertWorker)
.not_to receive(:perform_async) .not_to receive(:perform_async)
expect(subject.status).to eq(:error) expect(subject).to be_error
expect(subject.http_status).to eq(http_status) expect(subject.http_status).to eq(http_status)
end end
end end
...@@ -102,7 +102,7 @@ describe Projects::Alerting::NotifyService do ...@@ -102,7 +102,7 @@ describe Projects::Alerting::NotifyService do
.and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError) .and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
end end
it_behaves_like 'does not process incident issues due to error', http_status: 400 it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
end end
end end
...@@ -114,13 +114,13 @@ describe Projects::Alerting::NotifyService do ...@@ -114,13 +114,13 @@ describe Projects::Alerting::NotifyService do
end end
context 'with invalid token' do context 'with invalid token' do
it_behaves_like 'does not process incident issues due to error', http_status: 401 it_behaves_like 'does not process incident issues due to error', http_status: :unauthorized
end end
context 'with deactivated Alerts Service' do context 'with deactivated Alerts Service' do
let!(:alerts_service) { create(:alerts_service, :inactive, project: project) } let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
it_behaves_like 'does not process incident issues due to error', http_status: 403 it_behaves_like 'does not process incident issues due to error', http_status: :forbidden
end end
end end
end end
......
...@@ -26,28 +26,10 @@ describe Projects::ImportExport::ExportService do ...@@ -26,28 +26,10 @@ describe Projects::ImportExport::ExportService do
service.execute service.execute
end end
context 'when :streaming_serializer feature is enabled' do it 'saves the models' do
before do expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
stub_feature_flags(streaming_serializer: true)
end
it 'saves the models' do
expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
service.execute
end
end
context 'when :streaming_serializer feature is disabled' do service.execute
before do
stub_feature_flags(streaming_serializer: false)
end
it 'saves the models' do
expect(Gitlab::ImportExport::Project::LegacyTreeSaver).to receive(:new).and_call_original
service.execute
end
end end
it 'saves the uploads' do it 'saves the uploads' do
......
...@@ -30,7 +30,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -30,7 +30,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
expect(notification_service) expect(notification_service)
.to receive_message_chain(:async, :prometheus_alerts_fired) .to receive_message_chain(:async, :prometheus_alerts_fired)
expect(subject).to eq(true) expect(subject).to be_success
end end
end end
...@@ -44,7 +44,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -44,7 +44,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
.exactly(amount).times .exactly(amount).times
Sidekiq::Testing.inline! do Sidekiq::Testing.inline! do
expect(subject).to eq(true) expect(subject).to be_success
end end
end end
end end
...@@ -54,7 +54,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -54,7 +54,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
expect(IncidentManagement::ProcessPrometheusAlertWorker) expect(IncidentManagement::ProcessPrometheusAlertWorker)
.not_to receive(:perform_async) .not_to receive(:perform_async)
expect(subject).to eq(true) expect(subject).to be_success
end end
end end
...@@ -69,7 +69,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -69,7 +69,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
expect(create_events_service) expect(create_events_service)
.to receive(:execute) .to receive(:execute)
expect(subject).to eq(true) expect(subject).to be_success
end end
end end
...@@ -78,7 +78,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -78,7 +78,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
it_behaves_like 'persists events' it_behaves_like 'persists events'
end end
shared_examples 'no notifications' do shared_examples 'no notifications' do |http_status:|
let(:notification_service) { spy } let(:notification_service) { spy }
let(:create_events_service) { spy } let(:create_events_service) { spy }
...@@ -86,7 +86,8 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -86,7 +86,8 @@ describe Projects::Prometheus::Alerts::NotifyService do
expect(notification_service).not_to receive(:async) expect(notification_service).not_to receive(:async)
expect(create_events_service).not_to receive(:execute) expect(create_events_service).not_to receive(:execute)
expect(subject).to eq(false) expect(subject).to be_error
expect(subject.http_status).to eq(http_status)
end end
end end
...@@ -130,7 +131,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -130,7 +131,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
when :success when :success
it_behaves_like 'notifies alerts' it_behaves_like 'notifies alerts'
when :failure when :failure
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :unauthorized
else else
raise "invalid result: #{result.inspect}" raise "invalid result: #{result.inspect}"
end end
...@@ -140,7 +141,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -140,7 +141,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
context 'without project specific cluster' do context 'without project specific cluster' do
let!(:cluster) { create(:cluster, enabled: true) } let!(:cluster) { create(:cluster, enabled: true) }
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :unauthorized
end end
context 'with manual prometheus installation' do context 'with manual prometheus installation' do
...@@ -171,7 +172,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -171,7 +172,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
when :success when :success
it_behaves_like 'notifies alerts' it_behaves_like 'notifies alerts'
when :failure when :failure
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :unauthorized
else else
raise "invalid result: #{result.inspect}" raise "invalid result: #{result.inspect}"
end end
...@@ -193,7 +194,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -193,7 +194,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
expect_any_instance_of(NotificationService) expect_any_instance_of(NotificationService)
.not_to receive(:async) .not_to receive(:async)
expect(subject).to eq(true) expect(subject).to be_success
end end
end end
...@@ -211,7 +212,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -211,7 +212,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
it 'does not send notification' do it 'does not send notification' do
expect(NotificationService).not_to receive(:new) expect(NotificationService).not_to receive(:new)
expect(subject).to eq(true) expect(subject).to be_success
end end
end end
end end
...@@ -260,19 +261,19 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -260,19 +261,19 @@ describe Projects::Prometheus::Alerts::NotifyService do
context 'without version' do context 'without version' do
let(:payload) { {} } let(:payload) { {} }
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :unprocessable_entity
end end
context 'when version is not "4"' do context 'when version is not "4"' do
let(:payload) { { 'version' => '5' } } let(:payload) { { 'version' => '5' } }
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :unprocessable_entity
end end
context 'with missing alerts' do context 'with missing alerts' do
let(:payload) { { 'version' => '4' } } let(:payload) { { 'version' => '4' } }
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :unauthorized
end end
context 'when the payload is too big' do context 'when the payload is too big' do
...@@ -283,7 +284,7 @@ describe Projects::Prometheus::Alerts::NotifyService do ...@@ -283,7 +284,7 @@ describe Projects::Prometheus::Alerts::NotifyService do
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object) allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
end end
it_behaves_like 'no notifications' it_behaves_like 'no notifications', http_status: :bad_request
it 'does not process issues' do it 'does not process issues' do
expect(IncidentManagement::ProcessPrometheusAlertWorker) expect(IncidentManagement::ProcessPrometheusAlertWorker)
......
# frozen_string_literal: true
# requires a subject and a user_id
# Shared behavior for actions that must trigger a (lease-guarded) highest-role
# recalculation. Requires the including spec to define a `subject` (the action)
# and a `user_id`.
RSpec.shared_examples 'update highest role with exclusive lease' do
  include ExclusiveLeaseHelpers

  let(:lease_key) { "update_highest_role:#{user_id}" }

  before do
    allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
  end

  context 'when lease is obtained', :clean_gitlab_redis_shared_state do
    # The lease is deliberately kept (never cancelled) so further updates
    # within the timeout window are debounced.
    it 'takes the lease but does not release it', :aggregate_failures do
      expect_to_obtain_exclusive_lease(lease_key, 'uuid', timeout: described_class::HIGHEST_ROLE_LEASE_TIMEOUT)
      expect(Gitlab::ExclusiveLease).not_to receive(:cancel).with(lease_key, 'uuid')

      subject
    end

    it 'schedules a job in the future', :aggregate_failures do
      # Force lease acquisition to succeed so the worker gets scheduled.
      allow_next_instance_of(Gitlab::ExclusiveLease) do |instance|
        allow(instance).to receive(:try_obtain).and_return('uuid')
      end

      expect(UpdateHighestRoleWorker).to receive(:perform_in).with(described_class::HIGHEST_ROLE_JOB_DELAY, user_id).and_call_original

      expect { subject }.to change(UpdateHighestRoleWorker.jobs, :size).by(1)
    end
  end

  context 'when lease cannot be obtained', :clean_gitlab_redis_shared_state do
    it 'only schedules one job' do
      stub_exclusive_lease_taken(lease_key, timeout: described_class::HIGHEST_ROLE_LEASE_TIMEOUT)

      expect { subject }.not_to change(UpdateHighestRoleWorker.jobs, :size)
    end
  end
end
# Shared examples verifying that the including spec's `subject` does NOT
# attempt a highest-role update: no exclusive lease acquisition is tried.
#
# Requirements on the including context:
#   - `subject` — the action that must not trigger the update
#   - `user_id` — id used to build the lease key
#   - `described_class` must define HIGHEST_ROLE_LEASE_TIMEOUT
RSpec.shared_examples 'does not update the highest role' do
  it 'does not obtain an exclusive lease' do
    allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
    # Stub the lease so we can assert acquisition is never attempted.
    lease = stub_exclusive_lease("update_highest_role:#{user_id}", 'uuid', timeout: described_class::HIGHEST_ROLE_LEASE_TIMEOUT)
    expect(lease).not_to receive(:try_obtain)
    subject
  end
end
...@@ -9,54 +9,36 @@ RSpec.shared_examples 'versioned description' do ...@@ -9,54 +9,36 @@ RSpec.shared_examples 'versioned description' do
let(:factory_name) { described_class.name.underscore.to_sym } let(:factory_name) { described_class.name.underscore.to_sym }
let!(:model) { create(factory_name, description: 'Original description') } let!(:model) { create(factory_name, description: 'Original description') }
context 'when feature is enabled' do context 'when description was changed' do
before do before do
stub_feature_flags(save_description_versions: true) model.update!(description: 'New description')
end end
context 'when description was changed' do it 'saves the old and new description for the first update' do
before do expect(model.description_versions.first.description).to eq('Original description')
model.update!(description: 'New description') expect(model.description_versions.last.description).to eq('New description')
end end
it 'saves the old and new description for the first update' do
expect(model.description_versions.first.description).to eq('Original description')
expect(model.description_versions.last.description).to eq('New description')
end
it 'only saves the new description for subsequent updates' do
expect { model.update!(description: 'Another description') }.to change { model.description_versions.count }.by(1)
expect(model.description_versions.last.description).to eq('Another description')
end
it 'sets the new description version to `saved_description_version`' do it 'only saves the new description for subsequent updates' do
expect(model.saved_description_version).to eq(model.description_versions.last) expect { model.update!(description: 'Another description') }.to change { model.description_versions.count }.by(1)
end
it 'clears `saved_description_version` after another save that does not change description' do expect(model.description_versions.last.description).to eq('Another description')
model.save! end
expect(model.saved_description_version).to be_nil it 'sets the new description version to `saved_description_version`' do
end expect(model.saved_description_version).to eq(model.description_versions.last)
end end
context 'when description was not changed' do it 'clears `saved_description_version` after another save that does not change description' do
it 'does not save any description version' do model.save!
expect { model.save! }.not_to change { model.description_versions.count }
expect(model.saved_description_version).to be_nil expect(model.saved_description_version).to be_nil
end
end end
end end
context 'when feature is disabled' do context 'when description was not changed' do
before do
stub_feature_flags(save_description_versions: false)
end
it 'does not save any description version' do it 'does not save any description version' do
expect { model.update!(description: 'New description') }.not_to change { model.description_versions.count } expect { model.save! }.not_to change { model.description_versions.count }
expect(model.saved_description_version).to be_nil expect(model.saved_description_version).to be_nil
end end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment