Commit 3cd08f4b authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent dd4bee69
@@ -67,7 +67,7 @@ export default {
       'setShowErrorBanner',
     ]),
     chartHasData(chart) {
-      return chart.metrics.some(metric => this.metricsWithData().includes(metric.metric_id));
+      return chart.metrics.some(metric => this.metricsWithData().includes(metric.metricId));
     },
     onSidebarMutation() {
       setTimeout(() => {
...
@@ -13,11 +13,12 @@ export const gqClient = createGqClient(
 /**
  * Metrics loaded from project-defined dashboards do not have a metric_id.
  * This method creates a unique ID combining metric_id and id, if either is present.
- * This is hopefully a temporary solution until BE processes metrics before passing to fE
+ * This is hopefully a temporary solution until BE processes metrics before passing to FE
  * @param {Object} metric - metric
  * @returns {Object} - normalized metric with a uniqueID
  */
-export const uniqMetricsId = metric => `${metric.metric_id}_${metric.id}`;
+// eslint-disable-next-line babel/camelcase
+export const uniqMetricsId = ({ metric_id, id }) => `${metric_id}_${id}`;

 /**
  * Project path has a leading slash that doesn't work well
@@ -68,10 +69,6 @@ const mapToMetricsViewModel = (metrics, defaultLabel) =>
     queryRange: query_range,
     prometheusEndpointPath: prometheus_endpoint_path,
     metricId: uniqMetricsId({ metric_id, id }),
-    // `metric_id` is used by embed.vue, keeping this duplicated.
-    // https://gitlab.com/gitlab-org/gitlab/issues/37492
-    metric_id: uniqMetricsId({ metric_id, id }),
     ...metric,
   }));
...
@@ -48,7 +48,7 @@ module Projects
     end

     def elasticsearch_params
-      params.permit(:container_name, :pod_name, :search, :start, :end)
+      params.permit(:container_name, :pod_name, :search, :start, :end, :cursor)
     end

     def environment
...
@@ -77,7 +77,7 @@ class Issue < ApplicationRecord
   scope :counts_by_state, -> { reorder(nil).group(:state_id).count }

-  ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
+  ignore_column :state, remove_with: '12.10', remove_after: '2020-03-22'

   after_commit :expire_etag_cache, unless: :importing?
   after_save :ensure_metrics, unless: :importing?
...
@@ -261,7 +261,7 @@ class MergeRequest < ApplicationRecord
     includes(:metrics)
   end

-  ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
+  ignore_column :state, remove_with: '12.10', remove_after: '2020-03-22'

   after_save :keep_around_commit, unless: :importing?
...
@@ -280,21 +280,17 @@ class JiraService < IssueTrackerService
     return unless client_url.present?

     jira_request do
-      create_issue_link(issue, remote_link_props)
-      create_issue_comment(issue, message)
+      remote_link = find_remote_link(issue, remote_link_props[:object][:url])
+
+      create_issue_comment(issue, message) unless remote_link
+
+      remote_link ||= issue.remotelink.build
+      remote_link.save!(remote_link_props)

       log_info("Successfully posted", client_url: client_url)
       "SUCCESS: Successfully posted to #{client_url}."
     end
   end

-  def create_issue_link(issue, remote_link_props)
-    remote_link = find_remote_link(issue, remote_link_props[:object][:url])
-    remote_link ||= issue.remotelink.build
-    remote_link.save!(remote_link_props)
-  end
-
   def create_issue_comment(issue, message)
     return unless comment_on_event_enabled
...
@@ -2,6 +2,7 @@
 class X509Certificate < ApplicationRecord
   include X509SerialNumberAttribute
+  include AfterCommitQueue

   x509_serial_number_attribute :serial_number
@@ -25,8 +26,14 @@ class X509Certificate < ApplicationRecord
   validates :x509_issuer_id, presence: true

+  after_commit :mark_commit_signatures_unverified
+
   def self.safe_create!(attributes)
     create_with(attributes)
       .safe_find_or_create_by!(subject_key_identifier: attributes[:subject_key_identifier])
   end
+
+  def mark_commit_signatures_unverified
+    X509CertificateRevokeWorker.perform_async(self.id) if revoked?
+  end
 end
@@ -10,8 +10,6 @@ module PodLogs
     CACHE_KEY_GET_POD_LOG = 'get_pod_log'
     K8S_NAME_MAX_LENGTH = 253

-    SUCCESS_RETURN_KEYS = %i(status logs pod_name container_name pods).freeze
-
     def id
       cluster.id
     end
@@ -49,6 +47,10 @@ module PodLogs
       %w(pod_name container_name)
     end

+    def success_return_keys
+      %i(status logs pod_name container_name pods)
+    end
+
     def check_arguments(result)
       return error(_('Cluster does not exist')) if cluster.nil?
       return error(_('Namespace is empty')) if namespace.blank?
@@ -122,7 +124,7 @@ module PodLogs
     end

     def filter_return_keys(result)
-      result.slice(*SUCCESS_RETURN_KEYS)
+      result.slice(*success_return_keys)
     end

     def filter_params(params)
...
@@ -10,6 +10,7 @@ module PodLogs
       :check_container_name,
       :check_times,
       :check_search,
+      :check_cursor,
       :pod_logs,
       :filter_return_keys
@@ -18,7 +19,11 @@ module PodLogs
     private

     def valid_params
-      %w(pod_name container_name search start end)
+      super + %w(search start end cursor)
+    end
+
+    def success_return_keys
+      super + %i(cursor)
     end

     def check_times(result)
@@ -36,19 +41,28 @@ module PodLogs
       success(result)
     end

+    def check_cursor(result)
+      result[:cursor] = params['cursor'] if params.key?('cursor')
+
+      success(result)
+    end
+
     def pod_logs(result)
       client = cluster&.application_elastic_stack&.elasticsearch_client
       return error(_('Unable to connect to Elasticsearch')) unless client

-      result[:logs] = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
+      response = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
         namespace,
         result[:pod_name],
-        result[:container_name],
-        result[:search],
-        result[:start],
-        result[:end]
+        container_name: result[:container_name],
+        search: result[:search],
+        start_time: result[:start],
+        end_time: result[:end],
+        cursor: result[:cursor]
       )

+      result.merge!(response)
+
       success(result)
     rescue Elasticsearch::Transport::Transport::ServerError => e
       ::Gitlab::ErrorTracking.track_exception(e)
@@ -58,6 +72,8 @@ module PodLogs
         # there is no method on the exception other than the class name to determine the type of error encountered.
         status_code: e.class.name.split('::').last
       })
+    rescue ::Gitlab::Elasticsearch::Logs::InvalidCursor
+      error(_('Invalid cursor value provided'))
     end
   end
 end
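Taken together, the wiring above reads an optional `cursor` param, forwards it to Elasticsearch, and returns the next cursor alongside the logs. A rough sketch of one paginated fetch cycle (the service constructor arguments are assumed rather than shown in this diff; `cluster`, `namespace`, and `process` are placeholders):

```ruby
# Hypothetical driver loop: fetch pages until the cursor runs out.
params = { 'pod_name' => 'production-pod', 'container_name' => 'app' }

loop do
  result = ::PodLogs::ElasticsearchService.new(cluster, namespace, params: params).execute

  break unless result[:status] == :success

  process.call(result[:logs])        # consume this page of log lines
  break if result[:cursor].nil?      # nothing more to fetch

  params['cursor'] = result[:cursor] # check_cursor picks this up next cycle
end
```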
@@ -5,19 +5,12 @@ module Projects
     include Gitlab::ShellAdapter

     Error = Class.new(StandardError)
-    RepositoryAlreadyMoved = Class.new(StandardError)

     def initialize(project)
       @project = project
     end

     def execute(new_repository_storage_key)
-      # Raising an exception is a little heavy handed but this behavior (doing
-      # nothing if the repo is already on the right storage) prevents data
-      # loss, so it is valuable for us to be able to observe it via the
-      # exception.
-      raise RepositoryAlreadyMoved if project.repository_storage == new_repository_storage_key
-
       mirror_repositories(new_repository_storage_key)

       mark_old_paths_for_archive
@@ -30,7 +23,7 @@ module Projects

       success
-    rescue Error => e
+    rescue Error, ArgumentError, Gitlab::Git::BaseError => e
       project.update(repository_read_only: false)

       Gitlab::ErrorTracking.track_exception(e, project_path: project.full_path)
@@ -65,10 +58,7 @@ module Projects
         raw_repository.gl_repository,
         full_path)

-      unless new_repository.fetch_repository_as_mirror(raw_repository)
-        raise Error, s_('UpdateRepositoryStorage|Failed to fetch %{type} repository as mirror') % { type: type.name }
-      end
+      new_repository.replicate(raw_repository)

       new_checksum = new_repository.checksum

       if checksum != new_checksum
...
# frozen_string_literal: true
class X509CertificateRevokeService
def execute(certificate)
return unless certificate.revoked?
certificate.x509_commit_signatures.update_all(verification_status: :unverified)
end
end
@@ -11,4 +11,15 @@ class AttachmentUploader < GitlabUploader
   def dynamic_segment
     File.join(model.class.underscore, mounted_as.to_s, model.id.to_s)
   end
+
+  def mounted_as
+    # Geo fails to sync attachments on Note, and LegacyDiffNotes with missing mount_point.
+    #
+    # See https://gitlab.com/gitlab-org/gitlab/-/issues/209752 for more details.
+    if model.class.underscore.include?('note')
+      super || 'attachment'
+    else
+      super
+    end
+  end
 end
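The effect of the override on generated paths, sketched under the assumption that `File.join` collapses the empty segment a `nil` mount point produces:

```ruby
# For a Note attachment whose stored mount_point is missing:
#
#   before: File.join('note', nil.to_s, '42')     # => "note/42" (mount segment lost)
#   after:  mounted_as falls back to 'attachment'
#           File.join('note', 'attachment', '42') # => "note/attachment/42"
#
# giving Geo a stable, valid path to sync against.
```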
 .gpg-popover-certificate-details
   %strong= _('Certificate Subject')
+  - if signature.x509_certificate.revoked?
+    %strong.cred= _('(revoked)')
   %ul
     - x509_subject(signature.x509_certificate.subject, ["CN", "O"]).map do |key, value|
       %li= key + "=" + value
...
@@ -1291,3 +1291,10 @@
   :resource_boundary: :unknown
   :weight: 1
   :idempotent:
+- :name: x509_certificate_revoke
+  :feature_category: :source_code_management
+  :has_external_dependencies:
+  :urgency: :low
+  :resource_boundary: :unknown
+  :weight: 1
+  :idempotent: true
@@ -9,7 +9,5 @@ class ProjectUpdateRepositoryStorageWorker # rubocop:disable Scalability/Idempot
     project = Project.find(project_id)

     ::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
-  rescue ::Projects::UpdateRepositoryStorageService::RepositoryAlreadyMoved
-    Rails.logger.info "#{self.class}: repository already moved: #{project}" # rubocop:disable Gitlab/RailsLogger
   end
 end
# frozen_string_literal: true
class X509CertificateRevokeWorker
include ApplicationWorker
feature_category :source_code_management
idempotent!
def perform(certificate_id)
return unless certificate_id
X509Certificate.find_by_id(certificate_id).try do |certificate|
X509CertificateRevokeService.new.execute(certificate)
end
end
end
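Read together, the model callback, worker, and service above form one asynchronous chain. A sketch of the flow (how a certificate gets flagged as revoked is assumed here; only the callback chain is part of this commit):

```ruby
certificate = X509Certificate.find(42) # hypothetical record

# Assumed: some admin action marks the certificate revoked so `revoked?` is true.
certificate.update!(certificate_status: :revoked)

# after_commit fires mark_commit_signatures_unverified, which enqueues
# X509CertificateRevokeWorker.perform_async(42). The worker looks the record
# back up and calls X509CertificateRevokeService#execute, which bulk-updates
# the related x509_commit_signatures to verification_status: :unverified.
```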
---
title: Migrate mentions for commit notes to commit_user_mentions DB table
merge_request: 23859
author:
type: changed
---
title: Ensure valid mount point is used by attachments on notes
merge_request: 26849
author:
type: fixed
---
title: Generate JSON-formatted a11y CI artifacts
merge_request: 26687
author:
type: added
---
title: Remove state column from issues and merge_requests
merge_request: 25561
author:
type: deprecated
---
title: Add functionality to revoke an X509Certificate and update related X509CommitSignatures
merge_request: 24889
author: Roger Meier
type: added
---
title: Remove promoted notes temporary index
merge_request: 26896
author:
type: other
---
title: Use ReplicateRepository when moving repo storage
merge_request: 26550
author:
type: changed
@@ -546,6 +546,9 @@ Gitlab.ee do
   Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= Settingslogic.new({})
   Settings.cron_jobs['elastic_index_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
   Settings.cron_jobs['elastic_index_bulk_cron_worker']['job_class'] ||= 'ElasticIndexBulkCronWorker'
+  Settings.cron_jobs['sync_seat_link_worker'] ||= Settingslogic.new({})
+  Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} 0 * * *"
+  Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'
 end

 #
...
@@ -133,6 +133,9 @@ Rails.application.routes.draw do
         get :kill
       end
     end
+
+    # Notification settings
+    resources :notification_settings, only: [:create, :update]
   end

   concern :clusterable do
@@ -181,9 +184,6 @@ Rails.application.routes.draw do
   # Spam reports
   resources :abuse_reports, only: [:new, :create]

-  # Notification settings
-  resources :notification_settings, only: [:create, :update]
-
   resources :groups, only: [:index, :new, :create] do
     post :preview_markdown
   end
...
@@ -232,6 +232,8 @@
   - 2
 - - service_desk_email_receiver
   - 1
+- - sync_seat_link_request
+  - 1
 - - system_hook_push
   - 1
 - - todos_destroyer
@@ -248,3 +250,5 @@
   - 1
 - - web_hook
   - 1
+- - x509_certificate_revoke
+  - 1
# frozen_string_literal: true
class CleanupEmptyCommitUserMentions < ActiveRecord::Migration[5.2]
DOWNTIME = false
BATCH_SIZE = 10_000
class CommitUserMention < ActiveRecord::Base
include EachBatch
self.table_name = 'commit_user_mentions'
end
def up
# cleanup commit user mentions with no actual mentions,
# re https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24586#note_285982468
CommitUserMention
.where(mentioned_users_ids: nil)
.where(mentioned_groups_ids: nil)
.where(mentioned_projects_ids: nil)
.each_batch(of: BATCH_SIZE) do |batch|
batch.delete_all
end
end
def down
# no-op
end
end
# frozen_string_literal: true
class MigrateCommitNotesMentionsToDb < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
DELAY = 3.minutes.to_i
BATCH_SIZE = 1_000
MIGRATION = 'UserMentions::CreateResourceUserMention'
QUERY_CONDITIONS = "note LIKE '%@%'::text AND notes.noteable_type = 'Commit' AND commit_user_mentions.commit_id IS NULL"
JOIN = 'LEFT JOIN commit_user_mentions ON notes.id = commit_user_mentions.note_id'
class Note < ActiveRecord::Base
include EachBatch
self.table_name = 'notes'
end
def up
Note
.joins(JOIN)
.where(QUERY_CONDITIONS)
.each_batch(of: BATCH_SIZE) do |batch, index|
range = batch.pluck(Arel.sql('MIN(notes.id)'), Arel.sql('MAX(notes.id)')).first
migrate_in(index * DELAY, MIGRATION, ['Commit', JOIN, QUERY_CONDITIONS, true, *range])
end
end
def down
# no-op
# temporary index is to be dropped in a different migration in an upcoming release:
# https://gitlab.com/gitlab-org/gitlab/issues/196842
end
end
# frozen_string_literal: true
class RemoveIssueStateIndexes < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
# issues state column is ignored since 12.6 and will be removed on a following migration
def up
remove_concurrent_index_by_name :issues, 'index_issues_on_state'
remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_created_at_and_id_and_state'
remove_concurrent_index_by_name :issues, 'idx_issues_on_project_id_and_due_date_and_id_and_state_partial'
remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_rel_position_and_state_and_id'
remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_updated_at_and_id_and_state'
end
def down
add_concurrent_index :issues, :state, name: 'index_issues_on_state'
add_concurrent_index :issues,
[:project_id, :created_at, :id, :state],
name: 'index_issues_on_project_id_and_created_at_and_id_and_state'
add_concurrent_index :issues,
[:project_id, :due_date, :id, :state],
where: 'due_date IS NOT NULL',
name: 'idx_issues_on_project_id_and_due_date_and_id_and_state_partial'
add_concurrent_index :issues,
[:project_id, :relative_position, :state, :id],
order: { id: :desc },
name: 'index_issues_on_project_id_and_rel_position_and_state_and_id'
add_concurrent_index :issues,
[:project_id, :updated_at, :id, :state],
name: 'index_issues_on_project_id_and_updated_at_and_id_and_state'
end
end
# frozen_string_literal: true
class RemoveMergeRequestStateIndexes < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
# merge_requests state column is ignored since 12.6 and will be removed on a following migration
def up
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_id_and_merge_jid'
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_source_project_and_branch_state_opened'
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_state_and_merge_status'
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_target_project_id_and_iid_opened'
end
def down
add_concurrent_index :merge_requests,
[:id, :merge_jid],
where: "merge_jid IS NOT NULL and state = 'locked'",
name: 'index_merge_requests_on_id_and_merge_jid'
add_concurrent_index :merge_requests,
[:source_project_id, :source_branch],
where: "state = 'opened'",
name: 'index_merge_requests_on_source_project_and_branch_state_opened'
add_concurrent_index :merge_requests,
[:state, :merge_status],
where: "state = 'opened' AND merge_status = 'can_be_merged'",
name: 'index_merge_requests_on_state_and_merge_status'
add_concurrent_index :merge_requests,
[:target_project_id, :iid],
where: "state = 'opened'",
name: 'index_merge_requests_on_target_project_id_and_iid_opened'
end
end
# frozen_string_literal: true
class RemoveStateFromIssues < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless issue_state_column_exists?
# Ignored in 12.6 - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19574
with_lock_retries do
remove_column :issues, :state, :string
end
end
def down
return if issue_state_column_exists?
with_lock_retries do
add_column :issues, :state, :string # rubocop:disable Migration/AddLimitToStringColumns
end
end
private
def issue_state_column_exists?
column_exists?(:issues, :state)
end
end
# frozen_string_literal: true
class RemoveStateFromMergeRequests < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless merge_requests_state_column_exists?
# Ignored in 12.6 - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19574
with_lock_retries do
remove_column :merge_requests, :state, :string
end
end
def down
return if merge_requests_state_column_exists?
with_lock_retries do
add_column :merge_requests, :state, :string # rubocop:disable Migration/AddLimitToStringColumns
end
end
private
def merge_requests_state_column_exists?
column_exists?(:merge_requests, :state)
end
end
# frozen_string_literal: true
# Removes temporary index to fix orphan promoted issues.
# For more information check: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23916
class RemoveTemporaryPromotedNotesIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :notes, 'tmp_idx_on_promoted_notes'
end
def down
add_concurrent_index :notes,
:note,
where: "noteable_type = 'Issue' AND system IS TRUE AND note LIKE 'promoted to epic%'",
name: 'tmp_idx_on_promoted_notes'
end
end
@@ -2184,7 +2184,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
     t.datetime "updated_at"
     t.text "description"
     t.integer "milestone_id"
-    t.string "state"
     t.integer "iid"
     t.integer "updated_by_id"
     t.integer "weight"
@@ -2216,18 +2215,13 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
     t.index ["lock_version"], name: "index_issues_on_lock_version", where: "(lock_version IS NULL)"
     t.index ["milestone_id"], name: "index_issues_on_milestone_id"
     t.index ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)"
-    t.index ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state"
     t.index ["project_id", "created_at", "id", "state_id"], name: "idx_issues_on_project_id_and_created_at_and_id_and_state_id"
-    t.index ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)"
     t.index ["project_id", "due_date", "id", "state_id"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_id", where: "(due_date IS NOT NULL)"
     t.index ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true
-    t.index ["project_id", "relative_position", "state", "id"], name: "index_issues_on_project_id_and_rel_position_and_state_and_id", order: { id: :desc }
     t.index ["project_id", "relative_position", "state_id", "id"], name: "idx_issues_on_project_id_and_rel_position_and_state_id_and_id", order: { id: :desc }
-    t.index ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state"
     t.index ["project_id", "updated_at", "id", "state_id"], name: "idx_issues_on_project_id_and_updated_at_and_id_and_state_id"
     t.index ["promoted_to_epic_id"], name: "index_issues_on_promoted_to_epic_id", where: "(promoted_to_epic_id IS NOT NULL)"
     t.index ["relative_position"], name: "index_issues_on_relative_position"
-    t.index ["state"], name: "index_issues_on_state"
     t.index ["state_id"], name: "idx_issues_on_state_id"
     t.index ["title"], name: "index_issues_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
     t.index ["updated_at"], name: "index_issues_on_updated_at"
@@ -2597,7 +2591,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
     t.datetime "created_at"
     t.datetime "updated_at"
     t.integer "milestone_id"
-    t.string "state", default: "opened", null: false
     t.string "merge_status", default: "unchecked", null: false
     t.integer "target_project_id", null: false
     t.integer "iid"
@@ -2633,7 +2626,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
     t.index ["description"], name: "index_merge_requests_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
     t.index ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id"
     t.index ["id", "merge_jid"], name: "idx_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND (state_id = 4))"
-    t.index ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))"
     t.index ["id"], name: "merge_request_mentions_temp_index", where: "((description ~~ '%@%'::text) OR ((title)::text ~~ '%@%'::text))"
     t.index ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id"
     t.index ["lock_version"], name: "index_merge_requests_on_lock_version", where: "(lock_version IS NULL)"
@@ -2641,15 +2633,12 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
     t.index ["milestone_id"], name: "index_merge_requests_on_milestone_id"
     t.index ["source_branch"], name: "index_merge_requests_on_source_branch"
     t.index ["source_project_id", "source_branch"], name: "idx_merge_requests_on_source_project_and_branch_state_opened", where: "(state_id = 1)"
-    t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)"
     t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch"
-    t.index ["state", "merge_status"], name: "index_merge_requests_on_state_and_merge_status", where: "(((state)::text = 'opened'::text) AND ((merge_status)::text = 'can_be_merged'::text))"
     t.index ["state_id", "merge_status"], name: "idx_merge_requests_on_state_id_and_merge_status", where: "((state_id = 1) AND ((merge_status)::text = 'can_be_merged'::text))"
     t.index ["target_branch"], name: "index_merge_requests_on_target_branch"
     t.index ["target_project_id", "created_at"], name: "index_merge_requests_target_project_id_created_at"
     t.index ["target_project_id", "iid"], name: "idx_merge_requests_on_target_project_id_and_iid_opened", where: "(state_id = 1)"
     t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true
-    t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)"
     t.index ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id"
     t.index ["target_project_id", "target_branch"], name: "index_merge_requests_on_target_project_id_and_target_branch", where: "((state_id = 1) AND (merge_when_pipeline_succeeds = true))"
     t.index ["title"], name: "index_merge_requests_on_title"
@@ -2844,7 +2833,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
     t.index ["id", "noteable_type"], name: "note_mentions_temp_index", where: "(note ~~ '%@%'::text)"
     t.index ["line_code"], name: "index_notes_on_line_code"
     t.index ["note"], name: "index_notes_on_note_trigram", opclass: :gin_trgm_ops, using: :gin
-    t.index ["note"], name: "tmp_idx_on_promoted_notes", where: "(((noteable_type)::text = 'Issue'::text) AND (system IS TRUE) AND (note ~~ 'promoted to epic%'::text))"
     t.index ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type"
     t.index ["project_id", "id"], name: "index_notes_on_project_id_and_id_and_system_false", where: "(NOT system)"
     t.index ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type"
...
@@ -8,4 +8,4 @@ link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#links
 level: error
 scope: raw
 raw:
-  - '\[.+\]\((https?:){0}[\w\/\.]+(\.html).*\)'
+  - '\[.+\]\((https?:){0}[\w\/\.-]+(\.html).*\)'
@@ -67,7 +67,7 @@ This configuration is supported in [GitLab Premium and Ultimate](https://about.g

 References:

-- [Geo Documentation](../../gitlab-geo/README.html)
+- [Geo Documentation](../geo/replication/index.md)
 - [GitLab Geo with a highly available configuration](../geo/replication/high_availability.md)

 ## Recommended setups based on number of users
...
@@ -258,6 +258,134 @@ export default {
 };
 ```
### Working with pagination
GitLab's GraphQL API uses [Relay-style cursor pagination](https://www.apollographql.com/docs/react/data/pagination/#cursor-based)
for connection types. This means a "cursor" is used to keep track of where in the data
set the next items should be fetched from.
Every connection type (for example, `DesignConnection` and `DiscussionConnection`) has a field `pageInfo` that contains the information required for pagination:
```javascript
pageInfo {
endCursor
hasNextPage
hasPreviousPage
startCursor
}
```
Here:
- `startCursor` and `endCursor` display the cursor of the first and last items
respectively.
- `hasPreviousPage` and `hasNextPage` allow us to check if there are more pages
available before or after the current page.
When we fetch data with a connection type, we can pass a cursor as the `after` or `before`
parameter, indicating a starting or ending point of our pagination. It should be
followed by the `first` or `last` parameter respectively to indicate _how many_ items
we want to fetch after or before a given endpoint.
For example, here we're fetching 10 designs after a cursor:
```javascript
query {
project(fullPath: "root/my-project") {
id
issue(iid: "42") {
designCollection {
designs(atVersion: null, after: "Ihwffmde0i", first: 10) {
edges {
node {
id
}
}
}
}
}
}
}
```
#### Using the `fetchMore` method in components
When making an initial fetch, we usually want to start a pagination from the beginning.
In this case, we can either:
- Skip passing a cursor.
- Pass `null` explicitly to `after`.
After data is fetched, we should save the `pageInfo` object. Let's assume we're storing
it in the Vue component's `data`:
```javascript
data() {
return {
pageInfo: null,
}
},
apollo: {
designs: {
query: projectQuery,
variables() {
return {
// rest of design variables
...
first: 10,
};
},
result(res) {
this.pageInfo = res.data?.project?.issue?.designCollection?.designs?.pageInfo;
},
},
},
```
When we want to move to the next page, we use Apollo's `fetchMore` method, passing it a
new cursor (and, optionally, new variables). In the `updateQuery` hook, we have to
return the result we want to see in the Apollo cache after fetching the next page.
```javascript
fetchNextPage() {
// as a first step, we're checking if we have more pages to move forward
if (this.pageInfo?.hasNextPage) {
this.$apollo.queries.designs.fetchMore({
variables: {
// rest of design variables
...
first: 10,
after: this.pageInfo?.endCursor,
},
      updateQuery(previousResult, { fetchMoreResult }) {
        // here we can implement the logic of adding the new designs to the fetched ones
        // (for example, if we use infinite scroll) or replacing the old result with the
        // new one if we use numbered pages
        const newDesigns = fetchMoreResult.project.issue.designCollection.designs;
        previousResult.project.issue.designCollection.designs.edges.push(...newDesigns.edges);
        return previousResult;
      },
},
});
}
}
```
Note that we don't have to save `pageInfo` one more time; `fetchMore` triggers the query's
`result` hook as well.
#### Limitations
Currently, bidirectional pagination doesn't work:
- `hasNextPage` returns a correct value only when we paginate forward using `endCursor`
and `first` parameters.
- `hasPreviousPage` returns a correct value only when we paginate backward using
`startCursor` and `last` parameters.
This should be resolved in the scope of the issue
[Bi-directional Pagination in GraphQL doesn't work as expected](https://gitlab.com/gitlab-org/gitlab/-/issues/208301).
 ### Testing

 #### Mocking response as component data
...
@@ -45,17 +45,41 @@ This could lead to false successes where subsequent "requests" could have querie

 ## Finding the source of the query

-It may be useful to identify the source of the queries by looking at the call backtrace.
-To enable this, run the specs with the `QUERY_RECORDER_DEBUG` environment variable set. For example:
-
-```shell
-QUERY_RECORDER_DEBUG=1 bundle exec rspec spec/requests/api/projects_spec.rb
-```
-
-This will log calls to QueryRecorder into the `test.log`. For example:
-
-```plaintext
-QueryRecorder SQL: SELECT COUNT(*) FROM "issues" WHERE "issues"."deleted_at" IS NULL AND "issues"."project_id" = $1 AND ("issues"."state" IN ('opened')) AND "issues"."confidential" = $2
+There are multiple ways to find the source of queries.
+
+1. The `QueryRecorder` `data` attribute stores queries by `file_name:line_number:method_name`.
+   Each entry is a `hash` with the following fields:
+
+   - `count`: the number of times a query from this `file_name:line_number:method_name` was called
+   - `occurrences`: the actual `SQL` of each call
+   - `backtrace`: the stack trace of each call (if either of the two following options were enabled)
+
+   `QueryRecorder#find_query` allows filtering queries by their `file_name:line_number:method_name` and
+   `count` attributes. For example:
+
+   ```ruby
+   control = ActiveRecord::QueryRecorder.new(skip_cached: false) { visit_some_page }
+   control.find_query(/.*note.rb.*/, 0, first_only: true)
+   ```
+
+   `QueryRecorder#occurrences_by_line_method` returns a sorted array based on `data`, sorted by `count`.
+
+1. You can output the call backtrace for the specific `QueryRecorder` instance you want
+   by using `ActiveRecord::QueryRecorder.new(query_recorder_debug: true)`. The output
+   will be in `test.log`.
+
+1. Using the environment variable `QUERY_RECORDER_DEBUG`, the call backtrace will be output for all tests.
+
+   To enable this, run the specs with the `QUERY_RECORDER_DEBUG` environment variable set. For example:
+
+   ```shell
+   QUERY_RECORDER_DEBUG=1 bundle exec rspec spec/requests/api/projects_spec.rb
+   ```
+
+   This will log calls to QueryRecorder into the `test.log` file. For example:
+
+   ```plaintext
+   QueryRecorder SQL: SELECT COUNT(*) FROM "issues" WHERE "issues"."deleted_at" IS NULL AND "issues"."project_id" = $1 AND ("issues"."state" IN ('opened')) AND "issues"."confidential" = $2
 --> /home/user/gitlab/gdk/gitlab/spec/support/query_recorder.rb:19:in `callback'
 --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:127:in `finish'
 --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/notifications/fanout.rb:46:in `block in finish'
@@ -87,7 +111,7 @@ QueryRecorder SQL: SELECT COUNT(*) FROM "issues" WHERE "issues"."deleted_at" IS
 --> /home/user/.rbenv/versions/2.3.5/lib/ruby/gems/2.3.0/gems/activesupport-4.2.8/lib/active_support/cache.rb:299:in `fetch'
 --> /home/user/gitlab/gdk/gitlab/app/services/base_count_service.rb:12:in `count'
 --> /home/user/gitlab/gdk/gitlab/app/models/project.rb:1296:in `open_issues_count'
 ```

 ## See also
...
@@ -64,13 +64,13 @@ source projects, GitLab grants access to **Gold** features for all GitLab.com

 #### Self-managed

-A self-managed subscription uses a hybrid model. You pay for a subscription according to the maximum number of users enabled during the subscription period. At the end of the subscription period, the maximum number of simultaneous users in the self-managed installation is checked.
+A self-managed subscription uses a hybrid model. You pay for a subscription according to the maximum number of users enabled during the subscription period. For instances that aren't air-gapped or on a closed network, the maximum number of simultaneous users in the self-managed installation is checked each quarter, using [Seat Link](#seat-link).

 Every occupied seat, whether by person, job, or bot is counted in the subscription, with the following exceptions:

 - Blocked users who are blocked prior to the renewal of a subscription won't be counted as active users for the renewal subscription. They may count as active users in the subscription period in which they were originally added.
 - Members with Guest permissions on an Ultimate subscription.
-- Special internal GitLab accounts: `Ghost User` and `Support Bot`.
+- GitLab-created service accounts: `Ghost User` and `Support Bot`.

 NOTE: **Note:**
 If you have LDAP integration enabled, anyone in the configured domain can sign up for a GitLab account. This can result in an unexpected bill at time of renewal. Consider [disabling new signups](../user/admin_area/settings/sign_up_restrictions.md) and managing new users manually instead.
@@ -237,6 +237,65 @@ The following will be emailed to you:

 - A payment receipt. You can also access this information in the Customers Portal under **Payment History**.
 - A new license. [Upload this license](../user/admin_area/license.md#uploading-your-license) to your instance to use it.
### Seat Link
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/208832) in [GitLab Starter](https://about.gitlab.com/pricing) 12.9.
Seat Link allows us to provide our self-managed customers with prorated charges for user growth throughout the year using a quarterly reconciliation process.
Seat Link sends to GitLab a daily count of all users in connected self-managed instances. That information is used to automate prorated reconciliations. The data is sent securely through an encrypted HTTPS connection.
Seat Link is mandatory because we need the user count data to enable prorated billing. Seat Link provides **only** the following information to GitLab:
- Date
- Historical maximum user count
- License key
Here is an example of the POST request:
```plaintext
{
date: '2020-01-29',
license_key: 'ZXlKa1lYUmhJam9pWm5WNmVsTjVZekZ2YTJoV2NucDBh
RXRxTTA5amQxcG1VMVZqDQpXR3RwZEc5SGIyMVhibmxuZDJ0NWFrNXJTVzVH
UzFCT1hHNVRiVFIyT0ZaUFlVSm1OV1ZGV0VObE1uVk4NCk4xY3ZkM1F4Y2to
MFFuVklXSFJvUWpSM01VdE9SVE5rYkVjclZrdDJORkpOTlhka01qaE5aalpj
YmxSMg0KWVd3MFNFTldTRmRtV1ZGSGRDOUhPR05oUVZvNUsxVnRXRUZIZFU1
U1VqUm5aVFZGZUdwTWIxbDFZV1EyDQphV1JTY1V4c1ZYSjNPVGhrYVZ4dVlu
TkpWMHRJZUU5dmF6ZEJRVVkxTlVWdFUwMTNSMGRHWm5SNlJFcFYNClQyVkJl
VXc0UzA0NWFFb3ZlSFJrZW0xbVRqUlZabkZ4U1hWcWNXRnZYRzVaTm5GSmVW
UnJVR1JQYTJKdA0KU0ZZclRHTmFPRTVhZEVKMUt6UjRkSE15WkRCT1UyNWlS
MGRJZDFCdmRFWk5Za2h4Tm5sT1VsSktlVlYyDQpXRmhjYmxSeU4wRnRNMU5q
THpCVWFGTmpTMnh3UWpOWVkyc3pkbXBST1dnelZHY3hUV3hxVDIwdlZYRlQN
Ck9EWTJSVWx4WlVOT01EQXhVRlZ3ZGs1Rk0xeHVSVEJTTDFkMWJUQTVhV1ZK
WjBORFdWUktaRXNyVnpsTw0KTldkWWQwWTNZa05VWlZBMmRUVk9kVUpxT1hV
Mk5VdDFTUzk0TUU5V05XbFJhWGh0WEc1cVkyWnhaeTlXDQpTMEpyZWt0cmVY
bzBOVGhFVG1oU1oxSm5WRFprY0Uwck0wZEdhVUpEV1d4a1RXZFRjVU5tYTB0
a2RteEQNCmNWTlFSbFpuWlZWY2JpdFVVbXhIV0d4MFRuUnRWbkJKTkhwSFJt
TnRaMGsyV0U1MFFUUXJWMUJVTWtOSA0KTVhKUWVGTkxPVTkzV1VsMlVUUldk
R3hNTWswNU1USlNjRnh1U1UxTGJTdHRRM1l5YTFWaWJtSlBTMkUxDQplRkpL
SzJSckszaG1hVXB1ZVRWT1UwdHZXV0ZOVG1WamMyVjRPV0pSUlZkUU9UUnpU
VWh2Wlc5cFhHNUgNClNtRkdVMDUyY1RGMWNGTnhVbU5JUkZkeGVWcHVRMnBh
VTBSUGR6VnRNVGhvWTFBM00zVkZlVzFOU0djMA0KY1ZFM1FWSlplSFZ5UzFS
aGIxTmNia3BSUFQxY2JpSXNJbxRsZVNJNkltZFhiVzFGVkRZNWNFWndiV2Rt
DQpNWEIyY21SbFFrdFNZamxaYURCdVVHcHhiRlV3Tm1WQ2JGSlFaSFJ3Y0Rs
cFMybGhSMnRPTkZOMWNVNU0NClVGeHVTa3N6TUUxcldVOTVWREl6WVVWdk5U
ZGhWM1ZvVjJkSFRtZFBZVXRJTkVGcE55dE1NRE5dWnpWeQ0KWlV0aWJsVk9T
RmRzVVROUGRHVXdWR3hEWEc1MWjWaEtRMGQ2YTAxWFpUZHJURTVET0doV00w
ODRWM0V2DQphV2M1YWs5cWFFWk9aR3BYTm1aVmJXNUNaazlXVUVRMWRrMXpj
bTFDV0V4dldtRmNibFpTTWpWU05VeFMNClEwTjRNMWxWCUtSVGEzTTJaV2xE
V0hKTFRGQmpURXRsZFVaQlNtRnJTbkpPZGtKdlUyUmlNVWxNWWpKaQ0KT0dw
c05YbE1kVnh1YzFWbk5VZDFhbU56ZUM5Tk16TXZUakZOVW05cVpsVTNObEo0
TjJ4eVlVUkdkWEJtDQpkSHByYWpreVJrcG9UVlo0Y0hKSU9URndiV2RzVFdO
VlhHNXRhVmszTkV0SVEzcEpNMWRyZEVoRU4ydHINCmRIRnFRVTlCVUVVM1pV
SlRORE4xUjFaYVJGb3JlWGM5UFZ4dUlpd2lhWFlpt2lKV00yRnNVbk5RTjJk
Sg0KU1hNMGExaE9SVGR2V2pKQlBUMWNiaUo5DQo=',
max_historical_user_count: 10
}
```
For air-gapped or closed network customers, the existing [true-up model](#users-over-license) will be used. Prorated charges are not possible without user count data.
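To see the shape of the request without the base64 blob, here is a minimal sketch that builds the documented payload (the field names come from the example above; the delivery endpoint is not documented on this page, so only the body is constructed):

```ruby
require 'json'
require 'date'

# Builds the three documented Seat Link fields; `license_key` is whatever
# encoded license the instance holds (abbreviated below).
def seat_link_payload(license_key, max_historical_user_count)
  {
    date: (Date.today - 1).iso8601,
    license_key: license_key,
    max_historical_user_count: max_historical_user_count
  }.to_json
end

puts seat_link_payload('ZXlKa1lYUmhJam9p...', 10)
```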
 ### Renew or change a GitLab.com subscription

 To renew for more users than are currently active in your GitLab.com system, contact our sales team via `renewals@gitlab.com` for assistance as this can't be done in the Customers Portal.
...
@@ -35,24 +35,21 @@ export REGION=us-central1 # the GCP region where the GKE cluster is provisioned.

 ## Configure RBAC permissions

-- For a non-GitLab managed cluster(s), ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group.
-
-Manually grant GitLab's service account the ability to manage resources in the
-`database.crossplane.io` API group. The Aggregated ClusterRole allows us to do that.
-
-NOTE: **Note:**
-For a non-GitLab managed cluster, ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group.
-
-1. Save the following YAML as `crossplane-database-role.yaml`:
-
-```shell
-cat > crossplane-database-role.yaml <<EOF
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
+- For GitLab-managed clusters, RBAC is configured automatically.
+
+- For non-GitLab managed clusters, ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group:
+
+  1. Save the following YAML as `crossplane-database-role.yaml`:
+
+     ```yaml
+     apiVersion: rbac.authorization.k8s.io/v1
+     kind: ClusterRole
+     metadata:
       name: crossplane-database-role
       labels:
         rbac.authorization.k8s.io/aggregate-to-edit: "true"
     rules:
     - apiGroups:
       - database.crossplane.io
       resources:
       - postgresqlinstances
@@ -64,14 +61,13 @@ rules:
       - delete
       - patch
       - watch
-EOF
-```
+     ```

-Once the file is created, apply it with the following command in order to create the necessary role:
+  1. Apply the cluster role to the cluster:

     ```shell
     kubectl apply -f crossplane-database-role.yaml
     ```

 ## Configure Crossplane with a cloud provider
...
@@ -21,32 +21,34 @@ analyzed to a file called `accessibility`.

 ## Configure Accessibility Testing

 This example shows how to run [pa11y](https://pa11y.org/)
-on your code with GitLab CI/CD using a node Docker image.
+on your code with GitLab CI/CD using the [GitLab Accessibility Docker image](https://gitlab.com/gitlab-org/ci-cd/accessibility).

-For GitLab 12.8 and later, to define the `a11y` job, you must
+For GitLab 12.9 and later, to define the `a11y` job, you must
 [include](../../../ci/yaml/README.md#includetemplate) the
 [`Accessibility.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml)
 included with your GitLab installation, as shown below.

-For GitLab versions earlier than 12.8, you can copy and use the job as
-defined in that template.
-
 Add the following to your `.gitlab-ci.yml` file:

 ```yaml
 variables:
-  a11y_urls: "https://about.gitlab.com"
+  a11y_urls: "https://about.gitlab.com https://gitlab.com/users/sign_in"

 include:
-  - remote: "https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml"
+  - template: "Verify/Accessibility.gitlab-ci.yml"
 ```

-The example above will create an `a11y` job in your CI/CD pipeline and will run
-Pa11y against the webpage you defined in `a11y_urls` to build a report.
+The example above creates an `a11y` job in your CI/CD pipeline, runs
+Pa11y against the webpages defined in `a11y_urls`, and builds an HTML report for each.

-NOTE: **Note:**
-Only one URL may be currently passed into `a11y_urls`.
+The report for each URL is saved as an artifact that can be [viewed directly in your browser](../../../ci/pipelines/job_artifacts.md#browsing-artifacts).

-The full HTML Pa11y report will be saved as an artifact that can be [viewed directly in your browser](../../../ci/pipelines/job_artifacts.md#browsing-artifacts).
+A single `accessibility.json` artifact is created and saved along with the individual HTML reports.
+It includes report data for all URLs scanned.
+
+NOTE: **Note:**
+For GitLab versions earlier than 12.9, you can use `include:remote` and use a
+link to the [current template in `master`](https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml).

 NOTE: **Note:**
 The job definition provided by the template does not support Kubernetes yet.
...
@@ -69,7 +69,6 @@ Currently the following names are reserved as top level groups:
 - `invites`
 - `jwt`
 - `login`
-- `notification_settings`
 - `oauth`
 - `profile`
 - `projects`
...
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
module UserMentions
module Models
class Commit
include Concerns::IsolatedMentionable
include Concerns::MentionableMigrationMethods
def self.user_mention_model
Gitlab::BackgroundMigration::UserMentions::Models::CommitUserMention
end
def user_mention_model
self.class.user_mention_model
end
def user_mention_resource_id
id
end
def user_mention_note_id
'NULL'
end
def self.no_quote_columns
[:note_id]
end
end
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
module UserMentions
module Models
class CommitUserMention < ActiveRecord::Base
self.table_name = 'commit_user_mentions'
def self.resource_foreign_key
:commit_id
end
end
end
end
end
end
@@ -20,7 +20,7 @@ module Gitlab
       belongs_to :project

       def for_personal_snippet?
-        noteable.class.name == 'PersonalSnippet'
+        noteable && noteable.class.name == 'PersonalSnippet'
       end

       def for_project_noteable?
@@ -32,7 +32,7 @@ module Gitlab
       end

       def for_epic?
-        noteable.class.name == 'Epic'
+        noteable && noteable_type == 'Epic'
       end

       def user_mention_resource_id
@@ -43,6 +43,14 @@ module Gitlab
         id
       end

+      def noteable
+        super unless for_commit?
+      end
+
+      def for_commit?
+        noteable_type == "Commit"
+      end
+
       private

       def mentionable_params
@@ -52,6 +60,8 @@ module Gitlab
       end

       def banzai_context_params
+        return {} unless noteable
+
         { group: noteable.group, label_url_method: :group_epics_url }
       end
     end
...
@@ -8,20 +8,12 @@ stages:

 a11y:
   stage: accessibility
-  image: node
-  script:
-    - wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
-    - echo "deb http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
-    - apt-get update && \
-    - apt-get install -y google-chrome-stable && \
-    - rm -rf /var/lib/apt/lists/*
-    - npm install pa11y@5.3.0 pa11y-reporter-html@1.0.0
-    - 'echo { \"chromeLaunchConfig\": { \"args\": [\"--no-sandbox\"] }, \"includeWarnings\": true, \"reporter\": \"html\" } > pa11y.json'
-    - './node_modules/.bin/pa11y $a11y_urls > accessibility.html'
+  image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:5.3.0-gitlab.2
+  script: /gitlab-accessibility.sh $a11y_urls
   allow_failure: true
   artifacts:
     when: always
-    expose_as: 'accessibility'
-    paths: ['accessibility.html']
+    expose_as: 'Accessibility Reports'
+    paths: ['reports/']
   rules:
     - if: $a11y_urls
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
module Gitlab module Gitlab
module Elasticsearch module Elasticsearch
class Logs class Logs
InvalidCursor = Class.new(RuntimeError)
# How many log lines to fetch in a query # How many log lines to fetch in a query
LOGS_LIMIT = 500 LOGS_LIMIT = 500
...@@ -10,7 +12,7 @@ module Gitlab ...@@ -10,7 +12,7 @@ module Gitlab
@client = client @client = client
end end
def pod_logs(namespace, pod_name, container_name = nil, search = nil, start_time = nil, end_time = nil) def pod_logs(namespace, pod_name, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil)
query = { bool: { must: [] } }.tap do |q| query = { bool: { must: [] } }.tap do |q|
filter_pod_name(q, pod_name) filter_pod_name(q, pod_name)
filter_namespace(q, namespace) filter_namespace(q, namespace)
...@@ -19,7 +21,7 @@ module Gitlab ...@@ -19,7 +21,7 @@ module Gitlab
filter_times(q, start_time, end_time) filter_times(q, start_time, end_time)
end end
body = build_body(query) body = build_body(query, cursor)
response = @client.search body: body response = @client.search body: body
format_response(response) format_response(response)
...@@ -27,8 +29,8 @@ module Gitlab ...@@ -27,8 +29,8 @@ module Gitlab
private private
def build_body(query) def build_body(query, cursor = nil)
{ body = {
query: query, query: query,
# reverse order so we can query N-most recent records # reverse order so we can query N-most recent records
sort: [ sort: [
...@@ -40,6 +42,12 @@ module Gitlab ...@@ -40,6 +42,12 @@ module Gitlab
# fixed limit for now, we should support paginated queries # fixed limit for now, we should support paginated queries
size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
} }
unless cursor.nil?
body[:search_after] = decode_cursor(cursor)
end
body
end end
def filter_pod_name(query, pod_name) def filter_pod_name(query, pod_name)
...@@ -100,7 +108,9 @@ module Gitlab ...@@ -100,7 +108,9 @@ module Gitlab
end end
def format_response(response) def format_response(response)
result = response.fetch("hits", {}).fetch("hits", []).map do |hit| results = response.fetch("hits", {}).fetch("hits", [])
last_result = results.last
results = results.map do |hit|
{ {
timestamp: hit["_source"]["@timestamp"], timestamp: hit["_source"]["@timestamp"],
message: hit["_source"]["message"] message: hit["_source"]["message"]
...@@ -108,7 +118,32 @@ module Gitlab ...@@ -108,7 +118,32 @@ module Gitlab
end end
# we queried for the N-most recent records but we want them ordered oldest to newest # we queried for the N-most recent records but we want them ordered oldest to newest
result.reverse {
logs: results.reverse,
cursor: last_result.nil? ? nil : encode_cursor(last_result["sort"])
}
end
# we want to hide the implementation details of the search_after parameter from the frontend
# behind a single easily transmitted value
def encode_cursor(obj)
obj.join(',')
end
def decode_cursor(obj)
cursor = obj.split(',').map(&:to_i)
unless valid_cursor(cursor)
raise InvalidCursor, "invalid cursor format"
end
cursor
end
def valid_cursor(cursor)
cursor.instance_of?(Array) &&
cursor.length == 2 &&
cursor.map { |i| i.instance_of?(Integer) }.reduce(:&)
end end
end end
end end
......
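The cursor introduced here is just the Elasticsearch sort key of the last hit (offset plus timestamp) joined into one opaque string. A condensed, standalone sketch of the round-trip implemented above, with illustrative values matching the fixture further down:

InvalidCursor = Class.new(RuntimeError)

def encode_cursor(sort_key)
  sort_key.join(',')
end

def decode_cursor(raw)
  cursor = raw.split(',').map(&:to_i)
  raise InvalidCursor, 'invalid cursor format' unless cursor.length == 2 # the real class also type-checks both parts

  cursor
end

cursor = encode_cursor([9999934, 1572449784442]) # => "9999934,1572449784442"
decode_cursor(cursor)                            # => [9999934, 1572449784442]
# the decoded pair feeds the search_after key of the next query body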
...@@ -152,6 +152,12 @@ module Gitlab ...@@ -152,6 +152,12 @@ module Gitlab
end end
end end
def replicate(source_repository)
wrapped_gitaly_errors do
gitaly_repository_client.replicate(source_repository)
end
end
def expire_has_local_branches_cache def expire_has_local_branches_cache
clear_memoization(:has_local_branches) clear_memoization(:has_local_branches)
end end
...@@ -767,12 +773,6 @@ module Gitlab ...@@ -767,12 +773,6 @@ module Gitlab
!has_visible_content? !has_visible_content?
end end
def fetch_repository_as_mirror(repository)
wrapped_gitaly_errors do
gitaly_remote_client.fetch_internal_remote(repository)
end
end
# Fetch remote for repository # Fetch remote for repository
# #
# remote - remote name # remote - remote name
......
...@@ -41,20 +41,6 @@ module Gitlab ...@@ -41,20 +41,6 @@ module Gitlab
GitalyClient.call(@storage, :remote_service, :remove_remote, request, timeout: GitalyClient.long_timeout).result GitalyClient.call(@storage, :remote_service, :remove_remote, request, timeout: GitalyClient.long_timeout).result
end end
def fetch_internal_remote(repository)
request = Gitaly::FetchInternalRemoteRequest.new(
repository: @gitaly_repo,
remote_repository: repository.gitaly_repository
)
response = GitalyClient.call(@storage, :remote_service,
:fetch_internal_remote, request,
timeout: GitalyClient.long_timeout,
remote_storage: repository.storage)
response.result
end
def find_remote_root_ref(remote_name) def find_remote_root_ref(remote_name)
request = Gitaly::FindRemoteRootRefRequest.new( request = Gitaly::FindRemoteRootRefRequest.new(
repository: @gitaly_repo, repository: @gitaly_repo,
......
...@@ -359,6 +359,22 @@ module Gitlab ...@@ -359,6 +359,22 @@ module Gitlab
GitalyClient.call(@storage, :repository_service, :remove_repository, request, timeout: GitalyClient.long_timeout) GitalyClient.call(@storage, :repository_service, :remove_repository, request, timeout: GitalyClient.long_timeout)
end end
def replicate(source_repository)
request = Gitaly::ReplicateRepositoryRequest.new(
repository: @gitaly_repo,
source: source_repository.gitaly_repository
)
GitalyClient.call(
@storage,
:repository_service,
:replicate_repository,
request,
remote_storage: source_repository.storage,
timeout: GitalyClient.long_timeout
)
end
private private
def search_results_from_response(gitaly_response, options = {}) def search_results_from_response(gitaly_response, options = {})
......
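Putting the new RPC together: per the specs later in this commit, callers replicate from the source repository and then compare checksums before switching storage, treating `Gitlab::Git::CommandError` as a failed move. A hedged sketch of that flow (`move_storage` is a hypothetical name; this is not the actual `Projects::UpdateRepositoryStorageService`):

def move_storage(destination_repository, source_repository)
  destination_repository.replicate(source_repository)

  # verify the copy before pointing the project at the new storage
  if destination_repository.checksum != source_repository.checksum
    raise 'checksum mismatch after replication'
  end

  :success
rescue Gitlab::Git::CommandError
  # replication failed: leave the project on its current storage
  :error
end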
...@@ -15,7 +15,7 @@ module Gitlab ...@@ -15,7 +15,7 @@ module Gitlab
unless result.response.is_a?(Net::HTTPSuccess) unless result.response.is_a?(Net::HTTPSuccess)
Gitlab::ErrorTracking.track_and_raise_exception( Gitlab::ErrorTracking.track_and_raise_exception(
JIRA::HTTPError.new(result.response), JIRA::HTTPError.new(result.response),
response_body: result.body response: result.body
) )
end end
......
...@@ -42,7 +42,6 @@ module Gitlab ...@@ -42,7 +42,6 @@ module Gitlab
invites invites
jwt jwt
login login
notification_settings
oauth oauth
profile profile
projects projects
......
...@@ -33,8 +33,6 @@ module Gitlab ...@@ -33,8 +33,6 @@ module Gitlab
if Rails.env.test? if Rails.env.test?
storage_path = Rails.root.join('tmp', 'tests', 'second_storage').to_s storage_path = Rails.root.join('tmp', 'tests', 'second_storage').to_s
FileUtils.mkdir(storage_path) unless File.exist?(storage_path)
storages << { name: 'test_second_storage', path: storage_path } storages << { name: 'test_second_storage', path: storage_path }
end end
......
...@@ -184,11 +184,13 @@ module Gitlab ...@@ -184,11 +184,13 @@ module Gitlab
commit_sha: @commit.sha, commit_sha: @commit.sha,
project: @commit.project, project: @commit.project,
x509_certificate_id: certificate.id, x509_certificate_id: certificate.id,
verification_status: verification_status verification_status: verification_status(certificate)
} }
end end
def verification_status def verification_status(certificate)
return :unverified if certificate.revoked?
if verified_signature && certificate_email == @commit.committer_email if verified_signature && certificate_email == @commit.committer_email
:verified :verified
else else
......
...@@ -560,6 +560,9 @@ msgstr "" ...@@ -560,6 +560,9 @@ msgstr ""
msgid "(removed)" msgid "(removed)"
msgstr "" msgstr ""
msgid "(revoked)"
msgstr ""
msgid "*" msgid "*"
msgstr "" msgstr ""
...@@ -10889,6 +10892,9 @@ msgstr "" ...@@ -10889,6 +10892,9 @@ msgstr ""
msgid "Invalid URL" msgid "Invalid URL"
msgstr "" msgstr ""
msgid "Invalid cursor value provided"
msgstr ""
msgid "Invalid date" msgid "Invalid date"
msgstr "" msgstr ""
...@@ -21370,9 +21376,6 @@ msgstr "" ...@@ -21370,9 +21376,6 @@ msgstr ""
msgid "UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}" msgid "UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}"
msgstr "" msgstr ""
msgid "UpdateRepositoryStorage|Failed to fetch %{type} repository as mirror"
msgstr ""
msgid "UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}" msgid "UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}"
msgstr "" msgstr ""
......
...@@ -8,5 +8,6 @@ FactoryBot.define do ...@@ -8,5 +8,6 @@ FactoryBot.define do
email { 'gitlab@example.org' } email { 'gitlab@example.org' }
serial_number { 278969561018901340486471282831158785578 } serial_number { 278969561018901340486471282831158785578 }
x509_issuer x509_issuer
certificate_status { :good }
end end
end end
{
"query": {
"bool": {
"must": [
{
"match_phrase": {
"kubernetes.pod.name": {
"query": "production-6866bc8974-m4sk4"
}
}
},
{
"match_phrase": {
"kubernetes.namespace": {
"query": "autodevops-deploy-9-production"
}
}
}
]
}
},
"sort": [
{
"@timestamp": {
"order": "desc"
}
},
{
"offset": {
"order": "desc"
}
}
],
"search_after": [
9999934,
1572449784442
],
"_source": [
"@timestamp",
"message"
],
"size": 500
}
export const metricsWithData = [15, 16]; export const metricsWithData = ['15_metric_a', '16_metric_b'];
export const groups = [ export const groups = [
{ {
...@@ -7,39 +7,10 @@ export const groups = [ ...@@ -7,39 +7,10 @@ export const groups = [
title: 'Memory Usage (Total)', title: 'Memory Usage (Total)',
type: 'area-chart', type: 'area-chart',
y_label: 'Total Memory Used', y_label: 'Total Memory Used',
weight: 4, metrics: null,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_total',
metric_id: 15,
},
],
},
{
title: 'Core Usage (Total)',
type: 'area-chart',
y_label: 'Total Cores',
weight: 3,
metrics: [
{
id: 'system_metrics_kubernetes_container_cores_total',
metric_id: 16,
}, },
], ],
}, },
],
},
];
export const metrics = [
{
id: 'system_metrics_kubernetes_container_memory_total',
metric_id: 15,
},
{
id: 'system_metrics_kubernetes_container_cores_total',
metric_id: 16,
},
]; ];
const result = [ const result = [
...@@ -60,7 +31,7 @@ export const metricsData = [ ...@@ -60,7 +31,7 @@ export const metricsData = [
{ {
metrics: [ metrics: [
{ {
metric_id: 15, metricId: '15_metric_a',
result, result,
}, },
], ],
...@@ -68,7 +39,7 @@ export const metricsData = [ ...@@ -68,7 +39,7 @@ export const metricsData = [
{ {
metrics: [ metrics: [
{ {
metric_id: 16, metricId: '16_metric_b',
result, result,
}, },
], ],
......
...@@ -213,20 +213,16 @@ describe('mapToDashboardViewModel', () => { ...@@ -213,20 +213,16 @@ describe('mapToDashboardViewModel', () => {
expect(getMappedMetric(dashboard)).toEqual({ expect(getMappedMetric(dashboard)).toEqual({
label: expect.any(String), label: expect.any(String),
metricId: expect.any(String), metricId: expect.any(String),
metric_id: expect.any(String),
}); });
}); });
it('creates a metric with a correct ids', () => { it('creates a metric with a correct id', () => {
const dashboard = dashboardWithMetric({ const dashboard = dashboardWithMetric({
id: 'http_responses', id: 'http_responses',
metric_id: 1, metric_id: 1,
}); });
expect(getMappedMetric(dashboard)).toMatchObject({ expect(getMappedMetric(dashboard).metricId).toEqual('1_http_responses');
metricId: '1_http_responses',
metric_id: '1_http_responses',
});
}); });
it('creates a metric with a default label', () => { it('creates a metric with a default label', () => {
......
...@@ -149,10 +149,12 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do ...@@ -149,10 +149,12 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do
context 'when an upload belongs to a legacy_diff_note' do context 'when an upload belongs to a legacy_diff_note' do
let!(:merge_request) { create(:merge_request, source_project: project) } let!(:merge_request) { create(:merge_request, source_project: project) }
let!(:note) do let!(:note) do
create(:legacy_diff_note_on_merge_request, create(:legacy_diff_note_on_merge_request,
note: 'some note', project: project, noteable: merge_request) note: 'some note', project: project, noteable: merge_request)
end end
let(:legacy_upload) do let(:legacy_upload) do
create(:upload, :with_file, :attachment_upload, create(:upload, :with_file, :attachment_upload,
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note) path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note)
...@@ -193,6 +195,17 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do ...@@ -193,6 +195,17 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do
it_behaves_like 'move error' it_behaves_like 'move error'
end end
context 'when upload has mount_point nil' do
let(:legacy_upload) do
create(:upload, :with_file, :attachment_upload,
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note, mount_point: nil)
end
it_behaves_like 'migrates the file correctly'
it_behaves_like 'legacy local file'
it_behaves_like 'legacy upload deletion'
end
context 'when the file can be handled correctly' do context 'when the file can be handled correctly' do
it_behaves_like 'migrates the file correctly' it_behaves_like 'migrates the file correctly'
it_behaves_like 'legacy local file' it_behaves_like 'legacy local file'
......
# frozen_string_literal: true # frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
require './db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db'
require './db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db' require './db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db'
describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20200211155539 do describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20200211155539 do
...@@ -73,11 +74,36 @@ describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, s ...@@ -73,11 +74,36 @@ describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, s
it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest
end end
context 'migrate commit mentions' do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:note1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions) }
let!(:note2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'sample note') }
let!(:note3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions, system: true) }
# this note does not have actual mentions
let!(:note4) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref') }
# this should have pointed to a non-existent commit record in the commits table,
# but because commit is not an AR model we'll just make it so that the note does not have mentions
let!(:note5) { notes.create!(commit_id: 'abc', noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref') }
let(:user_mentions) { commit_user_mentions }
let(:resource) { commit }
it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, Commit
end
end end
context 'checks no_quote_columns' do context 'checks no_quote_columns' do
it 'has correct no_quote_columns' do it 'has correct no_quote_columns' do
expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id]) expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id])
end end
it 'commit has correct no_quote_columns' do
expect(Gitlab::BackgroundMigration::UserMentions::Models::Commit.no_quote_columns).to match([:note_id])
end
end end
end end
...@@ -20,6 +20,7 @@ describe Gitlab::Elasticsearch::Logs do ...@@ -20,6 +20,7 @@ describe Gitlab::Elasticsearch::Logs do
let(:search) { "foo +bar "} let(:search) { "foo +bar "}
let(:start_time) { "2019-12-13T14:35:34.034Z" } let(:start_time) { "2019-12-13T14:35:34.034Z" }
let(:end_time) { "2019-12-13T14:35:34.034Z" } let(:end_time) { "2019-12-13T14:35:34.034Z" }
let(:cursor) { "9999934,1572449784442" }
let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) } let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) } let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
...@@ -27,6 +28,7 @@ describe Gitlab::Elasticsearch::Logs do ...@@ -27,6 +28,7 @@ describe Gitlab::Elasticsearch::Logs do
let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) } let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) } let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) } let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
RSpec::Matchers.define :a_hash_equal_to_json do |expected| RSpec::Matchers.define :a_hash_equal_to_json do |expected|
match do |actual| match do |actual|
...@@ -39,42 +41,49 @@ describe Gitlab::Elasticsearch::Logs do ...@@ -39,42 +41,49 @@ describe Gitlab::Elasticsearch::Logs do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response) expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name) result = subject.pod_logs(namespace, pod_name)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1]) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end end
it 'can further filter the logs by container name' do it 'can further filter the logs by container name' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response) expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, container_name) result = subject.pod_logs(namespace, pod_name, container_name: container_name)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1]) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end end
it 'can further filter the logs by search' do it 'can further filter the logs by search' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response) expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, search) result = subject.pod_logs(namespace, pod_name, search: search)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1]) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end end
it 'can further filter the logs by start_time and end_time' do it 'can further filter the logs by start_time and end_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response) expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, start_time, end_time) result = subject.pod_logs(namespace, pod_name, start_time: start_time, end_time: end_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1]) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end end
it 'can further filter the logs by only start_time' do it 'can further filter the logs by only start_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response) expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, start_time) result = subject.pod_logs(namespace, pod_name, start_time: start_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1]) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end end
it 'can further filter the logs by only end_time' do it 'can further filter the logs by only end_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response) expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, nil, end_time) result = subject.pod_logs(namespace, pod_name, end_time: end_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1]) expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can search after a cursor' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_cursor)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, cursor: cursor)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end end
end end
end end
...@@ -492,50 +492,6 @@ describe Gitlab::Git::Repository, :seed_helper do ...@@ -492,50 +492,6 @@ describe Gitlab::Git::Repository, :seed_helper do
end end
end end
describe '#fetch_repository_as_mirror' do
let(:new_repository) do
Gitlab::Git::Repository.new('default', 'my_project.git', '', 'group/project')
end
subject { new_repository.fetch_repository_as_mirror(repository) }
before do
new_repository.create_repository
end
after do
new_repository.remove
end
it 'fetches a repository as a mirror remote' do
subject
expect(refs(new_repository_path)).to eq(refs(repository_path))
end
context 'with keep-around refs' do
let(:sha) { SeedRepo::Commit::ID }
let(:keep_around_ref) { "refs/keep-around/#{sha}" }
let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
before do
repository_rugged.references.create(keep_around_ref, sha, force: true)
repository_rugged.references.create(tmp_ref, sha, force: true)
end
it 'includes the temporary and keep-around refs' do
subject
expect(refs(new_repository_path)).to include(keep_around_ref)
expect(refs(new_repository_path)).to include(tmp_ref)
end
end
def new_repository_path
File.join(TestEnv.repos_path, new_repository.relative_path)
end
end
describe '#fetch_remote' do describe '#fetch_remote' do
it 'delegates to the gitaly RepositoryService' do it 'delegates to the gitaly RepositoryService' do
ssh_auth = double(:ssh_auth) ssh_auth = double(:ssh_auth)
...@@ -2181,4 +2137,49 @@ describe Gitlab::Git::Repository, :seed_helper do ...@@ -2181,4 +2137,49 @@ describe Gitlab::Git::Repository, :seed_helper do
end end
end end
end end
describe '#replicate' do
let(:new_repository) do
Gitlab::Git::Repository.new('test_second_storage', TEST_REPO_PATH, '', 'group/project')
end
let(:new_repository_path) { File.join(TestEnv::SECOND_STORAGE_PATH, new_repository.relative_path) }
subject { new_repository.replicate(repository) }
before do
stub_storage_settings('test_second_storage' => {
'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
'path' => TestEnv::SECOND_STORAGE_PATH
})
Gitlab::Shell.new.create_repository('test_second_storage', TEST_REPO_PATH, 'group/project')
end
after do
Gitlab::Shell.new.remove_repository('test_second_storage', TEST_REPO_PATH)
end
it 'mirrors the source repository' do
subject
expect(refs(new_repository_path)).to eq(refs(repository_path))
end
context 'with keep-around refs' do
let(:sha) { SeedRepo::Commit::ID }
let(:keep_around_ref) { "refs/keep-around/#{sha}" }
let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
before do
repository.write_ref(keep_around_ref, sha)
repository.write_ref(tmp_ref, sha)
end
it 'includes the temporary and keep-around refs' do
subject
expect(refs(new_repository_path)).to include(keep_around_ref)
expect(refs(new_repository_path)).to include(tmp_ref)
end
end
end
end end
...@@ -34,19 +34,6 @@ describe Gitlab::GitalyClient::RemoteService do ...@@ -34,19 +34,6 @@ describe Gitlab::GitalyClient::RemoteService do
end end
end end
describe '#fetch_internal_remote' do
let(:remote_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
it 'sends an fetch_internal_remote message and returns the result value' do
expect_any_instance_of(Gitaly::RemoteService::Stub)
.to receive(:fetch_internal_remote)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(result: true))
expect(client.fetch_internal_remote(remote_repository)).to be(true)
end
end
describe '#find_remote_root_ref' do describe '#find_remote_root_ref' do
it 'sends an find_remote_root_ref message and returns the root ref' do it 'sends an find_remote_root_ref message and returns the root ref' do
expect_any_instance_of(Gitaly::RemoteService::Stub) expect_any_instance_of(Gitaly::RemoteService::Stub)
......
...@@ -275,7 +275,18 @@ describe Gitlab::GitalyClient::RepositoryService do ...@@ -275,7 +275,18 @@ describe Gitlab::GitalyClient::RepositoryService do
end end
end end
describe 'remove' do describe '#rename' do
it 'sends a rename_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:rename_repository)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(value: true))
client.rename('some/new/path')
end
end
describe '#remove' do
it 'sends a remove_repository message' do it 'sends a remove_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub) expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:remove_repository) .to receive(:remove_repository)
...@@ -286,14 +297,15 @@ describe Gitlab::GitalyClient::RepositoryService do ...@@ -286,14 +297,15 @@ describe Gitlab::GitalyClient::RepositoryService do
end end
end end
describe 'rename' do describe '#replicate' do
it 'sends a rename_repository message' do let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
it 'sends a replicate_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub) expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:rename_repository) .to receive(:replicate_repository)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(value: true))
client.rename('some/new/path') client.replicate(source_repository)
end end
end end
end end
...@@ -111,6 +111,22 @@ describe Gitlab::X509::Commit do ...@@ -111,6 +111,22 @@ describe Gitlab::X509::Commit do
expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes) expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
expect(signature.persisted?).to be_truthy expect(signature.persisted?).to be_truthy
end end
context 'revoked certificate' do
let(:x509_issuer) { create(:x509_issuer, user1_issuer_attributes) }
let!(:x509_certificate) { create(:x509_certificate, user1_certificate_attributes.merge(x509_issuer_id: x509_issuer.id, certificate_status: :revoked)) }
it 'returns an unverified signature' do
expect(signature).to have_attributes(
commit_sha: commit_sha,
project: project,
verification_status: 'unverified'
)
expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
expect(signature.persisted?).to be_truthy
end
end
end end
context 'without trusted certificate within store' do context 'without trusted certificate within store' do
......
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200128133510_cleanup_empty_commit_user_mentions')
describe CleanupEmptyCommitUserMentions, :migration, :sidekiq do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:notes) { table(:notes) }
let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id) }
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check', system: true) }
# this note is already migrated, as it has a record in the commit_user_mentions table
let!(:resource4) { notes.create!(note: 'note3 for @root to check', commit_id: commit.id, noteable_type: 'Commit') }
let!(:user_mention) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource4.id, mentioned_users_ids: [1]) }
# these should get cleaned up by the migration
let!(:blank_commit_user_mention1) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource1.id) }
let!(:blank_commit_user_mention2) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource2.id) }
let!(:blank_commit_user_mention3) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource3.id) }
it 'cleans up blank user mentions' do
expect { migrate! }.to change { commit_user_mentions.count }.by(-3)
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200128134110_migrate_commit_notes_mentions_to_db')
describe MigrateCommitNotesMentionsToDb, :migration, :sidekiq do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:notes) { table(:notes) }
let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id) }
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check', system: true) }
# non-migrateable resources
# this note is already migrated, as it has a record in the commit_user_mentions table
let!(:resource4) { notes.create!(note: 'note3 for @root to check', commit_id: commit.id, noteable_type: 'Commit') }
let!(:user_mention) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource4.id, mentioned_users_ids: [1]) }
# this should have pointed to a non-existent commit record in the commits table,
# but because commit is not an AR model, we'll just make it so that the note does not have mentions, i.e. no `@` char.
let!(:resource5) { notes.create!(note: 'note3 to check', commit_id: 'abc', noteable_type: 'Commit') }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
end
it_behaves_like 'schedules resource mentions migration', Commit, true
end
...@@ -440,6 +440,27 @@ describe JiraService do ...@@ -440,6 +440,27 @@ describe JiraService do
end end
end end
context 'when Remote Link already exists' do
let(:remote_link) do
double(
'remote link',
object: {
url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}"
}.with_indifferent_access
)
end
it 'does not create comment' do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return([remote_link])
expect(remote_link).to receive(:save!)
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
expect(WebMock).not_to have_requested(:post, @comment_url)
end
end
it 'does not send comment or remote links to issues already closed' do it 'does not send comment or remote links to issues already closed' do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(true) allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(true)
......
...@@ -127,6 +127,36 @@ describe Upload do ...@@ -127,6 +127,36 @@ describe Upload do
expect(uploader.mounted_as).to eq(subject.send(:mount_point)) expect(uploader.mounted_as).to eq(subject.send(:mount_point))
expect(uploader.file).not_to be_nil expect(uploader.file).not_to be_nil
end end
context 'when upload has mount_point nil' do
context 'when an upload belongs to a note' do
it 'mounts it as attachment' do
project = create(:project, :legacy_storage)
merge_request = create(:merge_request, source_project: project)
note = create(:legacy_diff_note_on_merge_request, note: 'some note', project: project, noteable: merge_request)
subject = build(:upload, :with_file, :attachment_upload, model: note, mount_point: nil)
uploader = subject.retrieve_uploader
expect(uploader.upload).to eq(subject)
expect(uploader.path).to include('attachment')
expect(uploader.file).not_to be_nil
end
end
context 'when an upload does not belong to a note' do
it 'does not mount it as attachment' do
appearance = create(:appearance)
subject = build(:upload, :with_file, :attachment_upload, model: appearance, mount_point: nil)
uploader = subject.retrieve_uploader
expect(uploader.upload).to eq(subject)
expect(uploader.path).not_to include('attachment')
expect(uploader.file).not_to be_nil
end
end
end
end end
describe '#needs_checksum?' do describe '#needs_checksum?' do
......
...@@ -43,6 +43,28 @@ RSpec.describe X509Certificate do ...@@ -43,6 +43,28 @@ RSpec.describe X509Certificate do
expect(certificate.subject).to eq(subject) expect(certificate.subject).to eq(subject)
expect(certificate.email).to eq(email) expect(certificate.email).to eq(email)
end end
it 'calls mark_commit_signatures_unverified' do
expect_any_instance_of(described_class).to receive(:mark_commit_signatures_unverified)
described_class.safe_create!(attributes)
end
context 'certificate revocation handling' do
let(:x509_certificate) { create(:x509_certificate) }
it 'starts a revoke worker if certificate is revoked' do
expect(X509CertificateRevokeWorker).to receive(:perform_async).with(x509_certificate.id)
x509_certificate.revoked!
end
it 'does not start a revoke worker for good certificates' do
expect(X509CertificateRevokeWorker).not_to receive(:perform_async).with(x509_certificate.id)
x509_certificate
end
end
end end
describe 'validators' do describe 'validators' do
......
...@@ -2455,7 +2455,7 @@ describe API::Projects do ...@@ -2455,7 +2455,7 @@ describe API::Projects do
end end
it 'returns 200 when repository storage has changed' do it 'returns 200 when repository storage has changed' do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' }) stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
expect do expect do
Sidekiq::Testing.fake! do Sidekiq::Testing.fake! do
......
...@@ -11,6 +11,7 @@ describe ::PodLogs::ElasticsearchService do ...@@ -11,6 +11,7 @@ describe ::PodLogs::ElasticsearchService do
let(:search) { 'foo -bar' } let(:search) { 'foo -bar' }
let(:start_time) { '2019-01-02T12:13:14+02:00' } let(:start_time) { '2019-01-02T12:13:14+02:00' }
let(:end_time) { '2019-01-03T12:13:14+02:00' } let(:end_time) { '2019-01-03T12:13:14+02:00' }
let(:cursor) { '9999934,1572449784442' }
let(:params) { {} } let(:params) { {} }
let(:expected_logs) do let(:expected_logs) do
[ [
...@@ -116,6 +117,36 @@ describe ::PodLogs::ElasticsearchService do ...@@ -116,6 +117,36 @@ describe ::PodLogs::ElasticsearchService do
end end
end end
describe '#check_cursor' do
context 'with cursor provided and valid' do
let(:params) do
{
'cursor' => cursor
}
end
it 'returns success with cursor' do
result = subject.send(:check_cursor, {})
expect(result[:status]).to eq(:success)
expect(result[:cursor]).to eq(cursor)
end
end
context 'with cursor not provided' do
let(:params) do
{}
end
it 'returns success with nothing else' do
result = subject.send(:check_cursor, {})
expect(result.keys.length).to eq(1)
expect(result[:status]).to eq(:success)
end
end
end
describe '#pod_logs' do describe '#pod_logs' do
let(:result_arg) do let(:result_arg) do
{ {
...@@ -123,9 +154,11 @@ describe ::PodLogs::ElasticsearchService do ...@@ -123,9 +154,11 @@ describe ::PodLogs::ElasticsearchService do
container_name: container_name, container_name: container_name,
search: search, search: search,
start: start_time, start: start_time,
end: end_time end: end_time,
cursor: cursor
} }
end end
let(:expected_cursor) { '9999934,1572449784442' }
before do before do
create(:clusters_applications_elastic_stack, :installed, cluster: cluster) create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
...@@ -137,13 +170,14 @@ describe ::PodLogs::ElasticsearchService do ...@@ -137,13 +170,14 @@ describe ::PodLogs::ElasticsearchService do
.and_return(Elasticsearch::Transport::Client.new) .and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs) allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
.to receive(:pod_logs) .to receive(:pod_logs)
.with(namespace, pod_name, container_name, search, start_time, end_time) .with(namespace, pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
.and_return(expected_logs) .and_return({ logs: expected_logs, cursor: expected_cursor })
result = subject.send(:pod_logs, result_arg) result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:success) expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs) expect(result[:logs]).to eq(expected_logs)
expect(result[:cursor]).to eq(expected_cursor)
end end
it 'returns an error when ES is unreachable' do it 'returns an error when ES is unreachable' do
...@@ -170,5 +204,19 @@ describe ::PodLogs::ElasticsearchService do ...@@ -170,5 +204,19 @@ describe ::PodLogs::ElasticsearchService do
expect(result[:status]).to eq(:error) expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable') expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
end end
it 'handles cursor errors from elasticsearch' do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
.to receive(:pod_logs)
.and_raise(::Gitlab::Elasticsearch::Logs::InvalidCursor.new)
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Invalid cursor value provided')
end
end end
end end
...@@ -311,9 +311,10 @@ describe Projects::ForkService do ...@@ -311,9 +311,10 @@ describe Projects::ForkService do
fork_before_move = fork_project(project) fork_before_move = fork_project(project)
# Stub everything required to move a project to a Gitaly shard that does not exist # Stub everything required to move a project to a Gitaly shard that does not exist
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' }) stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true) allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum).and_return(::Gitlab::Git::BLANK_SHA) allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
.and_return(::Gitlab::Git::BLANK_SHA)
Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage') Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
fork_after_move = fork_project(project) fork_after_move = fork_project(project)
......
...@@ -32,8 +32,8 @@ describe Projects::UpdateRepositoryStorageService do ...@@ -32,8 +32,8 @@ describe Projects::UpdateRepositoryStorageService do
project.repository.path_to_repo project.repository.path_to_repo
end end
expect(project_repository_double).to receive(:fetch_repository_as_mirror) expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw).and_return(true) .with(project.repository.raw)
expect(project_repository_double).to receive(:checksum) expect(project_repository_double).to receive(:checksum)
.and_return(checksum) .and_return(checksum)
...@@ -49,16 +49,18 @@ describe Projects::UpdateRepositoryStorageService do ...@@ -49,16 +49,18 @@ describe Projects::UpdateRepositoryStorageService do
context 'when the project is already on the target storage' do context 'when the project is already on the target storage' do
it 'bails out and does nothing' do it 'bails out and does nothing' do
expect do result = subject.execute(project.repository_storage)
subject.execute(project.repository_storage)
end.to raise_error(described_class::RepositoryAlreadyMoved) expect(result[:status]).to eq(:error)
expect(result[:message]).to match(/repository and source have the same storage/)
end end
end end
context 'when the move fails' do context 'when the move fails' do
it 'unmarks the repository as read-only without updating the repository storage' do it 'unmarks the repository as read-only without updating the repository storage' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror) expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw).and_return(false) .with(project.repository.raw)
.and_raise(Gitlab::Git::CommandError)
expect(GitlabShellWorker).not_to receive(:perform_async) expect(GitlabShellWorker).not_to receive(:perform_async)
result = subject.execute('test_second_storage') result = subject.execute('test_second_storage')
...@@ -71,8 +73,8 @@ describe Projects::UpdateRepositoryStorageService do ...@@ -71,8 +73,8 @@ describe Projects::UpdateRepositoryStorageService do
context 'when the checksum does not match' do context 'when the checksum does not match' do
it 'unmarks the repository as read-only without updating the repository storage' do it 'unmarks the repository as read-only without updating the repository storage' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror) expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw).and_return(true) .with(project.repository.raw)
expect(project_repository_double).to receive(:checksum) expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum') .and_return('not matching checksum')
expect(GitlabShellWorker).not_to receive(:perform_async) expect(GitlabShellWorker).not_to receive(:perform_async)
...@@ -89,8 +91,8 @@ describe Projects::UpdateRepositoryStorageService do ...@@ -89,8 +91,8 @@ describe Projects::UpdateRepositoryStorageService do
let!(:pool) { create(:pool_repository, :ready, source_project: project) } let!(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'leaves the pool' do it 'leaves the pool' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror) expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw).and_return(true) .with(project.repository.raw)
expect(project_repository_double).to receive(:checksum) expect(project_repository_double).to receive(:checksum)
.and_return(checksum) .and_return(checksum)
......
# frozen_string_literal: true
require 'spec_helper'
describe X509CertificateRevokeService do
describe '#execute' do
let(:service) { described_class.new }
let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
context 'for revoked certificates' do
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
it 'updates all commit signatures' do
expect do
service.execute(x509_certificate)
x509_signature_1.reload
x509_signature_2.reload
end
.to change(x509_signature_1, :verification_status).from('verified').to('unverified')
.and change(x509_signature_2, :verification_status).from('verified').to('unverified')
end
end
context 'for good certificates' do
RSpec::Matchers.define_negated_matcher :not_change, :change
let(:x509_certificate) { create(:x509_certificate) }
it 'does not update any commit signature' do
expect do
service.execute(x509_certificate)
x509_signature_1.reload
x509_signature_2.reload
end
.to not_change(x509_signature_1, :verification_status)
.and not_change(x509_signature_2, :verification_status)
end
end
end
end
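For reference, a hedged sketch of what these specs imply the service does; the `x509_commit_signatures` association name is assumed from the factories above, and this is not the shipped implementation:

class X509CertificateRevokeService
  def execute(certificate)
    return unless certificate.revoked?

    # every signature produced with a revoked certificate loses its trust
    certificate.x509_commit_signatures.find_each do |signature|
      signature.update!(verification_status: :unverified)
    end
  end
end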
...@@ -84,6 +84,7 @@ module TestEnv ...@@ -84,6 +84,7 @@ module TestEnv
TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**') TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
REPOS_STORAGE = 'default'.freeze REPOS_STORAGE = 'default'.freeze
SECOND_STORAGE_PATH = Rails.root.join('tmp', 'tests', 'second_storage')
# Test environment # Test environment
# #
...@@ -141,6 +142,7 @@ module TestEnv ...@@ -141,6 +142,7 @@ module TestEnv
end end
FileUtils.mkdir_p(repos_path) FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(SECOND_STORAGE_PATH)
FileUtils.mkdir_p(backup_path) FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path) FileUtils.mkdir_p(pages_path)
FileUtils.mkdir_p(artifacts_path) FileUtils.mkdir_p(artifacts_path)
...@@ -176,8 +178,6 @@ module TestEnv ...@@ -176,8 +178,6 @@ module TestEnv
return return
end end
FileUtils.mkdir_p("tmp/tests/second_storage") unless File.exist?("tmp/tests/second_storage")
spawn_script = Rails.root.join('scripts/gitaly-test-spawn').to_s spawn_script = Rails.root.join('scripts/gitaly-test-spawn').to_s
Bundler.with_original_env do Bundler.with_original_env do
unless system(spawn_script) unless system(spawn_script)
......
...@@ -22,14 +22,13 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type| ...@@ -22,14 +22,13 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context 'when the move succeeds', :clean_gitlab_redis_shared_state do context 'when the move succeeds', :clean_gitlab_redis_shared_state do
before do before do
allow(project_repository_double).to receive(:fetch_repository_as_mirror) allow(project_repository_double).to receive(:replicate)
.with(project.repository.raw) .with(project.repository.raw)
.and_return(true)
allow(project_repository_double).to receive(:checksum) allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum) .and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror) allow(repository_double).to receive(:replicate)
.with(repository.raw).and_return(true) .with(repository.raw)
allow(repository_double).to receive(:checksum) allow(repository_double).to receive(:checksum)
.and_return(repository_checksum) .and_return(repository_checksum)
end end
...@@ -82,20 +81,23 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type| ...@@ -82,20 +81,23 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context 'when the project is already on the target storage' do context 'when the project is already on the target storage' do
it 'bails out and does nothing' do it 'bails out and does nothing' do
expect do result = subject.execute(project.repository_storage)
subject.execute(project.repository_storage)
end.to raise_error(described_class::RepositoryAlreadyMoved) expect(result[:status]).to eq(:error)
expect(result[:message]).to match(/repository and source have the same storage/)
end end
end end
context "when the move of the #{repository_type} repository fails" do context "when the move of the #{repository_type} repository fails" do
it 'unmarks the repository as read-only without updating the repository storage' do it 'unmarks the repository as read-only without updating the repository storage' do
allow(project_repository_double).to receive(:fetch_repository_as_mirror) allow(project_repository_double).to receive(:replicate)
.with(project.repository.raw).and_return(true) .with(project.repository.raw)
allow(project_repository_double).to receive(:checksum) allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum) .and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(false) allow(repository_double).to receive(:replicate)
.with(repository.raw)
.and_raise(Gitlab::Git::CommandError)
expect(GitlabShellWorker).not_to receive(:perform_async) expect(GitlabShellWorker).not_to receive(:perform_async)
...@@ -109,13 +111,13 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type| ...@@ -109,13 +111,13 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context "when the checksum of the #{repository_type} repository does not match" do context "when the checksum of the #{repository_type} repository does not match" do
it 'unmarks the repository as read-only without updating the repository storage' do it 'unmarks the repository as read-only without updating the repository storage' do
allow(project_repository_double).to receive(:fetch_repository_as_mirror) allow(project_repository_double).to receive(:replicate)
.with(project.repository.raw).and_return(true) .with(project.repository.raw)
allow(project_repository_double).to receive(:checksum) allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum) .and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror) allow(repository_double).to receive(:replicate)
.with(repository.raw).and_return(true) .with(repository.raw)
allow(repository_double).to receive(:checksum) allow(repository_double).to receive(:checksum)
.and_return('not matching checksum') .and_return('not matching checksum')
......
...@@ -9,16 +9,11 @@ describe ProjectUpdateRepositoryStorageWorker do ...@@ -9,16 +9,11 @@ describe ProjectUpdateRepositoryStorageWorker do
describe "#perform" do describe "#perform" do
it "calls the update repository storage service" do it "calls the update repository storage service" do
expect_any_instance_of(Projects::UpdateRepositoryStorageService) expect_next_instance_of(Projects::UpdateRepositoryStorageService) do |instance|
.to receive(:execute).with('new_storage') expect(instance).to receive(:execute).with('new_storage')
subject.perform(project.id, 'new_storage')
end end
it 'catches and logs RepositoryAlreadyMoved' do subject.perform(project.id, 'new_storage')
expect(Rails.logger).to receive(:info).with(/repository already moved/)
expect { subject.perform(project.id, project.repository_storage) }.not_to raise_error
end end
end end
end end
# frozen_string_literal: true
require 'spec_helper'
describe X509CertificateRevokeWorker do
describe '#perform' do
context 'with a revoked certificate' do
subject { described_class.new }
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
let(:job_args) { x509_certificate.id }
include_examples 'an idempotent worker' do
it 'executes the revoke service' do
spy_service = X509CertificateRevokeService.new
allow(X509CertificateRevokeService).to receive(:new) { spy_service }
expect(spy_service).to receive(:execute)
.exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES).times
.with(x509_certificate)
.and_call_original
subject
end
end
it 'executes the revoke service' do
spy_service = X509CertificateRevokeService.new
allow(X509CertificateRevokeService).to receive(:new) { spy_service }
expect_next_instance_of(X509CertificateRevokeService) do |service|
expect(service).to receive(:execute).with(x509_certificate)
end
subject
end
end
end
end