Commit 3cd08f4b authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent dd4bee69
......@@ -67,7 +67,7 @@ export default {
'setShowErrorBanner',
]),
chartHasData(chart) {
return chart.metrics.some(metric => this.metricsWithData().includes(metric.metric_id));
return chart.metrics.some(metric => this.metricsWithData().includes(metric.metricId));
},
onSidebarMutation() {
setTimeout(() => {
......
......@@ -13,11 +13,12 @@ export const gqClient = createGqClient(
/**
* Metrics loaded from project-defined dashboards do not have a metric_id.
* This method creates a unique ID combining metric_id and id, if either is present.
* This is hopefully a temporary solution until BE processes metrics before passing to fE
* This is hopefully a temporary solution until BE processes metrics before passing to FE
* @param {Object} metric - metric
* @returns {String} - the unique metric ID
*/
export const uniqMetricsId = metric => `${metric.metric_id}_${metric.id}`;
// eslint-disable-next-line babel/camelcase
export const uniqMetricsId = ({ metric_id, id }) => `${metric_id}_${id}`;
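// e.g. uniqMetricsId({ metric_id: 1, id: 'http_responses' }) returns '1_http_responses' (see the dashboard spec below)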
/**
* Project path has a leading slash that doesn't work well
......@@ -68,10 +69,6 @@ const mapToMetricsViewModel = (metrics, defaultLabel) =>
queryRange: query_range,
prometheusEndpointPath: prometheus_endpoint_path,
metricId: uniqMetricsId({ metric_id, id }),
// `metric_id` is used by embed.vue, keeping this duplicated.
// https://gitlab.com/gitlab-org/gitlab/issues/37492
metric_id: uniqMetricsId({ metric_id, id }),
...metric,
}));
......
......@@ -48,7 +48,7 @@ module Projects
end
def elasticsearch_params
params.permit(:container_name, :pod_name, :search, :start, :end)
params.permit(:container_name, :pod_name, :search, :start, :end, :cursor)
end
def environment
......
......@@ -77,7 +77,7 @@ class Issue < ApplicationRecord
scope :counts_by_state, -> { reorder(nil).group(:state_id).count }
ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
ignore_column :state, remove_with: '12.10', remove_after: '2020-03-22'
after_commit :expire_etag_cache, unless: :importing?
after_save :ensure_metrics, unless: :importing?
......
......@@ -261,7 +261,7 @@ class MergeRequest < ApplicationRecord
includes(:metrics)
end
ignore_column :state, remove_with: '12.7', remove_after: '2019-12-22'
ignore_column :state, remove_with: '12.10', remove_after: '2020-03-22'
after_save :keep_around_commit, unless: :importing?
......
......@@ -280,21 +280,17 @@ class JiraService < IssueTrackerService
return unless client_url.present?
jira_request do
create_issue_link(issue, remote_link_props)
create_issue_comment(issue, message)
remote_link = find_remote_link(issue, remote_link_props[:object][:url])
create_issue_comment(issue, message) unless remote_link
remote_link ||= issue.remotelink.build
remote_link.save!(remote_link_props)
log_info("Successfully posted", client_url: client_url)
"SUCCESS: Successfully posted to #{client_url}."
end
end
def create_issue_link(issue, remote_link_props)
remote_link = find_remote_link(issue, remote_link_props[:object][:url])
remote_link ||= issue.remotelink.build
remote_link.save!(remote_link_props)
end
def create_issue_comment(issue, message)
return unless comment_on_event_enabled
......
......@@ -2,6 +2,7 @@
class X509Certificate < ApplicationRecord
include X509SerialNumberAttribute
include AfterCommitQueue
x509_serial_number_attribute :serial_number
......@@ -25,8 +26,14 @@ class X509Certificate < ApplicationRecord
validates :x509_issuer_id, presence: true
after_commit :mark_commit_signatures_unverified
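# finds an existing certificate by subject_key_identifier, or creates one with the given attributes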
def self.safe_create!(attributes)
create_with(attributes)
.safe_find_or_create_by!(subject_key_identifier: attributes[:subject_key_identifier])
end
def mark_commit_signatures_unverified
X509CertificateRevokeWorker.perform_async(self.id) if revoked?
end
end
......@@ -10,8 +10,6 @@ module PodLogs
CACHE_KEY_GET_POD_LOG = 'get_pod_log'
K8S_NAME_MAX_LENGTH = 253
SUCCESS_RETURN_KEYS = %i(status logs pod_name container_name pods).freeze
def id
cluster.id
end
......@@ -49,6 +47,10 @@ module PodLogs
%w(pod_name container_name)
end
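# overridable so subclasses can extend the returned keys (the Elasticsearch-backed service adds :cursor)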
def success_return_keys
%i(status logs pod_name container_name pods)
end
def check_arguments(result)
return error(_('Cluster does not exist')) if cluster.nil?
return error(_('Namespace is empty')) if namespace.blank?
......@@ -122,7 +124,7 @@ module PodLogs
end
def filter_return_keys(result)
result.slice(*SUCCESS_RETURN_KEYS)
result.slice(*success_return_keys)
end
def filter_params(params)
......
......@@ -10,6 +10,7 @@ module PodLogs
:check_container_name,
:check_times,
:check_search,
:check_cursor,
:pod_logs,
:filter_return_keys
......@@ -18,7 +19,11 @@ module PodLogs
private
def valid_params
%w(pod_name container_name search start end)
super + %w(search start end cursor)
end
def success_return_keys
super + %i(cursor)
end
def check_times(result)
......@@ -36,19 +41,28 @@ module PodLogs
success(result)
end
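# pass the client-supplied cursor through; it is later decoded into Elasticsearch's search_after parameter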
def check_cursor(result)
result[:cursor] = params['cursor'] if params.key?('cursor')
success(result)
end
def pod_logs(result)
client = cluster&.application_elastic_stack&.elasticsearch_client
return error(_('Unable to connect to Elasticsearch')) unless client
result[:logs] = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
response = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
namespace,
result[:pod_name],
result[:container_name],
result[:search],
result[:start],
result[:end]
container_name: result[:container_name],
search: result[:search],
start_time: result[:start],
end_time: result[:end],
cursor: result[:cursor]
)
result.merge!(response)
success(result)
rescue Elasticsearch::Transport::Transport::ServerError => e
::Gitlab::ErrorTracking.track_exception(e)
......@@ -58,6 +72,8 @@ module PodLogs
# there is no method on the exception other than the class name to determine the type of error encountered.
status_code: e.class.name.split('::').last
})
rescue ::Gitlab::Elasticsearch::Logs::InvalidCursor
error(_('Invalid cursor value provided'))
end
end
end
......@@ -5,19 +5,12 @@ module Projects
include Gitlab::ShellAdapter
Error = Class.new(StandardError)
RepositoryAlreadyMoved = Class.new(StandardError)
def initialize(project)
@project = project
end
def execute(new_repository_storage_key)
# Raising an exception is a little heavy-handed but this behavior (doing
# nothing if the repo is already on the right storage) prevents data
# loss, so it is valuable for us to be able to observe it via the
# exception.
raise RepositoryAlreadyMoved if project.repository_storage == new_repository_storage_key
mirror_repositories(new_repository_storage_key)
mark_old_paths_for_archive
......@@ -30,7 +23,7 @@ module Projects
success
rescue Error => e
rescue Error, ArgumentError, Gitlab::Git::BaseError => e
project.update(repository_read_only: false)
Gitlab::ErrorTracking.track_exception(e, project_path: project.full_path)
......@@ -65,10 +58,7 @@ module Projects
raw_repository.gl_repository,
full_path)
unless new_repository.fetch_repository_as_mirror(raw_repository)
raise Error, s_('UpdateRepositoryStorage|Failed to fetch %{type} repository as mirror') % { type: type.name }
end
new_repository.replicate(raw_repository)
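# verify the replicated repository matches the source by comparing checksums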
new_checksum = new_repository.checksum
if checksum != new_checksum
......
# frozen_string_literal: true
class X509CertificateRevokeService
def execute(certificate)
return unless certificate.revoked?
certificate.x509_commit_signatures.update_all(verification_status: :unverified)
end
end
......@@ -11,4 +11,15 @@ class AttachmentUploader < GitlabUploader
def dynamic_segment
File.join(model.class.underscore, mounted_as.to_s, model.id.to_s)
end
def mounted_as
# Geo fails to sync attachments on Notes and LegacyDiffNotes with a missing mount_point.
#
# See https://gitlab.com/gitlab-org/gitlab/-/issues/209752 for more details.
if model.class.underscore.include?('note')
super || 'attachment'
else
super
end
end
end
.gpg-popover-certificate-details
%strong= _('Certificate Subject')
- if signature.x509_certificate.revoked?
%strong.cred= _('(revoked)')
%ul
- x509_subject(signature.x509_certificate.subject, ["CN", "O"]).each do |key, value|
%li= key + "=" + value
......
......@@ -1291,3 +1291,10 @@
:resource_boundary: :unknown
:weight: 1
:idempotent:
- :name: x509_certificate_revoke
:feature_category: :source_code_management
:has_external_dependencies:
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
......@@ -9,7 +9,5 @@ class ProjectUpdateRepositoryStorageWorker # rubocop:disable Scalability/Idempot
project = Project.find(project_id)
::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
rescue ::Projects::UpdateRepositoryStorageService::RepositoryAlreadyMoved
Rails.logger.info "#{self.class}: repository already moved: #{project}" # rubocop:disable Gitlab/RailsLogger
end
end
# frozen_string_literal: true
class X509CertificateRevokeWorker
include ApplicationWorker
feature_category :source_code_management
idempotent!
def perform(certificate_id)
return unless certificate_id
X509Certificate.find_by_id(certificate_id).try do |certificate|
X509CertificateRevokeService.new.execute(certificate)
end
end
end
---
title: Migrate mentions for commit notes to commit_user_mentions DB table
merge_request: 23859
author:
type: changed
---
title: Ensure valid mount point is used by attachments on notes
merge_request: 26849
author:
type: fixed
---
title: Generate JSON-formatted a11y CI artifacts
merge_request: 26687
author:
type: added
---
title: Remove state column from issues and merge_requests
merge_request: 25561
author:
type: deprecated
---
title: Add functionality to revoke an X509Certificate and update related X509CommitSignatures
merge_request: 24889
author: Roger Meier
type: added
---
title: Remove promoted notes temporary index
merge_request: 26896
author:
type: other
---
title: Use ReplicateRepository when moving repo storage
merge_request: 26550
author:
type: changed
......@@ -546,6 +546,9 @@ Gitlab.ee do
Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['elastic_index_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['elastic_index_bulk_cron_worker']['job_class'] ||= 'ElasticIndexBulkCronWorker'
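# run once a day shortly after midnight, at a random minute (likely to spread reporting load across instances)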
Settings.cron_jobs['sync_seat_link_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} 0 * * *"
Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'
end
#
......
......@@ -133,6 +133,9 @@ Rails.application.routes.draw do
get :kill
end
end
# Notification settings
resources :notification_settings, only: [:create, :update]
end
concern :clusterable do
......@@ -181,9 +184,6 @@ Rails.application.routes.draw do
# Spam reports
resources :abuse_reports, only: [:new, :create]
# Notification settings
resources :notification_settings, only: [:create, :update]
resources :groups, only: [:index, :new, :create] do
post :preview_markdown
end
......
......@@ -232,6 +232,8 @@
- 2
- - service_desk_email_receiver
- 1
- - sync_seat_link_request
- 1
- - system_hook_push
- 1
- - todos_destroyer
......@@ -248,3 +250,5 @@
- 1
- - web_hook
- 1
- - x509_certificate_revoke
- 1
# frozen_string_literal: true
class CleanupEmptyCommitUserMentions < ActiveRecord::Migration[5.2]
DOWNTIME = false
BATCH_SIZE = 10_000
class CommitUserMention < ActiveRecord::Base
include EachBatch
self.table_name = 'commit_user_mentions'
end
def up
# clean up commit user mentions with no actual mentions,
# see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24586#note_285982468
CommitUserMention
.where(mentioned_users_ids: nil)
.where(mentioned_groups_ids: nil)
.where(mentioned_projects_ids: nil)
.each_batch(of: BATCH_SIZE) do |batch|
batch.delete_all
end
end
def down
# no-op
end
end
# frozen_string_literal: true
class MigrateCommitNotesMentionsToDb < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
DELAY = 3.minutes.to_i
BATCH_SIZE = 1_000
MIGRATION = 'UserMentions::CreateResourceUserMention'
QUERY_CONDITIONS = "note LIKE '%@%'::text AND notes.noteable_type = 'Commit' AND commit_user_mentions.commit_id IS NULL"
JOIN = 'LEFT JOIN commit_user_mentions ON notes.id = commit_user_mentions.note_id'
class Note < ActiveRecord::Base
include EachBatch
self.table_name = 'notes'
end
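# schedules batched background migrations, spaced DELAY seconds apart, for commit notes that contain mentions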
def up
Note
.joins(JOIN)
.where(QUERY_CONDITIONS)
.each_batch(of: BATCH_SIZE) do |batch, index|
range = batch.pluck(Arel.sql('MIN(notes.id)'), Arel.sql('MAX(notes.id)')).first
migrate_in(index * DELAY, MIGRATION, ['Commit', JOIN, QUERY_CONDITIONS, true, *range])
end
end
def down
# no-op
# temporary index is to be dropped in a different migration in an upcoming release:
# https://gitlab.com/gitlab-org/gitlab/issues/196842
end
end
# frozen_string_literal: true
class RemoveIssueStateIndexes < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
# issues state column is ignored since 12.6 and will be removed in a following migration
def up
remove_concurrent_index_by_name :issues, 'index_issues_on_state'
remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_created_at_and_id_and_state'
remove_concurrent_index_by_name :issues, 'idx_issues_on_project_id_and_due_date_and_id_and_state_partial'
remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_rel_position_and_state_and_id'
remove_concurrent_index_by_name :issues, 'index_issues_on_project_id_and_updated_at_and_id_and_state'
end
def down
add_concurrent_index :issues, :state, name: 'index_issues_on_state'
add_concurrent_index :issues,
[:project_id, :created_at, :id, :state],
name: 'index_issues_on_project_id_and_created_at_and_id_and_state'
add_concurrent_index :issues,
[:project_id, :due_date, :id, :state],
where: 'due_date IS NOT NULL',
name: 'idx_issues_on_project_id_and_due_date_and_id_and_state_partial'
add_concurrent_index :issues,
[:project_id, :relative_position, :state, :id],
order: { id: :desc },
name: 'index_issues_on_project_id_and_rel_position_and_state_and_id'
add_concurrent_index :issues,
[:project_id, :updated_at, :id, :state],
name: 'index_issues_on_project_id_and_updated_at_and_id_and_state'
end
end
# frozen_string_literal: true
class RemoveMergeRequestStateIndexes < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
# merge_requests state column is ignored since 12.6 and will be removed in a following migration
def up
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_id_and_merge_jid'
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_source_project_and_branch_state_opened'
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_state_and_merge_status'
remove_concurrent_index_by_name :merge_requests, 'index_merge_requests_on_target_project_id_and_iid_opened'
end
def down
add_concurrent_index :merge_requests,
[:id, :merge_jid],
where: "merge_jid IS NOT NULL and state = 'locked'",
name: 'index_merge_requests_on_id_and_merge_jid'
add_concurrent_index :merge_requests,
[:source_project_id, :source_branch],
where: "state = 'opened'",
name: 'index_merge_requests_on_source_project_and_branch_state_opened'
add_concurrent_index :merge_requests,
[:state, :merge_status],
where: "state = 'opened' AND merge_status = 'can_be_merged'",
name: 'index_merge_requests_on_state_and_merge_status'
add_concurrent_index :merge_requests,
[:target_project_id, :iid],
where: "state = 'opened'",
name: 'index_merge_requests_on_target_project_id_and_iid_opened'
end
end
# frozen_string_literal: true
class RemoveStateFromIssues < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless issue_state_column_exists?
# Ignored in 12.6 - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19574
with_lock_retries do
remove_column :issues, :state, :string
end
end
def down
return if issue_state_column_exists?
with_lock_retries do
add_column :issues, :state, :string # rubocop:disable Migration/AddLimitToStringColumns
end
end
private
def issue_state_column_exists?
column_exists?(:issues, :state)
end
end
# frozen_string_literal: true
class RemoveStateFromMergeRequests < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless merge_requests_state_column_exists?
# Ignored in 12.6 - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19574
with_lock_retries do
remove_column :merge_requests, :state, :string
end
end
def down
return if merge_requests_state_column_exists?
with_lock_retries do
add_column :merge_requests, :state, :string # rubocop:disable Migration/AddLimitToStringColumns
end
end
private
def merge_requests_state_column_exists?
column_exists?(:merge_requests, :state)
end
end
# frozen_string_literal: true
# Removes temporary index to fix orphan promoted issues.
# For more information check: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23916
class RemoveTemporaryPromotedNotesIndex < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :notes, 'tmp_idx_on_promoted_notes'
end
def down
add_concurrent_index :notes,
:note,
where: "noteable_type = 'Issue' AND system IS TRUE AND note LIKE 'promoted to epic%'",
name: 'tmp_idx_on_promoted_notes'
end
end
......@@ -2184,7 +2184,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.datetime "updated_at"
t.text "description"
t.integer "milestone_id"
t.string "state"
t.integer "iid"
t.integer "updated_by_id"
t.integer "weight"
......@@ -2216,18 +2215,13 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.index ["lock_version"], name: "index_issues_on_lock_version", where: "(lock_version IS NULL)"
t.index ["milestone_id"], name: "index_issues_on_milestone_id"
t.index ["moved_to_id"], name: "index_issues_on_moved_to_id", where: "(moved_to_id IS NOT NULL)"
t.index ["project_id", "created_at", "id", "state"], name: "index_issues_on_project_id_and_created_at_and_id_and_state"
t.index ["project_id", "created_at", "id", "state_id"], name: "idx_issues_on_project_id_and_created_at_and_id_and_state_id"
t.index ["project_id", "due_date", "id", "state"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_partial", where: "(due_date IS NOT NULL)"
t.index ["project_id", "due_date", "id", "state_id"], name: "idx_issues_on_project_id_and_due_date_and_id_and_state_id", where: "(due_date IS NOT NULL)"
t.index ["project_id", "iid"], name: "index_issues_on_project_id_and_iid", unique: true
t.index ["project_id", "relative_position", "state", "id"], name: "index_issues_on_project_id_and_rel_position_and_state_and_id", order: { id: :desc }
t.index ["project_id", "relative_position", "state_id", "id"], name: "idx_issues_on_project_id_and_rel_position_and_state_id_and_id", order: { id: :desc }
t.index ["project_id", "updated_at", "id", "state"], name: "index_issues_on_project_id_and_updated_at_and_id_and_state"
t.index ["project_id", "updated_at", "id", "state_id"], name: "idx_issues_on_project_id_and_updated_at_and_id_and_state_id"
t.index ["promoted_to_epic_id"], name: "index_issues_on_promoted_to_epic_id", where: "(promoted_to_epic_id IS NOT NULL)"
t.index ["relative_position"], name: "index_issues_on_relative_position"
t.index ["state"], name: "index_issues_on_state"
t.index ["state_id"], name: "idx_issues_on_state_id"
t.index ["title"], name: "index_issues_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["updated_at"], name: "index_issues_on_updated_at"
......@@ -2597,7 +2591,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.datetime "created_at"
t.datetime "updated_at"
t.integer "milestone_id"
t.string "state", default: "opened", null: false
t.string "merge_status", default: "unchecked", null: false
t.integer "target_project_id", null: false
t.integer "iid"
......@@ -2633,7 +2626,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.index ["description"], name: "index_merge_requests_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["head_pipeline_id"], name: "index_merge_requests_on_head_pipeline_id"
t.index ["id", "merge_jid"], name: "idx_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND (state_id = 4))"
t.index ["id", "merge_jid"], name: "index_merge_requests_on_id_and_merge_jid", where: "((merge_jid IS NOT NULL) AND ((state)::text = 'locked'::text))"
t.index ["id"], name: "merge_request_mentions_temp_index", where: "((description ~~ '%@%'::text) OR ((title)::text ~~ '%@%'::text))"
t.index ["latest_merge_request_diff_id"], name: "index_merge_requests_on_latest_merge_request_diff_id"
t.index ["lock_version"], name: "index_merge_requests_on_lock_version", where: "(lock_version IS NULL)"
......@@ -2641,15 +2633,12 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.index ["milestone_id"], name: "index_merge_requests_on_milestone_id"
t.index ["source_branch"], name: "index_merge_requests_on_source_branch"
t.index ["source_project_id", "source_branch"], name: "idx_merge_requests_on_source_project_and_branch_state_opened", where: "(state_id = 1)"
t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_and_branch_state_opened", where: "((state)::text = 'opened'::text)"
t.index ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch"
t.index ["state", "merge_status"], name: "index_merge_requests_on_state_and_merge_status", where: "(((state)::text = 'opened'::text) AND ((merge_status)::text = 'can_be_merged'::text))"
t.index ["state_id", "merge_status"], name: "idx_merge_requests_on_state_id_and_merge_status", where: "((state_id = 1) AND ((merge_status)::text = 'can_be_merged'::text))"
t.index ["target_branch"], name: "index_merge_requests_on_target_branch"
t.index ["target_project_id", "created_at"], name: "index_merge_requests_target_project_id_created_at"
t.index ["target_project_id", "iid"], name: "idx_merge_requests_on_target_project_id_and_iid_opened", where: "(state_id = 1)"
t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true
t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)"
t.index ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id"
t.index ["target_project_id", "target_branch"], name: "index_merge_requests_on_target_project_id_and_target_branch", where: "((state_id = 1) AND (merge_when_pipeline_succeeds = true))"
t.index ["title"], name: "index_merge_requests_on_title"
......@@ -2844,7 +2833,6 @@ ActiveRecord::Schema.define(version: 2020_03_11_165635) do
t.index ["id", "noteable_type"], name: "note_mentions_temp_index", where: "(note ~~ '%@%'::text)"
t.index ["line_code"], name: "index_notes_on_line_code"
t.index ["note"], name: "index_notes_on_note_trigram", opclass: :gin_trgm_ops, using: :gin
t.index ["note"], name: "tmp_idx_on_promoted_notes", where: "(((noteable_type)::text = 'Issue'::text) AND (system IS TRUE) AND (note ~~ 'promoted to epic%'::text))"
t.index ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type"
t.index ["project_id", "id"], name: "index_notes_on_project_id_and_id_and_system_false", where: "(NOT system)"
t.index ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type"
......
......@@ -8,4 +8,4 @@ link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#links
level: error
scope: raw
raw:
- '\[.+\]\((https?:){0}[\w\/\.]+(\.html).*\)'
- '\[.+\]\((https?:){0}[\w\/\.-]+(\.html).*\)'
......@@ -67,7 +67,7 @@ This configuration is supported in [GitLab Premium and Ultimate](https://about.g
References:
- [Geo Documentation](../../gitlab-geo/README.html)
- [Geo Documentation](../geo/replication/index.md)
- [GitLab Geo with a highly available configuration](../geo/replication/high_availability.md)
## Recommended setups based on number of users
......
......@@ -258,6 +258,134 @@ export default {
};
```
### Working with pagination
GitLab's GraphQL API uses [Relay-style cursor pagination](https://www.apollographql.com/docs/react/data/pagination/#cursor-based)
for connection types. This means a "cursor" is used to keep track of where in the data
set the next items should be fetched from.
Every connection type (for example, `DesignConnection` and `DiscussionConnection`) has a `pageInfo` field that contains the information required for pagination:
```javascript
pageInfo {
endCursor
hasNextPage
hasPreviousPage
startCursor
}
```
Here:
- `startCursor` and `endCursor` display the cursor of the first and last items
respectively.
- `hasPreviousPage` and `hasNextPage` allow us to check if there are more pages
available before or after the current page.
When we fetch data with a connection type, we can pass a cursor as the `after` or `before`
parameter, indicating a starting or ending point of our pagination. It should be
accompanied by a `first` or `last` parameter respectively, to indicate _how many_ items
we want to fetch after or before the given endpoint.
For example, here we're fetching 10 designs after a cursor:
```javascript
query {
project(fullPath: "root/my-project") {
id
issue(iid: "42") {
designCollection {
designs(atVersion: null, after: "Ihwffmde0i", first: 10) {
edges {
node {
id
}
}
}
}
}
}
}
```
#### Using `fetchMore` method in components
When making an initial fetch, we usually want to start pagination from the beginning.
In this case, we can either:
- Skip passing a cursor.
- Pass `null` explicitly to `after`.
After data is fetched, we should save the `pageInfo` object. Let's assume we're storing
it in the Vue component's `data`:
```javascript
data() {
return {
pageInfo: null,
}
},
apollo: {
designs: {
query: projectQuery,
variables() {
return {
// rest of design variables
...
first: 10,
};
},
result(res) {
this.pageInfo = res.data?.project?.issue?.designCollection?.designs?.pageInfo;
},
},
},
```
When we want to move to the next page, we use Apollo's `fetchMore` method, passing it a
new cursor (and, optionally, new variables). In the `updateQuery` hook, we have to return
the result we want to see in the Apollo cache after the next page is fetched.
```javascript
fetchNextPage() {
// as a first step, we're checking if we have more pages to move forward
if (this.pageInfo?.hasNextPage) {
this.$apollo.queries.designs.fetchMore({
variables: {
// rest of design variables
...
first: 10,
after: this.pageInfo?.endCursor,
},
updateQuery(previousResult, { fetchMoreResult }) {
// here we can implement the logic of appending the new designs to those already
// fetched (for example, for infinite scroll), or of replacing the old result
// with the new one if we use numbered pages
const newDesigns = fetchMoreResult.project.issue.designCollection.designs;
previousResult.project.issue.designCollection.designs.edges.push(...newDesigns.edges);
return previousResult;
},
});
}
}
```
Please note we don't have to save `pageInfo` again; `fetchMore` triggers the query's
`result` hook as well.
#### Limitations
Currently, bidirectional pagination doesn't work:
- `hasNextPage` returns a correct value only when we paginate forward using `endCursor`
and `first` parameters.
- `hasPreviousPage` returns a correct value only when we paginate backward using
`startCursor` and `last` parameters.
This should be resolved in the scope of the issue
[Bi-directional Pagination in GraphQL doesn't work as expected](https://gitlab.com/gitlab-org/gitlab/-/issues/208301).
### Testing
#### Mocking response as component data
......
......@@ -64,13 +64,13 @@ source projects, GitLab grants access to **Gold** features for all GitLab.com
#### Self-managed
A self-managed subscription uses a hybrid model. You pay for a subscription according to the maximum number of users enabled during the subscription period. At the end of the subscription period, the maximum number of simultaneous users in the self-managed installation is checked. If the number of users is higher than your subscription, you are billed for the extra users. The maximum number of simultaneous users is also used to calculate the cost of subscription renewal.
A self-managed subscription uses a hybrid model. You pay for a subscription according to the maximum number of users enabled during the subscription period. For instances that aren't air-gapped or on a closed network, the maximum number of simultaneous users in the self-managed installation is checked each quarter, using [Seat Link](#seat-link).
Every occupied seat, whether by person, job, or bot, is counted in the subscription, with the following exceptions:
- Blocked users who are blocked prior to the renewal of a subscription won't be counted as active users for the renewal subscription. They may count as active users in the subscription period in which they were originally added.
- Members with Guest permissions on an Ultimate subscription.
- Special internal GitLab accounts: `Ghost User` and `Support Bot`.
- GitLab-created service accounts: `Ghost User` and `Support Bot`.
NOTE: **Note:**
If you have LDAP integration enabled, anyone in the configured domain can sign up for a GitLab account. This can result in an unexpected bill at time of renewal. Consider [disabling new signups](../user/admin_area/settings/sign_up_restrictions.md) and managing new users manually instead.
......@@ -237,6 +237,65 @@ The following will be emailed to you:
- A payment receipt. You can also access this information in the Customers Portal under **Payment History**.
- A new license. [Upload this license](../user/admin_area/license.md#uploading-your-license) to your instance to use it.
### Seat Link
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/208832) in [GitLab Starter](https://about.gitlab.com/pricing) 12.9.
Seat Link allows us to provide our self-managed customers with prorated charges for user growth throughout the year using a quarterly reconciliation process.
Seat Link sends GitLab a daily count of all users in connected self-managed instances. That information is used to automate prorated reconciliations. The data is sent securely through an encrypted HTTPS connection.
Seat Link is mandatory because we need the user count data to enable prorated billing. Seat Link provides **only** the following information to GitLab:
- Date
- Historical maximum user count
- License key
Here is an example of the POST request:
```plaintext
{
date: '2020-01-29',
license_key: 'ZXlKa1lYUmhJam9pWm5WNmVsTjVZekZ2YTJoV2NucDBh
RXRxTTA5amQxcG1VMVZqDQpXR3RwZEc5SGIyMVhibmxuZDJ0NWFrNXJTVzVH
UzFCT1hHNVRiVFIyT0ZaUFlVSm1OV1ZGV0VObE1uVk4NCk4xY3ZkM1F4Y2to
MFFuVklXSFJvUWpSM01VdE9SVE5rYkVjclZrdDJORkpOTlhka01qaE5aalpj
YmxSMg0KWVd3MFNFTldTRmRtV1ZGSGRDOUhPR05oUVZvNUsxVnRXRUZIZFU1
U1VqUm5aVFZGZUdwTWIxbDFZV1EyDQphV1JTY1V4c1ZYSjNPVGhrYVZ4dVlu
TkpWMHRJZUU5dmF6ZEJRVVkxTlVWdFUwMTNSMGRHWm5SNlJFcFYNClQyVkJl
VXc0UzA0NWFFb3ZlSFJrZW0xbVRqUlZabkZ4U1hWcWNXRnZYRzVaTm5GSmVW
UnJVR1JQYTJKdA0KU0ZZclRHTmFPRTVhZEVKMUt6UjRkSE15WkRCT1UyNWlS
MGRJZDFCdmRFWk5Za2h4Tm5sT1VsSktlVlYyDQpXRmhjYmxSeU4wRnRNMU5q
THpCVWFGTmpTMnh3UWpOWVkyc3pkbXBST1dnelZHY3hUV3hxVDIwdlZYRlQN
Ck9EWTJSVWx4WlVOT01EQXhVRlZ3ZGs1Rk0xeHVSVEJTTDFkMWJUQTVhV1ZK
WjBORFdWUktaRXNyVnpsTw0KTldkWWQwWTNZa05VWlZBMmRUVk9kVUpxT1hV
Mk5VdDFTUzk0TUU5V05XbFJhWGh0WEc1cVkyWnhaeTlXDQpTMEpyZWt0cmVY
bzBOVGhFVG1oU1oxSm5WRFprY0Uwck0wZEdhVUpEV1d4a1RXZFRjVU5tYTB0
a2RteEQNCmNWTlFSbFpuWlZWY2JpdFVVbXhIV0d4MFRuUnRWbkJKTkhwSFJt
TnRaMGsyV0U1MFFUUXJWMUJVTWtOSA0KTVhKUWVGTkxPVTkzV1VsMlVUUldk
R3hNTWswNU1USlNjRnh1U1UxTGJTdHRRM1l5YTFWaWJtSlBTMkUxDQplRkpL
SzJSckszaG1hVXB1ZVRWT1UwdHZXV0ZOVG1WamMyVjRPV0pSUlZkUU9UUnpU
VWh2Wlc5cFhHNUgNClNtRkdVMDUyY1RGMWNGTnhVbU5JUkZkeGVWcHVRMnBh
VTBSUGR6VnRNVGhvWTFBM00zVkZlVzFOU0djMA0KY1ZFM1FWSlplSFZ5UzFS
aGIxTmNia3BSUFQxY2JpSXNJbxRsZVNJNkltZFhiVzFGVkRZNWNFWndiV2Rt
DQpNWEIyY21SbFFrdFNZamxaYURCdVVHcHhiRlV3Tm1WQ2JGSlFaSFJ3Y0Rs
cFMybGhSMnRPTkZOMWNVNU0NClVGeHVTa3N6TUUxcldVOTVWREl6WVVWdk5U
ZGhWM1ZvVjJkSFRtZFBZVXRJTkVGcE55dE1NRE5dWnpWeQ0KWlV0aWJsVk9T
RmRzVVROUGRHVXdWR3hEWEc1MWjWaEtRMGQ2YTAxWFpUZHJURTVET0doV00w
ODRWM0V2DQphV2M1YWs5cWFFWk9aR3BYTm1aVmJXNUNaazlXVUVRMWRrMXpj
bTFDV0V4dldtRmNibFpTTWpWU05VeFMNClEwTjRNMWxWCUtSVGEzTTJaV2xE
V0hKTFRGQmpURXRsZFVaQlNtRnJTbkpPZGtKdlUyUmlNVWxNWWpKaQ0KT0dw
c05YbE1kVnh1YzFWbk5VZDFhbU56ZUM5Tk16TXZUakZOVW05cVpsVTNObEo0
TjJ4eVlVUkdkWEJtDQpkSHByYWpreVJrcG9UVlo0Y0hKSU9URndiV2RzVFdO
VlhHNXRhVmszTkV0SVEzcEpNMWRyZEVoRU4ydHINCmRIRnFRVTlCVUVVM1pV
SlRORE4xUjFaYVJGb3JlWGM5UFZ4dUlpd2lhWFlpt2lKV00yRnNVbk5RTjJk
Sg0KU1hNMGExaE9SVGR2V2pKQlBUMWNiaUo5DQo=',
max_historical_user_count: 10
}
```
For air-gapped or closed network customers, the existing [true-up model](#users-over-license) will be used. Prorated charges are not possible without user count data.
### Renew or change a GitLab.com subscription
To renew for more users than are currently active in your GitLab.com system, contact our sales team via `renewals@gitlab.com` for assistance as this can't be done in the Customers Portal.
......
......@@ -35,43 +35,39 @@ export REGION=us-central1 # the GCP region where the GKE cluster is provisioned.
## Configure RBAC permissions
- For a non-GitLab managed cluster(s), ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group.
Manually grant GitLab's service account the ability to manage resources in the
`database.crossplane.io` API group. The Aggregated ClusterRole allows us to do that.
NOTE: **Note:**
For a non-GitLab managed cluster, ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group.
1. Save the following YAML as `crossplane-database-role.yaml`:
```shell
cat > crossplane-database-role.yaml <<EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: crossplane-database-role
labels:
rbac.authorization.k8s.io/aggregate-to-edit: "true"
rules:
- apiGroups:
- database.crossplane.io
resources:
- postgresqlinstances
verbs:
- get
- list
- create
- update
- delete
- patch
- watch
EOF
```
Once the file is created, apply it with the following command in order to create the necessary role:
```shell
kubectl apply -f crossplane-database-role.yaml
```
- For GitLab-managed clusters, RBAC is configured automatically.
- For non-GitLab managed clusters, ensure that the service account for the token provided can manage resources in the `database.crossplane.io` API group:
1. Save the following YAML as `crossplane-database-role.yaml`:
```yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: crossplane-database-role
labels:
rbac.authorization.k8s.io/aggregate-to-edit: "true"
rules:
- apiGroups:
- database.crossplane.io
resources:
- postgresqlinstances
verbs:
- get
- list
- create
- update
- delete
- patch
- watch
```
1. Apply the cluster role to the cluster:
```shell
kubectl apply -f crossplane-database-role.yaml
```
## Configure Crossplane with a cloud provider
......
......@@ -21,32 +21,34 @@ analyzed to a file called `accessibility`.
## Configure Accessibility Testing
This example shows how to run [pa11y](https://pa11y.org/)
on your code with GitLab CI/CD using a node Docker image.
on your code with GitLab CI/CD using the [GitLab Accessibility Docker image](https://gitlab.com/gitlab-org/ci-cd/accessibility).
For GitLab 12.8 and later, to define the `a11y` job, you must
For GitLab 12.9 and later, to define the `a11y` job, you must
[include](../../../ci/yaml/README.md#includetemplate) the
[`Accessibility.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml)
included with your GitLab installation, as shown below.
For GitLab versions earlier than 12.8, you can copy and use the job as
defined in that template.
Add the following to your `.gitlab-ci.yml` file:
```yaml
variables:
a11y_urls: "https://about.gitlab.com"
a11y_urls: "https://about.gitlab.com https://gitlab.com/users/sign_in"
include:
- remote: "https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml"
- template: "Verify/Accessibility.gitlab-ci.yml"
```
The example above will create an `a11y` job in your CI/CD pipeline and will run
Pa11y against the webpage you defined in `a11y_urls` to build a report.
creates an `a11y` job in your CI/CD pipeline, runs
Pa11y against the webpages defined in `a11y_urls`, and builds an HTML report for each.
NOTE: **Note:**
Only one URL may be currently passed into `a11y_urls`.
The report for each URL is saved as an artifact that can be [viewed directly in your browser](../../../ci/pipelines/job_artifacts.md#browsing-artifacts).
A single `accessibility.json` artifact is created and saved along with the individual HTML reports.
It includes report data for all URLs scanned.
The full HTML Pa11y report will be saved as an artifact that can be [viewed directly in your browser](../../../ci/pipelines/job_artifacts.md#browsing-artifacts).
NOTE: **Note:**
For GitLab versions earlier than 12.9, you can use `include:remote` with a
link to the [current template in `master`](https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml).
NOTE: **Note:**
The job definition provided by the template does not support Kubernetes yet.
......
......@@ -69,7 +69,6 @@ Currently the following names are reserved as top level groups:
- `invites`
- `jwt`
- `login`
- `notification_settings`
- `oauth`
- `profile`
- `projects`
......
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
module UserMentions
module Models
class Commit
include Concerns::IsolatedMentionable
include Concerns::MentionableMigrationMethods
def self.user_mention_model
Gitlab::BackgroundMigration::UserMentions::Models::CommitUserMention
end
def user_mention_model
self.class.user_mention_model
end
def user_mention_resource_id
id
end
def user_mention_note_id
'NULL'
end
def self.no_quote_columns
[:note_id]
end
end
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
module UserMentions
module Models
class CommitUserMention < ActiveRecord::Base
self.table_name = 'commit_user_mentions'
def self.resource_foreign_key
:commit_id
end
end
end
end
end
end
......@@ -20,7 +20,7 @@ module Gitlab
belongs_to :project
def for_personal_snippet?
noteable.class.name == 'PersonalSnippet'
noteable && noteable.class.name == 'PersonalSnippet'
end
def for_project_noteable?
......@@ -32,7 +32,7 @@ module Gitlab
end
def for_epic?
noteable.class.name == 'Epic'
noteable && noteable_type == 'Epic'
end
def user_mention_resource_id
......@@ -43,6 +43,14 @@ module Gitlab
id
end
def noteable
super unless for_commit?
end
def for_commit?
noteable_type == "Commit"
end
private
def mentionable_params
......@@ -52,6 +60,8 @@ module Gitlab
end
def banzai_context_params
return {} unless noteable
{ group: noteable.group, label_url_method: :group_epics_url }
end
end
......
......@@ -8,20 +8,12 @@ stages:
a11y:
stage: accessibility
image: node
script:
- wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
- echo "deb http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
- apt-get update && \
- apt-get install -y google-chrome-stable && \
- rm -rf /var/lib/apt/lists/*
- npm install pa11y@5.3.0 pa11y-reporter-html@1.0.0
- 'echo { \"chromeLaunchConfig\": { \"args\": [\"--no-sandbox\"] }, \"includeWarnings\": true, \"reporter\": \"html\" } > pa11y.json'
- './node_modules/.bin/pa11y $a11y_urls > accessibility.html'
image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:5.3.0-gitlab.2
script: /gitlab-accessibility.sh $a11y_urls
allow_failure: true
artifacts:
when: always
expose_as: 'accessibility'
paths: ['accessibility.html']
expose_as: 'Accessibility Reports'
paths: ['reports/']
rules:
- if: $a11y_urls
......@@ -3,6 +3,8 @@
module Gitlab
module Elasticsearch
class Logs
InvalidCursor = Class.new(RuntimeError)
# How many log lines to fetch in a query
LOGS_LIMIT = 500
......@@ -10,7 +12,7 @@ module Gitlab
@client = client
end
def pod_logs(namespace, pod_name, container_name = nil, search = nil, start_time = nil, end_time = nil)
def pod_logs(namespace, pod_name, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil)
query = { bool: { must: [] } }.tap do |q|
filter_pod_name(q, pod_name)
filter_namespace(q, namespace)
......@@ -19,7 +21,7 @@ module Gitlab
filter_times(q, start_time, end_time)
end
body = build_body(query)
body = build_body(query, cursor)
response = @client.search body: body
format_response(response)
......@@ -27,8 +29,8 @@ module Gitlab
private
def build_body(query)
{
def build_body(query, cursor = nil)
body = {
query: query,
# reverse order so we can query N-most recent records
sort: [
......@@ -40,6 +42,12 @@ module Gitlab
# fixed limit for now, we should support paginated queries
size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
}
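# when a cursor is supplied, resume the query after the previously returned sort values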
unless cursor.nil?
body[:search_after] = decode_cursor(cursor)
end
body
end
def filter_pod_name(query, pod_name)
......@@ -100,7 +108,9 @@ module Gitlab
end
def format_response(response)
result = response.fetch("hits", {}).fetch("hits", []).map do |hit|
results = response.fetch("hits", {}).fetch("hits", [])
last_result = results.last
results = results.map do |hit|
{
timestamp: hit["_source"]["@timestamp"],
message: hit["_source"]["message"]
......@@ -108,7 +118,32 @@ module Gitlab
end
# we queried for the N-most recent records but we want them ordered oldest to newest
result.reverse
{
logs: results.reverse,
cursor: last_result.nil? ? nil : encode_cursor(last_result["sort"])
}
end
# we want to hide the implementation details of the search_after parameter from the frontend
# behind a single easily transmitted value
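# e.g. encode_cursor([9999934, 1572449784442]) #=> "9999934,1572449784442"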
def encode_cursor(obj)
obj.join(',')
end
def decode_cursor(obj)
cursor = obj.split(',').map(&:to_i)
unless valid_cursor(cursor)
raise InvalidCursor, "invalid cursor format"
end
cursor
end
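# a cursor must decode to exactly two integers: the pair of sort values captured from the last hit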
def valid_cursor(cursor)
cursor.instance_of?(Array) &&
cursor.length == 2 &&
cursor.map { |i| i.instance_of?(Integer) }.reduce(:&)
end
end
end
......
......@@ -152,6 +152,12 @@ module Gitlab
end
end
def replicate(source_repository)
wrapped_gitaly_errors do
gitaly_repository_client.replicate(source_repository)
end
end
def expire_has_local_branches_cache
clear_memoization(:has_local_branches)
end
......@@ -767,12 +773,6 @@ module Gitlab
!has_visible_content?
end
def fetch_repository_as_mirror(repository)
wrapped_gitaly_errors do
gitaly_remote_client.fetch_internal_remote(repository)
end
end
# Fetch remote for repository
#
# remote - remote name
......
......@@ -41,20 +41,6 @@ module Gitlab
GitalyClient.call(@storage, :remote_service, :remove_remote, request, timeout: GitalyClient.long_timeout).result
end
def fetch_internal_remote(repository)
request = Gitaly::FetchInternalRemoteRequest.new(
repository: @gitaly_repo,
remote_repository: repository.gitaly_repository
)
response = GitalyClient.call(@storage, :remote_service,
:fetch_internal_remote, request,
timeout: GitalyClient.long_timeout,
remote_storage: repository.storage)
response.result
end
def find_remote_root_ref(remote_name)
request = Gitaly::FindRemoteRootRefRequest.new(
repository: @gitaly_repo,
......
......@@ -359,6 +359,22 @@ module Gitlab
GitalyClient.call(@storage, :repository_service, :remove_repository, request, timeout: GitalyClient.long_timeout)
end
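# replicate this repository from the given source via Gitaly's ReplicateRepository RPC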
def replicate(source_repository)
request = Gitaly::ReplicateRepositoryRequest.new(
repository: @gitaly_repo,
source: source_repository.gitaly_repository
)
GitalyClient.call(
@storage,
:repository_service,
:replicate_repository,
request,
remote_storage: source_repository.storage,
timeout: GitalyClient.long_timeout
)
end
private
def search_results_from_response(gitaly_response, options = {})
......
......@@ -15,7 +15,7 @@ module Gitlab
unless result.response.is_a?(Net::HTTPSuccess)
Gitlab::ErrorTracking.track_and_raise_exception(
JIRA::HTTPError.new(result.response),
response_body: result.body
response: result.body
)
end
......
......@@ -42,7 +42,6 @@ module Gitlab
invites
jwt
login
notification_settings
oauth
profile
projects
......
......@@ -33,8 +33,6 @@ module Gitlab
if Rails.env.test?
storage_path = Rails.root.join('tmp', 'tests', 'second_storage').to_s
FileUtils.mkdir(storage_path) unless File.exist?(storage_path)
storages << { name: 'test_second_storage', path: storage_path }
end
......
......@@ -184,11 +184,13 @@ module Gitlab
commit_sha: @commit.sha,
project: @commit.project,
x509_certificate_id: certificate.id,
verification_status: verification_status
verification_status: verification_status(certificate)
}
end
def verification_status
def verification_status(certificate)
return :unverified if certificate.revoked?
if verified_signature && certificate_email == @commit.committer_email
:verified
else
......
......@@ -560,6 +560,9 @@ msgstr ""
msgid "(removed)"
msgstr ""
msgid "(revoked)"
msgstr ""
msgid "*"
msgstr ""
......@@ -10889,6 +10892,9 @@ msgstr ""
msgid "Invalid URL"
msgstr ""
msgid "Invalid cursor value provided"
msgstr ""
msgid "Invalid date"
msgstr ""
......@@ -21370,9 +21376,6 @@ msgstr ""
msgid "UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}"
msgstr ""
msgid "UpdateRepositoryStorage|Failed to fetch %{type} repository as mirror"
msgstr ""
msgid "UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}"
msgstr ""
......
......@@ -8,5 +8,6 @@ FactoryBot.define do
email { 'gitlab@example.org' }
serial_number { 278969561018901340486471282831158785578 }
x509_issuer
certificate_status { :good }
end
end
{
"query": {
"bool": {
"must": [
{
"match_phrase": {
"kubernetes.pod.name": {
"query": "production-6866bc8974-m4sk4"
}
}
},
{
"match_phrase": {
"kubernetes.namespace": {
"query": "autodevops-deploy-9-production"
}
}
}
]
}
},
"sort": [
{
"@timestamp": {
"order": "desc"
}
},
{
"offset": {
"order": "desc"
}
}
],
"search_after": [
9999934,
1572449784442
],
"_source": [
"@timestamp",
"message"
],
"size": 500
}
export const metricsWithData = [15, 16];
export const metricsWithData = ['15_metric_a', '16_metric_b'];
export const groups = [
{
......@@ -7,41 +7,12 @@ export const groups = [
title: 'Memory Usage (Total)',
type: 'area-chart',
y_label: 'Total Memory Used',
weight: 4,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_total',
metric_id: 15,
},
],
},
{
title: 'Core Usage (Total)',
type: 'area-chart',
y_label: 'Total Cores',
weight: 3,
metrics: [
{
id: 'system_metrics_kubernetes_container_cores_total',
metric_id: 16,
},
],
metrics: null,
},
],
},
];
export const metrics = [
{
id: 'system_metrics_kubernetes_container_memory_total',
metric_id: 15,
},
{
id: 'system_metrics_kubernetes_container_cores_total',
metric_id: 16,
},
];
const result = [
{
values: [
......@@ -60,7 +31,7 @@ export const metricsData = [
{
metrics: [
{
metric_id: 15,
metricId: '15_metric_a',
result,
},
],
......@@ -68,7 +39,7 @@ export const metricsData = [
{
metrics: [
{
metric_id: 16,
metricId: '16_metric_b',
result,
},
],
......
......@@ -213,20 +213,16 @@ describe('mapToDashboardViewModel', () => {
expect(getMappedMetric(dashboard)).toEqual({
label: expect.any(String),
metricId: expect.any(String),
metric_id: expect.any(String),
});
});
it('creates a metric with a correct ids', () => {
it('creates a metric with a correct id', () => {
const dashboard = dashboardWithMetric({
id: 'http_responses',
metric_id: 1,
});
expect(getMappedMetric(dashboard)).toMatchObject({
metricId: '1_http_responses',
metric_id: '1_http_responses',
});
expect(getMappedMetric(dashboard).metricId).toEqual('1_http_responses');
});
it('creates a metric with a default label', () => {
......
......@@ -149,10 +149,12 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do
context 'when an upload belongs to a legacy_diff_note' do
let!(:merge_request) { create(:merge_request, source_project: project) }
let!(:note) do
create(:legacy_diff_note_on_merge_request,
note: 'some note', project: project, noteable: merge_request)
end
let(:legacy_upload) do
create(:upload, :with_file, :attachment_upload,
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note)
......@@ -193,6 +195,17 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do
it_behaves_like 'move error'
end
context 'when upload has mount_point nil' do
let(:legacy_upload) do
create(:upload, :with_file, :attachment_upload,
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note, mount_point: nil)
end
it_behaves_like 'migrates the file correctly'
it_behaves_like 'legacy local file'
it_behaves_like 'legacy upload deletion'
end
context 'when the file can be handled correctly' do
it_behaves_like 'migrates the file correctly'
it_behaves_like 'legacy local file'
......
# frozen_string_literal: true
require 'spec_helper'
require './db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db'
require './db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db'
describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20200211155539 do
......@@ -73,11 +74,36 @@ describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, s
it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest
end
context 'migrate commit mentions' do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:note1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions) }
let!(:note2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'sample note') }
let!(:note3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions, system: true) }
# this note does not have actual mentions
let!(:note4) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref' ) }
# this should point to a nonexistent commit record in the commits table,
# but because Commit is not an AR model we'll just make it so that it does not have mentions
let!(:note5) { notes.create!(commit_id: 'abc', noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref') }
let(:user_mentions) { commit_user_mentions }
let(:resource) { commit }
it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, Commit
end
end
context 'checks no_quote_columns' do
it 'has correct no_quote_columns' do
expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id])
end
it 'commit has correct no_quote_columns' do
expect(Gitlab::BackgroundMigration::UserMentions::Models::Commit.no_quote_columns).to match([:note_id])
end
end
end
......@@ -20,6 +20,7 @@ describe Gitlab::Elasticsearch::Logs do
let(:search) { "foo +bar "}
let(:start_time) { "2019-12-13T14:35:34.034Z" }
let(:end_time) { "2019-12-13T14:35:34.034Z" }
let(:cursor) { "9999934,1572449784442" }
let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
......@@ -27,6 +28,7 @@ describe Gitlab::Elasticsearch::Logs do
let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
RSpec::Matchers.define :a_hash_equal_to_json do |expected|
match do |actual|
......@@ -39,42 +41,49 @@ describe Gitlab::Elasticsearch::Logs do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can further filter the logs by container name' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, container_name)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
result = subject.pod_logs(namespace, pod_name, container_name: container_name)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can further filter the logs by search' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, search)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
result = subject.pod_logs(namespace, pod_name, search: search)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can further filter the logs by start_time and end_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, start_time, end_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
result = subject.pod_logs(namespace, pod_name, start_time: start_time, end_time: end_time)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can further filter the logs by only start_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, start_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
result = subject.pod_logs(namespace, pod_name, start_time: start_time)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can further filter the logs by only end_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, nil, end_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
result = subject.pod_logs(namespace, pod_name, end_time: end_time)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
it 'can search after a cursor' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_cursor)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, cursor: cursor)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
end
end
......@@ -492,50 +492,6 @@ describe Gitlab::Git::Repository, :seed_helper do
end
end
describe '#fetch_repository_as_mirror' do
let(:new_repository) do
Gitlab::Git::Repository.new('default', 'my_project.git', '', 'group/project')
end
subject { new_repository.fetch_repository_as_mirror(repository) }
before do
new_repository.create_repository
end
after do
new_repository.remove
end
it 'fetches a repository as a mirror remote' do
subject
expect(refs(new_repository_path)).to eq(refs(repository_path))
end
context 'with keep-around refs' do
let(:sha) { SeedRepo::Commit::ID }
let(:keep_around_ref) { "refs/keep-around/#{sha}" }
let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
before do
repository_rugged.references.create(keep_around_ref, sha, force: true)
repository_rugged.references.create(tmp_ref, sha, force: true)
end
it 'includes the temporary and keep-around refs' do
subject
expect(refs(new_repository_path)).to include(keep_around_ref)
expect(refs(new_repository_path)).to include(tmp_ref)
end
end
def new_repository_path
File.join(TestEnv.repos_path, new_repository.relative_path)
end
end
describe '#fetch_remote' do
it 'delegates to the gitaly RepositoryService' do
ssh_auth = double(:ssh_auth)
......@@ -2181,4 +2137,49 @@ describe Gitlab::Git::Repository, :seed_helper do
end
end
end
describe '#replicate' do
let(:new_repository) do
Gitlab::Git::Repository.new('test_second_storage', TEST_REPO_PATH, '', 'group/project')
end
let(:new_repository_path) { File.join(TestEnv::SECOND_STORAGE_PATH, new_repository.relative_path) }
subject { new_repository.replicate(repository) }
before do
stub_storage_settings('test_second_storage' => {
'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
'path' => TestEnv::SECOND_STORAGE_PATH
})
Gitlab::Shell.new.create_repository('test_second_storage', TEST_REPO_PATH, 'group/project')
end
after do
Gitlab::Shell.new.remove_repository('test_second_storage', TEST_REPO_PATH)
end
it 'mirrors the source repository' do
subject
expect(refs(new_repository_path)).to eq(refs(repository_path))
end
context 'with keep-around refs' do
let(:sha) { SeedRepo::Commit::ID }
let(:keep_around_ref) { "refs/keep-around/#{sha}" }
let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
before do
repository.write_ref(keep_around_ref, sha)
repository.write_ref(tmp_ref, sha)
end
it 'includes the temporary and keep-around refs' do
subject
expect(refs(new_repository_path)).to include(keep_around_ref)
expect(refs(new_repository_path)).to include(tmp_ref)
end
end
end
end
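A plausible sketch of the `#replicate` method exercised here, assuming it simply delegates to the Gitaly repository client; the helper names follow Gitlab::Git::Repository conventions, and the exact shape is illustrative:

def replicate(source_repository)
  # Translate Gitaly failures into Gitlab::Git errors such as CommandError.
  wrapped_gitaly_errors do
    gitaly_repository_client.replicate(source_repository)
  end
end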
......@@ -34,19 +34,6 @@ describe Gitlab::GitalyClient::RemoteService do
end
end
describe '#fetch_internal_remote' do
let(:remote_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
it 'sends a fetch_internal_remote message and returns the result value' do
expect_any_instance_of(Gitaly::RemoteService::Stub)
.to receive(:fetch_internal_remote)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(result: true))
expect(client.fetch_internal_remote(remote_repository)).to be(true)
end
end
describe '#find_remote_root_ref' do
it 'sends a find_remote_root_ref message and returns the root ref' do
expect_any_instance_of(Gitaly::RemoteService::Stub)
......
......@@ -275,7 +275,18 @@ describe Gitlab::GitalyClient::RepositoryService do
end
end
describe 'remove' do
describe '#rename' do
it 'sends a rename_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:rename_repository)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(value: true))
client.rename('some/new/path')
end
end
describe '#remove' do
it 'sends a remove_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:remove_repository)
......@@ -286,14 +297,15 @@ describe Gitlab::GitalyClient::RepositoryService do
end
end
describe 'rename' do
it 'sends a rename_repository message' do
describe '#replicate' do
let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
it 'sends a replicate_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
.to receive(:rename_repository)
.to receive(:replicate_repository)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(value: true))
client.rename('some/new/path')
client.replicate(source_repository)
end
end
end
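On the client side, the spec above expects a replicate_repository message. A sketch of the assumed call; the request field names are an assumption based on the Gitaly proto, not confirmed by this diff:

def replicate(source_repository)
  request = Gitaly::ReplicateRepositoryRequest.new(
    repository: @gitaly_repo,                   # target repository
    source: source_repository.gitaly_repository # repository to copy from
  )

  GitalyClient.call(@storage, :repository_service, :replicate_repository,
                    request, timeout: GitalyClient.long_timeout)
end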
......@@ -111,6 +111,22 @@ describe Gitlab::X509::Commit do
expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
expect(signature.persisted?).to be_truthy
end
context 'revoked certificate' do
let(:x509_issuer) { create(:x509_issuer, user1_issuer_attributes) }
let!(:x509_certificate) { create(:x509_certificate, user1_certificate_attributes.merge(x509_issuer_id: x509_issuer.id, certificate_status: :revoked)) }
it 'returns an unverified signature' do
expect(signature).to have_attributes(
commit_sha: commit_sha,
project: project,
verification_status: 'unverified'
)
expect(signature.x509_certificate).to have_attributes(user1_certificate_attributes)
expect(signature.x509_certificate.x509_issuer).to have_attributes(user1_issuer_attributes)
expect(signature.persisted?).to be_truthy
end
end
end
context 'without trusted certificate within store' do
......
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200128133510_cleanup_empty_commit_user_mentions')
describe CleanupEmptyCommitUserMentions, :migration, :sidekiq do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:notes) { table(:notes) }
let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id) }
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check', system: true) }
# this note is already migrated, as it has a record in the commit_user_mentions table
let!(:resource4) { notes.create!(note: 'note3 for @root to check', commit_id: commit.id, noteable_type: 'Commit') }
let!(:user_mention) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource4.id, mentioned_users_ids: [1]) }
# these should get cleaned up by the migration
let!(:blank_commit_user_mention1) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource1.id) }
let!(:blank_commit_user_mention2) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource2.id) }
let!(:blank_commit_user_mention3) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource3.id) }
it 'cleans up blank user mentions' do
expect { migrate! }.to change { commit_user_mentions.count }.by(-3)
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200128134110_migrate_commit_notes_mentions_to_db')
describe MigrateCommitNotesMentionsToDb, :migration, :sidekiq do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:notes) { table(:notes) }
let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id) }
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
let!(:resource3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check', system: true) }
# non-migratable resources
# this note is already migrated, as it has a record in the commit_user_mentions table
let!(:resource4) { notes.create!(note: 'note3 for @root to check', commit_id: commit.id, noteable_type: 'Commit') }
let!(:user_mention) { commit_user_mentions.create!(commit_id: commit.id, note_id: resource4.id, mentioned_users_ids: [1]) }
# this note would point to a nonexistent commit record in a commits table,
# but because Commit is not an ActiveRecord model, we simply ensure the note has no mentions, i.e. no `@` char.
let!(:resource5) { notes.create!(note: 'note3 to check', commit_id: 'abc', noteable_type: 'Commit') }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
end
it_behaves_like 'schedules resource mentions migration', Commit, true
end
......@@ -440,6 +440,27 @@ describe JiraService do
end
end
context 'when Remote Link already exists' do
let(:remote_link) do
double(
'remote link',
object: {
url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}"
}.with_indifferent_access
)
end
it 'does not create a comment' do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return([remote_link])
expect(remote_link).to receive(:save!)
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
expect(WebMock).not_to have_requested(:post, @comment_url)
end
end
it 'does not send comment or remote links to issues already closed' do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(true)
......
......@@ -127,6 +127,36 @@ describe Upload do
expect(uploader.mounted_as).to eq(subject.send(:mount_point))
expect(uploader.file).not_to be_nil
end
context 'when upload has mount_point nil' do
context 'when an upload belongs to a note' do
it 'mounts it as attachment' do
project = create(:project, :legacy_storage)
merge_request = create(:merge_request, source_project: project)
note = create(:legacy_diff_note_on_merge_request, note: 'some note', project: project, noteable: merge_request)
subject = build(:upload, :with_file, :attachment_upload, model: note, mount_point: nil)
uploader = subject.retrieve_uploader
expect(uploader.upload).to eq(subject)
expect(uploader.path).to include('attachment')
expect(uploader.file).not_to be_nil
end
end
context 'when an upload does not belong to a note' do
it 'does not mount it as attachment' do
appearance = create(:appearance)
subject = build(:upload, :with_file, :attachment_upload, model: appearance, mount_point: nil)
uploader = subject.retrieve_uploader
expect(uploader.upload).to eq(subject)
expect(uploader.path).not_to include('attachment')
expect(uploader.file).not_to be_nil
end
end
end
end
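These contexts imply a fallback in Upload#mount_point: a nil stored mount point is treated as :attachment only when the upload belongs to a note. A minimal sketch under that assumption; the exact predicate is illustrative:

def mount_point
  # Fall back to :attachment for note uploads with no stored mount point.
  super || (:attachment if model.is_a?(Note))
end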
describe '#needs_checksum?' do
......
......@@ -43,6 +43,28 @@ RSpec.describe X509Certificate do
expect(certificate.subject).to eq(subject)
expect(certificate.email).to eq(email)
end
it 'calls mark_commit_signatures_unverified' do
expect_any_instance_of(described_class).to receive(:mark_commit_signatures_unverified)
described_class.safe_create!(attributes)
end
context 'certificate revocation handling' do
let(:x509_certificate) { create(:x509_certificate) }
it 'starts a revoke worker if certificate is revoked' do
expect(X509CertificateRevokeWorker).to receive(:perform_async).with(x509_certificate.id)
x509_certificate.revoked!
end
it 'does not start a revoke worker for good certificates' do
expect(X509CertificateRevokeWorker).not_to receive(:perform_async).with(x509_certificate.id)
x509_certificate
end
end
end
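Taken together, these examples suggest the after-commit hook enqueues the revoke worker only when the certificate is revoked. A sketch of what mark_commit_signatures_unverified is assumed to do:

def mark_commit_signatures_unverified
  # Only revoked certificates need their commit signatures re-checked.
  X509CertificateRevokeWorker.perform_async(id) if revoked?
end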
describe 'validators' do
......
......@@ -2455,7 +2455,7 @@ describe API::Projects do
end
it 'returns 200 when repository storage has changed' do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
expect do
Sidekiq::Testing.fake! do
......
......@@ -11,6 +11,7 @@ describe ::PodLogs::ElasticsearchService do
let(:search) { 'foo -bar' }
let(:start_time) { '2019-01-02T12:13:14+02:00' }
let(:end_time) { '2019-01-03T12:13:14+02:00' }
let(:cursor) { '9999934,1572449784442' }
let(:params) { {} }
let(:expected_logs) do
[
......@@ -116,6 +117,36 @@ describe ::PodLogs::ElasticsearchService do
end
end
describe '#check_cursor' do
context 'with cursor provided and valid' do
let(:params) do
{
'cursor' => cursor
}
end
it 'returns success with cursor' do
result = subject.send(:check_cursor, {})
expect(result[:status]).to eq(:success)
expect(result[:cursor]).to eq(cursor)
end
end
context 'with cursor not provided' do
let(:params) do
{}
end
it 'returns success with nothing else' do
result = subject.send(:check_cursor, {})
expect(result.keys.length).to eq(1)
expect(result[:status]).to eq(:success)
end
end
end
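A minimal sketch of the assumed #check_cursor step, copying a provided cursor into the result hash and reporting success; the success helper and string-keyed params follow the service's step conventions as an assumption:

def check_cursor(result)
  # Pass the cursor through untouched; a later step hands it to Elasticsearch.
  result[:cursor] = params['cursor'] if params.key?('cursor')

  success(result)
end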
describe '#pod_logs' do
let(:result_arg) do
{
......@@ -123,9 +154,11 @@ describe ::PodLogs::ElasticsearchService do
container_name: container_name,
search: search,
start: start_time,
end: end_time
end: end_time,
cursor: cursor
}
end
let(:expected_cursor) { '9999934,1572449784442' }
before do
create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
......@@ -137,13 +170,14 @@ describe ::PodLogs::ElasticsearchService do
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
.to receive(:pod_logs)
.with(namespace, pod_name, container_name, search, start_time, end_time)
.and_return(expected_logs)
.with(namespace, pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
.and_return({ logs: expected_logs, cursor: expected_cursor })
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
expect(result[:cursor]).to eq(expected_cursor)
end
it 'returns an error when ES is unreachable' do
......@@ -170,5 +204,19 @@ describe ::PodLogs::ElasticsearchService do
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
end
it 'handles cursor errors from elasticsearch' do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
.to receive(:pod_logs)
.and_raise(::Gitlab::Elasticsearch::Logs::InvalidCursor.new)
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Invalid cursor value provided')
end
end
end
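A sketch of the #pod_logs step consistent with these expectations; the client lookup and the unreachable-Elasticsearch message are assumptions, while the InvalidCursor rescue and its message come straight from the spec:

def pod_logs(result)
  client = cluster.application_elastic_stack.elasticsearch_client
  return error(_('Unable to connect to Elasticsearch')) unless client # assumed message

  # `namespace` is assumed to be resolved by an earlier step in the service.
  response = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
    namespace, result[:pod_name],
    container_name: result[:container_name],
    search: result[:search],
    start_time: result[:start],
    end_time: result[:end],
    cursor: result[:cursor]
  )

  # pod_logs returns { logs: [...], cursor: '...' }; merge both into the result.
  result.merge!(response)
  success(result)
rescue ::Gitlab::Elasticsearch::Logs::InvalidCursor
  error(_('Invalid cursor value provided'))
end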
......@@ -311,9 +311,10 @@ describe Projects::ForkService do
fork_before_move = fork_project(project)
# Stub everything required to move a project to a Gitaly shard that does not exist
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum).and_return(::Gitlab::Git::BLANK_SHA)
stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
.and_return(::Gitlab::Git::BLANK_SHA)
Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
fork_after_move = fork_project(project)
......
......@@ -32,8 +32,8 @@ describe Projects::UpdateRepositoryStorageService do
project.repository.path_to_repo
end
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
......@@ -49,16 +49,18 @@ describe Projects::UpdateRepositoryStorageService do
context 'when the project is already on the target storage' do
it 'bails out and does nothing' do
expect do
subject.execute(project.repository_storage)
end.to raise_error(described_class::RepositoryAlreadyMoved)
result = subject.execute(project.repository_storage)
expect(result[:status]).to eq(:error)
expect(result[:message]).to match(/repository and source have the same storage/)
end
end
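These expectations replace the old RepositoryAlreadyMoved exception with an error result. A sketch of the assumed guard at the top of the service; the error helper and exact message wording are assumptions matched to the regex above:

def execute(new_repository_storage_key)
  if project.repository_storage == new_repository_storage_key
    return error(s_('UpdateRepositoryStorage|Project repository and source have the same storage'))
  end

  # ...replicate to the new shard, verify the checksum, then update
  # project.repository_storage (not shown).
end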
context 'when the move fails' do
it 'unmarks the repository as read-only without updating the repository storage' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(false)
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
.and_raise(Gitlab::Git::CommandError)
expect(GitlabShellWorker).not_to receive(:perform_async)
result = subject.execute('test_second_storage')
......@@ -71,8 +73,8 @@ describe Projects::UpdateRepositoryStorageService do
context 'when the checksum does not match' do
it 'unmarks the repository as read-only without updating the repository storage' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum')
expect(GitlabShellWorker).not_to receive(:perform_async)
......@@ -89,8 +91,8 @@ describe Projects::UpdateRepositoryStorageService do
let!(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'leaves the pool' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
......
# frozen_string_literal: true
require 'spec_helper'
describe X509CertificateRevokeService do
describe '#execute' do
let(:service) { described_class.new }
let!(:x509_signature_1) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
let!(:x509_signature_2) { create(:x509_commit_signature, x509_certificate: x509_certificate, verification_status: :verified) }
context 'for revoked certificates' do
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
it 'updates all commit signatures' do
expect do
service.execute(x509_certificate)
x509_signature_1.reload
x509_signature_2.reload
end
.to change(x509_signature_1, :verification_status).from('verified').to('unverified')
.and change(x509_signature_2, :verification_status).from('verified').to('unverified')
end
end
context 'for good certificates' do
RSpec::Matchers.define_negated_matcher :not_change, :change
let(:x509_certificate) { create(:x509_certificate) }
it 'does not update any commit signature' do
expect do
service.execute(x509_certificate)
x509_signature_1.reload
x509_signature_2.reload
end
.to not_change(x509_signature_1, :verification_status)
.and not_change(x509_signature_2, :verification_status)
end
end
end
end
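A minimal sketch of X509CertificateRevokeService consistent with this spec; the x509_commit_signatures association name is an assumption:

class X509CertificateRevokeService
  def execute(certificate)
    # Good certificates are left untouched, as the second context verifies.
    return unless certificate.revoked?

    certificate.x509_commit_signatures.update_all(verification_status: :unverified)
  end
end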
......@@ -84,6 +84,7 @@ module TestEnv
TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
REPOS_STORAGE = 'default'.freeze
SECOND_STORAGE_PATH = Rails.root.join('tmp', 'tests', 'second_storage')
# Test environment
#
......@@ -141,6 +142,7 @@ module TestEnv
end
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(SECOND_STORAGE_PATH)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)
FileUtils.mkdir_p(artifacts_path)
......@@ -176,8 +178,6 @@ module TestEnv
return
end
FileUtils.mkdir_p("tmp/tests/second_storage") unless File.exist?("tmp/tests/second_storage")
spawn_script = Rails.root.join('scripts/gitaly-test-spawn').to_s
Bundler.with_original_env do
unless system(spawn_script)
......
......@@ -22,14 +22,13 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context 'when the move succeeds', :clean_gitlab_redis_shared_state do
before do
allow(project_repository_double).to receive(:fetch_repository_as_mirror)
allow(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
.and_return(true)
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(true)
allow(repository_double).to receive(:replicate)
.with(repository.raw)
allow(repository_double).to receive(:checksum)
.and_return(repository_checksum)
end
......@@ -82,20 +81,23 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context 'when the project is already on the target storage' do
it 'bails out and does nothing' do
expect do
subject.execute(project.repository_storage)
end.to raise_error(described_class::RepositoryAlreadyMoved)
result = subject.execute(project.repository_storage)
expect(result[:status]).to eq(:error)
expect(result[:message]).to match(/repository and source have the same storage/)
end
end
context "when the move of the #{repository_type} repository fails" do
it 'unmarks the repository as read-only without updating the repository storage' do
allow(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
allow(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(false)
allow(repository_double).to receive(:replicate)
.with(repository.raw)
.and_raise(Gitlab::Git::CommandError)
expect(GitlabShellWorker).not_to receive(:perform_async)
......@@ -109,13 +111,13 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context "when the checksum of the #{repository_type} repository does not match" do
it 'unmarks the repository as read-only without updating the repository storage' do
allow(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
allow(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(true)
allow(repository_double).to receive(:replicate)
.with(repository.raw)
allow(repository_double).to receive(:checksum)
.and_return('not matching checksum')
......
......@@ -9,16 +9,11 @@ describe ProjectUpdateRepositoryStorageWorker do
describe "#perform" do
it "calls the update repository storage service" do
expect_any_instance_of(Projects::UpdateRepositoryStorageService)
.to receive(:execute).with('new_storage')
expect_next_instance_of(Projects::UpdateRepositoryStorageService) do |instance|
expect(instance).to receive(:execute).with('new_storage')
end
subject.perform(project.id, 'new_storage')
end
it 'catches and logs RepositoryAlreadyMoved' do
expect(Rails.logger).to receive(:info).with(/repository already moved/)
expect { subject.perform(project.id, project.repository_storage) }.not_to raise_error
end
end
end
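A sketch of the simplified worker these expectations imply, now that the same-storage case is handled inside the service; the class shape is illustrative:

class ProjectUpdateRepositoryStorageWorker
  include ApplicationWorker

  def perform(project_id, new_repository_storage_key)
    project = Project.find(project_id)

    ::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
  end
end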
# frozen_string_literal: true
require 'spec_helper'
describe X509CertificateRevokeWorker do
describe '#perform' do
context 'with a revoked certificate' do
subject { described_class.new }
let(:x509_certificate) { create(:x509_certificate, certificate_status: :revoked) }
let(:job_args) { x509_certificate.id }
include_examples 'an idempotent worker' do
it 'executes the revoke service' do
spy_service = X509CertificateRevokeService.new
allow(X509CertificateRevokeService).to receive(:new) { spy_service }
expect(spy_service).to receive(:execute)
.exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES).times
.with(x509_certificate)
.and_call_original
subject
end
end
it 'executes the revoke service' do
spy_service = X509CertificateRevokeService.new
allow(X509CertificateRevokeService).to receive(:new) { spy_service }
expect_next_instance_of(X509CertificateRevokeService) do |service|
expect(service).to receive(:execute).with(x509_certificate)
end
subject
end
end
end
end
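A minimal sketch of the worker under test, assuming the idempotency declaration and lookup pattern used by similar GitLab workers:

class X509CertificateRevokeWorker
  include ApplicationWorker

  idempotent!

  def perform(certificate_id)
    # A missing certificate makes the job a no-op rather than an error.
    X509Certificate.find_by_id(certificate_id).try do |certificate|
      X509CertificateRevokeService.new.execute(certificate)
    end
  end
end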