Commit 154b9bae authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 2c156e3c
...@@ -43,6 +43,7 @@ eslint-report.html ...@@ -43,6 +43,7 @@ eslint-report.html
/config/redis.shared_state.yml /config/redis.shared_state.yml
/config/unicorn.rb /config/unicorn.rb
/config/puma.rb /config/puma.rb
/config/puma_actioncable.rb
/config/secrets.yml /config/secrets.yml
/config/sidekiq.yml /config/sidekiq.yml
/config/registry.key /config/registry.key
......
...@@ -3,6 +3,7 @@ import { __ } from '~/locale'; ...@@ -3,6 +3,7 @@ import { __ } from '~/locale';
import { mapActions, mapState } from 'vuex'; import { mapActions, mapState } from 'vuex';
import { ADD_CI_VARIABLE_MODAL_ID } from '../constants'; import { ADD_CI_VARIABLE_MODAL_ID } from '../constants';
import { import {
GlButton,
GlModal, GlModal,
GlFormSelect, GlFormSelect,
GlFormGroup, GlFormGroup,
...@@ -16,6 +17,7 @@ import { ...@@ -16,6 +17,7 @@ import {
export default { export default {
modalId: ADD_CI_VARIABLE_MODAL_ID, modalId: ADD_CI_VARIABLE_MODAL_ID,
components: { components: {
GlButton,
GlModal, GlModal,
GlFormSelect, GlFormSelect,
GlFormGroup, GlFormGroup,
...@@ -66,20 +68,6 @@ export default { ...@@ -66,20 +68,6 @@ export default {
attributes: { variant: 'success', disabled: !this.canSubmit }, attributes: { variant: 'success', disabled: !this.canSubmit },
}; };
}, },
deleteAction() {
if (this.variableBeingEdited) {
return {
text: __('Delete variable'),
attributes: { variant: 'danger', category: 'secondary' },
};
}
return null;
},
cancelAction() {
return {
text: __('Cancel'),
};
},
maskedFeedback() { maskedFeedback() {
return __('This variable can not be masked'); return __('This variable can not be masked');
}, },
...@@ -99,6 +87,7 @@ export default { ...@@ -99,6 +87,7 @@ export default {
} else { } else {
this.addVariable(); this.addVariable();
} }
this.hideModal();
}, },
resetModalHandler() { resetModalHandler() {
if (this.variableBeingEdited) { if (this.variableBeingEdited) {
...@@ -107,20 +96,23 @@ export default { ...@@ -107,20 +96,23 @@ export default {
this.clearModal(); this.clearModal();
} }
}, },
hideModal() {
this.$refs.modal.hide();
},
deleteVarAndClose() {
this.deleteVariable(this.variableBeingEdited);
this.hideModal();
},
}, },
}; };
</script> </script>
<template> <template>
<gl-modal <gl-modal
ref="modal"
:modal-id="$options.modalId" :modal-id="$options.modalId"
:title="modalActionText" :title="modalActionText"
:action-primary="primaryAction"
:action-secondary="deleteAction"
:action-cancel="cancelAction"
@ok="updateOrAddVariable"
@hidden="resetModalHandler" @hidden="resetModalHandler"
@secondary="deleteVariable(variableBeingEdited)"
> >
<form> <form>
<gl-form-group :label="__('Key')" label-for="ci-variable-key"> <gl-form-group :label="__('Key')" label-for="ci-variable-key">
...@@ -210,5 +202,23 @@ export default { ...@@ -210,5 +202,23 @@ export default {
</gl-form-checkbox> </gl-form-checkbox>
</gl-form-group> </gl-form-group>
</form> </form>
<template #modal-footer>
<gl-button @click="hideModal">{{ __('Cancel') }}</gl-button>
<gl-button
v-if="variableBeingEdited"
ref="deleteCiVariable"
category="secondary"
variant="danger"
@click="deleteVarAndClose"
>{{ __('Delete variable') }}</gl-button
>
<gl-button
ref="updateOrAddVariable"
:disabled="!canSubmit"
variant="success"
@click="updateOrAddVariable"
>{{ modalActionText }}
</gl-button>
</template>
</gl-modal> </gl-modal>
</template> </template>
...@@ -120,8 +120,8 @@ kbd { ...@@ -120,8 +120,8 @@ kbd {
code { code {
padding: 2px 4px; padding: 2px 4px;
color: $red-600; color: $code-color;
background-color: $red-100; background-color: $gray-100;
border-radius: $border-radius-default; border-radius: $border-radius-default;
.code > & { .code > & {
......
...@@ -592,7 +592,7 @@ pre { ...@@ -592,7 +592,7 @@ pre {
word-wrap: break-word; word-wrap: break-word;
color: $gl-text-color; color: $gl-text-color;
background-color: $gray-light; background-color: $gray-light;
border: 1px solid $border-color; border: 1px solid $gray-200;
border-radius: $border-radius-small; border-radius: $border-radius-small;
} }
......
...@@ -356,6 +356,7 @@ $list-text-height: 42px; ...@@ -356,6 +356,7 @@ $list-text-height: 42px;
*/ */
$code-font-size: 90%; $code-font-size: 90%;
$code-line-height: 1.6; $code-line-height: 1.6;
$code-color: $gray-950;
/* /*
* Tooltips * Tooltips
......
...@@ -7,7 +7,7 @@ module Groups ...@@ -7,7 +7,7 @@ module Groups
before_action :authorize_admin_group! before_action :authorize_admin_group!
before_action :authorize_update_max_artifacts_size!, only: [:update] before_action :authorize_update_max_artifacts_size!, only: [:update]
before_action do before_action do
push_frontend_feature_flag(:new_variables_ui, @group) push_frontend_feature_flag(:new_variables_ui, @group, default_enabled: true)
end end
before_action :define_variables, only: [:show, :create_deploy_token] before_action :define_variables, only: [:show, :create_deploy_token]
......
...@@ -60,10 +60,10 @@ class Projects::PipelinesController < Projects::ApplicationController ...@@ -60,10 +60,10 @@ class Projects::PipelinesController < Projects::ApplicationController
.new(project, current_user, create_params) .new(project, current_user, create_params)
.execute(:web, ignore_skip_ci: true, save_on_errors: false) .execute(:web, ignore_skip_ci: true, save_on_errors: false)
if @pipeline.persisted? if @pipeline.created_successfully?
redirect_to project_pipeline_path(project, @pipeline) redirect_to project_pipeline_path(project, @pipeline)
else else
render 'new' render 'new', status: :bad_request
end end
end end
......
...@@ -6,7 +6,7 @@ module Projects ...@@ -6,7 +6,7 @@ module Projects
before_action :authorize_admin_pipeline! before_action :authorize_admin_pipeline!
before_action :define_variables before_action :define_variables
before_action do before_action do
push_frontend_feature_flag(:new_variables_ui, @project) push_frontend_feature_flag(:new_variables_ui, @project, default_enabled: true)
end end
def show def show
......
...@@ -17,6 +17,11 @@ module Clusters ...@@ -17,6 +17,11 @@ module Clusters
default_value_for :version, VERSION default_value_for :version, VERSION
attr_encrypted :alert_manager_token,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-gcm'
after_destroy do after_destroy do
run_after_commit do run_after_commit do
disable_prometheus_integration disable_prometheus_integration
...@@ -103,8 +108,18 @@ module Clusters ...@@ -103,8 +108,18 @@ module Clusters
false false
end end
def generate_alert_manager_token!
unless alert_manager_token.present?
update!(alert_manager_token: generate_token)
end
end
private private
def generate_token
SecureRandom.hex
end
def disable_prometheus_integration def disable_prometheus_integration
::Clusters::Applications::DeactivateServiceWorker ::Clusters::Applications::DeactivateServiceWorker
.perform_async(cluster_id, ::PrometheusService.to_param) # rubocop:disable CodeReuse/ServiceClass .perform_async(cluster_id, ::PrometheusService.to_param) # rubocop:disable CodeReuse/ServiceClass
......
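For orientation, a minimal sketch of how the encrypted token helper added above might be exercised (the record lookup is hypothetical; only `generate_alert_manager_token!`, `alert_manager_token`, and the `attr_encrypted` settings come from the diff):

```ruby
# Sketch only: generate_alert_manager_token! is a no-op when a token already exists,
# so repeated calls keep the original SecureRandom.hex value.
app = Clusters::Applications::Prometheus.last # hypothetical lookup
app.generate_alert_manager_token!             # persists an encrypted token
app.alert_manager_token                       # decrypted transparently by attr_encrypted
app.generate_alert_manager_token!             # no change; a token is already present
```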
...@@ -16,7 +16,7 @@ module VersionedDescription ...@@ -16,7 +16,7 @@ module VersionedDescription
def save_description_version def save_description_version
self.saved_description_version = nil self.saved_description_version = nil
return unless Feature.enabled?(:save_description_versions, issuing_parent) return unless Feature.enabled?(:save_description_versions, issuing_parent, default_enabled: true)
return unless saved_change_to_description? return unless saved_change_to_description?
unless description_versions.exists? unless description_versions.exists?
......
...@@ -12,6 +12,7 @@ class NotePolicy < BasePolicy ...@@ -12,6 +12,7 @@ class NotePolicy < BasePolicy
condition(:editable, scope: :subject) { @subject.editable? } condition(:editable, scope: :subject) { @subject.editable? }
condition(:can_read_noteable) { can?(:"read_#{@subject.noteable_ability_name}") } condition(:can_read_noteable) { can?(:"read_#{@subject.noteable_ability_name}") }
condition(:commit_is_deleted) { @subject.for_commit? && @subject.noteable.blank? }
condition(:is_visible) { @subject.system_note_with_references_visible_for?(@user) } condition(:is_visible) { @subject.system_note_with_references_visible_for?(@user) }
...@@ -25,12 +26,16 @@ class NotePolicy < BasePolicy ...@@ -25,12 +26,16 @@ class NotePolicy < BasePolicy
# If user can't read the issue/MR/etc then they should not be allowed to do anything to their own notes # If user can't read the issue/MR/etc then they should not be allowed to do anything to their own notes
rule { ~can_read_noteable }.policy do rule { ~can_read_noteable }.policy do
prevent :read_note
prevent :admin_note prevent :admin_note
prevent :resolve_note prevent :resolve_note
prevent :award_emoji prevent :award_emoji
end end
# Special rule for deleted commits: only prevent :read_note when the noteable is unreadable and the commit has not been deleted, so notes on deleted commits remain readable
rule { ~(can_read_noteable | commit_is_deleted) }.policy do
prevent :read_note
end
rule { is_author }.policy do rule { is_author }.policy do
enable :read_note enable :read_note
enable :admin_note enable :admin_note
......
# frozen_string_literal: true
class SnippetPolicy < PersonalSnippetPolicy
end
...@@ -4,84 +4,89 @@ ...@@ -4,84 +4,89 @@
module Metrics module Metrics
module Dashboard module Dashboard
class UpdateDashboardService < ::BaseService class UpdateDashboardService < ::BaseService
include Stepable
ALLOWED_FILE_TYPE = '.yml' ALLOWED_FILE_TYPE = '.yml'
USER_DASHBOARDS_DIR = ::Metrics::Dashboard::ProjectDashboardService::DASHBOARD_ROOT USER_DASHBOARDS_DIR = ::Metrics::Dashboard::ProjectDashboardService::DASHBOARD_ROOT
# rubocop:disable Cop/BanCatchThrow steps :check_push_authorized,
def execute :check_branch_name,
catch(:error) do :check_file_type,
throw(:error, error(_(%q(You are not allowed to push into this branch. Create another branch or open a merge request.)), :forbidden)) unless push_authorized? :update_file
result = ::Files::UpdateService.new(project, current_user, dashboard_attrs).execute
throw(:error, result.merge(http_status: :bad_request)) unless result[:status] == :success
success(result.merge(http_status: :created, dashboard: dashboard_details)) def execute
end execute_steps
end end
# rubocop:enable Cop/BanCatchThrow
private private
def dashboard_attrs def check_push_authorized(result)
{ return error(_('You are not allowed to push into this branch. Create another branch or open a merge request.'), :forbidden) unless push_authorized?
commit_message: params[:commit_message],
file_path: update_dashboard_path, success(result)
file_content: update_dashboard_content,
encoding: 'text',
branch_name: branch,
start_branch: repository.branch_exists?(branch) ? branch : project.default_branch
}
end end
def dashboard_details def check_branch_name(result)
{ return error(_('There was an error updating the dashboard, branch name is invalid.'), :bad_request) unless valid_branch_name?
path: update_dashboard_path, return error(_('There was an error updating the dashboard, branch named: %{branch} already exists.') % { branch: params[:branch] }, :bad_request) unless new_or_default_branch?
display_name: ::Metrics::Dashboard::ProjectDashboardService.name_for_path(update_dashboard_path),
default: false, success(result)
system_dashboard: false
}
end end
def push_authorized? def check_file_type(result)
Gitlab::UserAccess.new(current_user, project: project).can_push_to_branch?(branch) return error(_('The file name should have a .yml extension'), :bad_request) unless target_file_type_valid?
success(result)
end end
# rubocop:disable Cop/BanCatchThrow def update_file(result)
def branch file_update_response = ::Files::UpdateService.new(project, current_user, dashboard_attrs).execute
@branch ||= begin
throw(:error, error(_('There was an error updating the dashboard, branch name is invalid.'), :bad_request)) unless valid_branch_name?
throw(:error, error(_('There was an error updating the dashboard, branch named: %{branch} already exists.') % { branch: params[:branch] }, :bad_request)) unless new_or_default_branch?
params[:branch] if file_update_response[:status] == :success
success(result.merge(file_update_response, http_status: :created, dashboard: dashboard_details))
else
error(file_update_response[:message], :bad_request)
end end
end end
# rubocop:enable Cop/BanCatchThrow
def new_or_default_branch? def push_authorized?
!repository.branch_exists?(params[:branch]) || project.default_branch == params[:branch] Gitlab::UserAccess.new(current_user, project: project).can_push_to_branch?(branch)
end end
def valid_branch_name? def valid_branch_name?
Gitlab::GitRefValidator.validate(params[:branch]) Gitlab::GitRefValidator.validate(branch)
end end
def update_dashboard_path def new_or_default_branch?
File.join(USER_DASHBOARDS_DIR, file_name) !repository.branch_exists?(branch) || project.default_branch == branch
end end
def target_file_type_valid? def target_file_type_valid?
File.extname(params[:file_name]) == ALLOWED_FILE_TYPE File.extname(params[:file_name]) == ALLOWED_FILE_TYPE
end end
# rubocop:disable Cop/BanCatchThrow def dashboard_attrs
{
commit_message: params[:commit_message],
file_path: update_dashboard_path,
file_content: update_dashboard_content,
encoding: 'text',
branch_name: branch,
start_branch: repository.branch_exists?(branch) ? branch : project.default_branch
}
end
def update_dashboard_path
File.join(USER_DASHBOARDS_DIR, file_name)
end
def file_name def file_name
@file_name ||= begin @file_name ||= File.basename(CGI.unescape(params[:file_name]))
throw(:error, error(_('The file name should have a .yml extension'), :bad_request)) unless target_file_type_valid? end
File.basename(CGI.unescape(params[:file_name])) def branch
end @branch ||= params[:branch]
end end
# rubocop:enable Cop/BanCatchThrow
def update_dashboard_content def update_dashboard_content
::PerformanceMonitoring::PrometheusDashboard.from_json(params[:file_content]).to_yaml ::PerformanceMonitoring::PrometheusDashboard.from_json(params[:file_content]).to_yaml
...@@ -90,6 +95,15 @@ module Metrics ...@@ -90,6 +95,15 @@ module Metrics
def repository def repository
@repository ||= project.repository @repository ||= project.repository
end end
def dashboard_details
{
path: update_dashboard_path,
display_name: ::Metrics::Dashboard::ProjectDashboardService.name_for_path(update_dashboard_path),
default: false,
system_dashboard: false
}
end
end end
end end
end end
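The rewrite above replaces throw/catch control flow with the `Stepable` concern: each step receives the accumulated result hash and returns `success(result)` to continue or `error(message, status)` to halt the chain. A condensed, illustrative sketch of that pattern (class and step names are placeholders, not from this commit):

```ruby
# Illustrative Stepable-style service, assuming the same success/error helpers
# from ::BaseService that UpdateDashboardService uses above.
class ExampleStepService < ::BaseService
  include Stepable

  steps :check_params,
        :persist_change

  def execute
    execute_steps
  end

  private

  def check_params(result)
    return error(_('Missing file name'), :bad_request) unless params[:file_name].present?

    success(result)
  end

  def persist_change(result)
    success(result.merge(http_status: :created))
  end
end
```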
...@@ -2,8 +2,6 @@ ...@@ -2,8 +2,6 @@
module Projects module Projects
class ImportService < BaseService class ImportService < BaseService
include Gitlab::ShellAdapter
Error = Class.new(StandardError) Error = Class.new(StandardError)
# Returns true if this importer is supposed to perform its work in the # Returns true if this importer is supposed to perform its work in the
...@@ -72,9 +70,9 @@ module Projects ...@@ -72,9 +70,9 @@ module Projects
project.ensure_repository project.ensure_repository
project.repository.fetch_as_mirror(project.import_url, refmap: refmap) project.repository.fetch_as_mirror(project.import_url, refmap: refmap)
else else
gitlab_shell.import_project_repository(project) project.repository.import_repository(project.import_url)
end end
rescue Gitlab::Shell::Error => e rescue ::Gitlab::Git::CommandError => e
# Expire cache to prevent scenarios such as: # Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true # 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
# 2. Retried import, repo is broken or not imported but +exists?+ still returns true # 2. Retried import, repo is broken or not imported but +exists?+ still returns true
......
...@@ -3,6 +3,11 @@ ...@@ -3,6 +3,11 @@
class SearchService class SearchService
include Gitlab::Allowable include Gitlab::Allowable
REDACTABLE_RESULTS = [
ActiveRecord::Relation,
Gitlab::Search::FoundBlob
].freeze
SEARCH_TERM_LIMIT = 64 SEARCH_TERM_LIMIT = 64
SEARCH_CHAR_LIMIT = 4096 SEARCH_CHAR_LIMIT = 4096
...@@ -60,11 +65,52 @@ class SearchService ...@@ -60,11 +65,52 @@ class SearchService
end end
def search_objects def search_objects
@search_objects ||= search_results.objects(scope, params[:page]) @search_objects ||= redact_unauthorized_results(search_results.objects(scope, params[:page]))
end
def redactable_results
REDACTABLE_RESULTS
end end
private private
def visible_result?(object)
return true unless object.respond_to?(:to_ability_name) && DeclarativePolicy.has_policy?(object)
Ability.allowed?(current_user, :"read_#{object.to_ability_name}", object)
end
def redact_unauthorized_results(results)
return results unless redactable_results.any? { |redactable| results.is_a?(redactable) }
permitted_results = results.select do |object|
visible_result?(object)
end
filtered_results = (results - permitted_results).each_with_object({}) do |object, memo|
memo[object.id] = { ability: :"read_#{object.to_ability_name}", id: object.id, class_name: object.class.name }
end
log_redacted_search_results(filtered_results.values) if filtered_results.any?
return results.id_not_in(filtered_results.keys) if results.is_a?(ActiveRecord::Relation)
Kaminari.paginate_array(
permitted_results,
total_count: results.total_count,
limit: results.limit_value,
offset: results.offset_value
)
end
def log_redacted_search_results(filtered_results)
logger.error(message: "redacted_search_results", filtered: filtered_results, current_user_id: current_user&.id, query: params[:search])
end
def logger
@logger ||= ::Gitlab::RedactedSearchResultsLogger.build
end
def search_service def search_service
@search_service ||= @search_service ||=
if project if project
......
...@@ -394,7 +394,7 @@ module ObjectStorage ...@@ -394,7 +394,7 @@ module ObjectStorage
def storage_for(store) def storage_for(store)
case store case store
when Store::REMOTE when Store::REMOTE
raise 'Object Storage is not enabled' unless self.class.object_store_enabled? raise "Object Storage is not enabled for #{self.class}" unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self) CarrierWave::Storage::Fog.new(self)
when Store::LOCAL when Store::LOCAL
......
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
- link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('ci/variables/README', anchor: 'protected-variables') } - link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('ci/variables/README', anchor: 'protected-variables') }
= s_('Environment variables are configured by your administrator to be %{link_start}protected%{link_end} by default').html_safe % { link_start: link_start, link_end: '</a>'.html_safe } = s_('Environment variables are configured by your administrator to be %{link_start}protected%{link_end} by default').html_safe % { link_start: link_start, link_end: '</a>'.html_safe }
- if Feature.enabled?(:new_variables_ui, @project || @group) - if Feature.enabled?(:new_variables_ui, @project || @group, default_enabled: true)
- is_group = !@group.nil? - is_group = !@group.nil?
#js-ci-project-variables{ data: { endpoint: save_endpoint, project_id: @project&.id || '', group: is_group.to_s, maskable_regex: ci_variable_maskable_regex} } #js-ci-project-variables{ data: { endpoint: save_endpoint, project_id: @project&.id || '', group: is_group.to_s, maskable_regex: ci_variable_maskable_regex} }
......
...@@ -866,8 +866,8 @@ ...@@ -866,8 +866,8 @@
:idempotent: :idempotent:
- :name: chat_notification - :name: chat_notification
:feature_category: :chatops :feature_category: :chatops
:has_external_dependencies: :has_external_dependencies: true
:urgency: :high :urgency: :low
:resource_boundary: :unknown :resource_boundary: :unknown
:weight: 2 :weight: 2
:idempotent: :idempotent:
......
...@@ -7,13 +7,9 @@ class ChatNotificationWorker # rubocop:disable Scalability/IdempotentWorker ...@@ -7,13 +7,9 @@ class ChatNotificationWorker # rubocop:disable Scalability/IdempotentWorker
sidekiq_options retry: false sidekiq_options retry: false
feature_category :chatops feature_category :chatops
urgency :high urgency :low # Can't be high as it has external dependencies
weight 2 weight 2
worker_has_external_dependencies!
# TODO: break this into multiple jobs
# as the `responder` uses external dependencies
# See https://gitlab.com/gitlab-com/gl-infra/scalability/issues/34
# worker_has_external_dependencies!
RESCHEDULE_INTERVAL = 2.seconds RESCHEDULE_INTERVAL = 2.seconds
RESCHEDULE_TIMEOUT = 5.minutes RESCHEDULE_TIMEOUT = 5.minutes
......
...@@ -2,7 +2,6 @@ ...@@ -2,7 +2,6 @@
class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker include ApplicationWorker
include Gitlab::ShellAdapter
include ProjectStartImport include ProjectStartImport
include ProjectImportOptions include ProjectImportOptions
...@@ -27,18 +26,8 @@ class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker ...@@ -27,18 +26,8 @@ class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker
Gitlab::Metrics.add_event(:fork_repository) Gitlab::Metrics.add_event(:fork_repository)
result = gitlab_shell.fork_repository(source_project, target_project) gitaly_fork!(source_project, target_project)
link_lfs_objects(source_project, target_project)
if result
link_lfs_objects(source_project, target_project)
else
raise_fork_failure(
source_project,
target_project,
'Failed to create fork repository'
)
end
target_project.after_import target_project.after_import
end end
...@@ -49,6 +38,17 @@ class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker ...@@ -49,6 +38,17 @@ class RepositoryForkWorker # rubocop:disable Scalability/IdempotentWorker
false false
end end
def gitaly_fork!(source_project, target_project)
source_repo = source_project.repository.raw
target_repo = target_project.repository.raw
::Gitlab::GitalyClient::RepositoryService.new(target_repo).fork_repository(source_repo)
rescue GRPC::BadStatus => e
Gitlab::ErrorTracking.track_exception(e, source_project_id: source_project.id, target_project_id: target_project.id)
raise_fork_failure(source_project, target_project, 'Failed to create fork repository')
end
def link_lfs_objects(source_project, target_project) def link_lfs_objects(source_project, target_project)
Projects::LfsPointers::LfsLinkService Projects::LfsPointers::LfsLinkService
.new(target_project) .new(target_project)
......
#!/bin/sh
set -e
cd $(dirname $0)/..
app_root=$(pwd)
puma_pidfile="$app_root/tmp/pids/puma_actioncable.pid"
puma_config="$app_root/config/puma_actioncable.rb"
spawn_puma()
{
exec bundle exec puma --config "${puma_config}" --environment "$RAILS_ENV" "$@"
}
get_puma_pid()
{
pid=$(cat "${puma_pidfile}")
if [ -z "$pid" ] ; then
echo "Could not find a PID in $puma_pidfile"
exit 1
fi
echo "${pid}"
}
start()
{
spawn_puma -d
}
start_foreground()
{
spawn_puma
}
stop()
{
get_puma_pid
kill -QUIT "$(get_puma_pid)"
}
reload()
{
kill -USR2 "$(get_puma_pid)"
}
case "$1" in
start)
start
;;
start_foreground)
start_foreground
;;
stop)
stop
;;
reload)
reload
;;
*)
echo "Usage: RAILS_ENV=your_env $0 {start|start_foreground|stop|reload}"
;;
esac
# frozen_string_literal: true
require ::File.expand_path('../../config/environment', __FILE__)
Rails.application.eager_load!
run ActionCable.server
---
title: Deemphasized styles for inline code blocks
merge_request:
author:
type: changed
---
title: Update UI for project and group settings CI variables
merge_request: 27411
author:
type: added
...@@ -8,6 +8,7 @@ require 'active_record/railtie' ...@@ -8,6 +8,7 @@ require 'active_record/railtie'
require 'action_controller/railtie' require 'action_controller/railtie'
require 'action_view/railtie' require 'action_view/railtie'
require 'action_mailer/railtie' require 'action_mailer/railtie'
require 'action_cable/engine'
require 'rails/test_unit/railtie' require 'rails/test_unit/railtie'
Bundler.require(*Rails.groups) Bundler.require(*Rails.groups)
......
# frozen_string_literal: true
Rails.application.configure do
# Prevents the default engine from being mounted because
# we're running ActionCable as a standalone server
config.action_cable.mount_path = nil
config.action_cable.url = Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, '/-/cable')
end
# frozen_string_literal: true
# -----------------------------------------------------------------------
# This file is used by the GDK to generate a default config/puma_actioncable.rb file
# Note that `/home/git` will be substituted for the actual GDK root
# directory when this file is generated
# -----------------------------------------------------------------------
# Load "path" as a rackup file.
#
# The default is "cable/config.ru".
#
rackup 'cable/config.ru'
pidfile '/home/git/gitlab/tmp/pids/puma_actioncable.pid'
state_path '/home/git/gitlab/tmp/pids/puma_actioncable.state'
## Uncomment the lines if you would like to write puma stdout & stderr streams
## to a different location than rails logs.
## When using GitLab Development Kit, by default, these logs will be consumed
## by runit and can be accessed using `gdk tail rails-actioncable`
# stdout_redirect '/home/git/gitlab/log/puma_actioncable.stdout.log',
# '/home/git/gitlab/log/puma_actioncable.stderr.log',
# true
# Configure "min" to be the minimum number of threads to use to answer
# requests and "max" the maximum.
#
# The default is "0, 16".
#
threads 1, 4
# By default, workers accept all requests and queue them to pass to handlers.
# When false, workers accept the number of simultaneous requests configured.
#
# Queueing requests generally improves performance, but can cause deadlocks if
# the app is waiting on a request to itself. See https://github.com/puma/puma/issues/612
#
# When set to false this may require a reverse proxy to handle slow clients and
# queue requests before they reach puma. This is due to disabling HTTP keepalive
queue_requests false
# Bind the server to "url". "tcp://", "unix://" and "ssl://" are the only
# accepted protocols.
bind 'unix:///home/git/gitlab_actioncable.socket'
workers 2
require_relative "/home/git/gitlab/lib/gitlab/cluster/lifecycle_events"
on_restart do
# Signal application hooks that we're about to restart
Gitlab::Cluster::LifecycleEvents.do_before_master_restart
end
before_fork do
# Signal to the puma killer
Gitlab::Cluster::PumaWorkerKillerInitializer.start @config.options unless ENV['DISABLE_PUMA_WORKER_KILLER']
# Signal application hooks that we're about to fork
Gitlab::Cluster::LifecycleEvents.do_before_fork
end
Gitlab::Cluster::LifecycleEvents.set_puma_options @config.options
on_worker_boot do
# Signal application hooks of worker start
Gitlab::Cluster::LifecycleEvents.do_worker_start
end
# Preload the application before starting the workers; this conflicts with
# phased restart feature. (off by default)
preload_app!
tag 'gitlab-actioncable-puma-worker'
# Verifies that all workers have checked in to the master process within
# the given timeout. If not the worker process will be restarted. Default
# value is 60 seconds.
#
worker_timeout 60
# Use json formatter
require_relative "/home/git/gitlab/lib/gitlab/puma_logging/json_formatter"
json_formatter = Gitlab::PumaLogging::JSONFormatter.new
log_formatter do |str|
json_formatter.call(str)
end
...@@ -301,12 +301,13 @@ It is required to write tests for: ...@@ -301,12 +301,13 @@ It is required to write tests for:
- The background migration itself. - The background migration itself.
- A cleanup migration. - A cleanup migration.
You can use the `:migration` RSpec tag when testing the migrations. The `:migration` and `schema: :latest` RSpec tags are automatically set for
background migration specs.
See the See the
[Testing Rails migrations](testing_guide/testing_migrations_guide.md#testing-a-non-activerecordmigration-class) [Testing Rails migrations](testing_guide/testing_migrations_guide.md#testing-a-non-activerecordmigration-class)
style guide. style guide.
When you do that, keep in mind that `before` and `after` RSpec hooks are going Keep in mind that `before` and `after` RSpec hooks are going
to migrate your database down and up, which can result in other background to migrate your database down and up, which can result in other background
migrations being called. That means that using `spy` test doubles with migrations being called. That means that using `spy` test doubles with
`have_received` is encouraged, instead of using regular test doubles, because `have_received` is encouraged, instead of using regular test doubles, because
......
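A hedged illustration of the spy pattern this paragraph recommends (the worker call, migration name, and arguments are placeholders, not taken from this commit):

```ruby
# Sketch only: assert scheduling through a spy so the before/after schema hooks
# that :migration specs run do not interfere with ordinary doubles.
it 'schedules the background migration in batches' do
  allow(BackgroundMigrationWorker).to receive(:perform_in)

  migrate!

  expect(BackgroundMigrationWorker)
    .to have_received(:perform_in)
    .with(2.minutes, 'ExampleDataMigration', [1, 100])
end
```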
...@@ -158,7 +158,9 @@ end ...@@ -158,7 +158,9 @@ end
To test a non-`ActiveRecord::Migration` class (a background migration), To test a non-`ActiveRecord::Migration` class (a background migration),
you will need to manually provide a required schema version. Please add a you will need to manually provide a required schema version. Please add a
schema tag to a context that you want to switch the database schema within. `schema` tag to a context that you want to switch the database schema within.
If not set, `schema` defaults to `:latest`.
Example: Example:
......
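The example that follows in the guide is truncated in this view; a representative version (class name and schema version are placeholders) would look roughly like:

```ruby
# Sketch only: pin the database schema for a background migration spec.
describe Gitlab::BackgroundMigration::ExampleDataMigration, schema: 20200301000000 do
  # specs here run against the schema as of the given migration version;
  # omitting the tag now defaults to schema: :latest
end
```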
# Air-gapped GitLab
Computers in an air-gapped network are isolated from the public internet as a security measure.
This page lists all the information available for running GitLab in an air-gapped environment.
## Features
Follow these best practices to use GitLab's features in an offline environment:
- [Operating the GitLab Secure scanners in an offline environment](../../user/application_security/offline_deployments/index.md).
...@@ -23,7 +23,7 @@ GitLab checks the Container Scanning report, compares the found vulnerabilities ...@@ -23,7 +23,7 @@ GitLab checks the Container Scanning report, compares the found vulnerabilities
between the source and target branches, and shows the information right on the between the source and target branches, and shows the information right on the
merge request. merge request.
![Container Scanning Widget](img/container_scanning.png) ![Container Scanning Widget](img/container_scanning_v12_9.png)
## Use cases ## Use cases
......
...@@ -35,12 +35,12 @@ NOTE: **Note:** ...@@ -35,12 +35,12 @@ NOTE: **Note:**
This comparison logic uses only the latest pipeline executed for the target branch's base commit. This comparison logic uses only the latest pipeline executed for the target branch's base commit.
Running the pipeline on any other commit has no effect on the merge request. Running the pipeline on any other commit has no effect on the merge request.
![DAST Widget](img/dast_all.png) ![DAST Widget](img/dast_all_v12_9.png)
By clicking on one of the detected linked vulnerabilities, you will be able to By clicking on one of the detected linked vulnerabilities, you will be able to
see the details and the URL(s) affected. see the details and the URL(s) affected.
![DAST Widget Clicked](img/dast_single.png) ![DAST Widget Clicked](img/dast_single_v12_9.png)
[Dynamic Application Security Testing (DAST)](https://en.wikipedia.org/wiki/Dynamic_Application_Security_Testing) [Dynamic Application Security Testing (DAST)](https://en.wikipedia.org/wiki/Dynamic_Application_Security_Testing)
uses the popular open source tool [OWASP ZAProxy](https://github.com/zaproxy/zaproxy) uses the popular open source tool [OWASP ZAProxy](https://github.com/zaproxy/zaproxy)
......
...@@ -25,7 +25,7 @@ that is provided by [Auto DevOps](../../../topics/autodevops/index.md). ...@@ -25,7 +25,7 @@ that is provided by [Auto DevOps](../../../topics/autodevops/index.md).
GitLab checks the SAST report, compares the found vulnerabilities between the GitLab checks the SAST report, compares the found vulnerabilities between the
source and target branches, and shows the information right on the merge request. source and target branches, and shows the information right on the merge request.
![SAST Widget](img/sast.png) ![SAST Widget](img/sast_v12_9.png)
The results are sorted by the priority of the vulnerability: The results are sorted by the priority of the vulnerability:
......
...@@ -3,8 +3,6 @@ ...@@ -3,8 +3,6 @@
module Gitlab module Gitlab
module BitbucketImport module BitbucketImport
class Importer class Importer
include Gitlab::ShellAdapter
LABELS = [{ title: 'bug', color: '#FF0000' }, LABELS = [{ title: 'bug', color: '#FF0000' },
{ title: 'enhancement', color: '#428BCA' }, { title: 'enhancement', color: '#428BCA' },
{ title: 'proposal', color: '#69D100' }, { title: 'proposal', color: '#69D100' },
...@@ -80,7 +78,7 @@ module Gitlab ...@@ -80,7 +78,7 @@ module Gitlab
wiki = WikiFormatter.new(project) wiki = WikiFormatter.new(project)
gitlab_shell.import_wiki_repository(project, wiki) project.wiki.repository.import_repository(wiki.import_url)
rescue StandardError => e rescue StandardError => e
errors << { type: :wiki, errors: e.message } errors << { type: :wiki, errors: e.message }
end end
......
...@@ -793,6 +793,14 @@ module Gitlab ...@@ -793,6 +793,14 @@ module Gitlab
end end
end end
def import_repository(url)
raise ArgumentError, "don't use disk paths with import_repository: #{url.inspect}" if url.start_with?('.', '/')
wrapped_gitaly_errors do
gitaly_repository_client.import_repository(url)
end
end
def blob_at(sha, path) def blob_at(sha, path)
Gitlab::Git::Blob.find(self, sha, path) unless Gitlab::Git.blank_ref?(sha) Gitlab::Git::Blob.find(self, sha, path) unless Gitlab::Git.blank_ref?(sha)
end end
......
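Callers elsewhere in this commit switch from `gitlab_shell` helpers to this method; a rough sketch of the new call pattern (the error handling shown is an assumption; each service wraps the failure in its own way):

```ruby
# Sketch of how importers now invoke the Gitaly-backed import.
begin
  project.repository.import_repository(project.import_url)
rescue ::Gitlab::Git::CommandError => e
  # e.g. report and let the import be marked as failed
  Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
end
```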
...@@ -4,7 +4,6 @@ module Gitlab ...@@ -4,7 +4,6 @@ module Gitlab
module GithubImport module GithubImport
module Importer module Importer
class RepositoryImporter class RepositoryImporter
include Gitlab::ShellAdapter
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
attr_reader :project, :client, :wiki_formatter attr_reader :project, :client, :wiki_formatter
...@@ -65,10 +64,10 @@ module Gitlab ...@@ -65,10 +64,10 @@ module Gitlab
end end
def import_wiki_repository def import_wiki_repository
gitlab_shell.import_wiki_repository(project, wiki_formatter) project.wiki.repository.import_repository(wiki_formatter.import_url)
true true
rescue Gitlab::Shell::Error => e rescue ::Gitlab::Git::CommandError => e
if e.message !~ /repository not exported/ if e.message !~ /repository not exported/
project.create_wiki project.create_wiki
fail_import("Failed to import the wiki: #{e.message}") fail_import("Failed to import the wiki: #{e.message}")
......
...@@ -3,8 +3,6 @@ ...@@ -3,8 +3,6 @@
module Gitlab module Gitlab
module LegacyGithubImport module LegacyGithubImport
class Importer class Importer
include Gitlab::ShellAdapter
def self.refmap def self.refmap
Gitlab::GithubImport.refmap Gitlab::GithubImport.refmap
end end
...@@ -264,11 +262,11 @@ module Gitlab ...@@ -264,11 +262,11 @@ module Gitlab
end end
def import_wiki def import_wiki
unless project.wiki.repository_exists? return if project.wiki.repository_exists?
wiki = WikiFormatter.new(project)
gitlab_shell.import_wiki_repository(project, wiki) wiki = WikiFormatter.new(project)
end project.wiki.repository.import_repository(wiki.import_url)
rescue Gitlab::Shell::Error => e rescue ::Gitlab::Git::CommandError => e
# GitHub error message when the wiki repo has not been created, # GitHub error message when the wiki repo has not been created,
# this means that repo has wiki enabled, but have no pages. So, # this means that repo has wiki enabled, but have no pages. So,
# we can skip the import. # we can skip the import.
......
# frozen_string_literal: true
module Gitlab
class RedactedSearchResultsLogger < ::Gitlab::JsonLogger
def self.file_name_noext
'redacted_search_results'
end
end
end
# frozen_string_literal: true # frozen_string_literal: true
# Gitaly note: SSH key operations are not part of Gitaly so will never be migrated.
require 'securerandom' require 'securerandom'
module Gitlab module Gitlab
# This class is an artifact of a time when common repository operations were
# performed by calling out to scripts in the gitlab-shell project. Now, these
# operations are all performed by Gitaly, and are mostly accessible through
# the Repository class. Prefer using a Repository to functionality here.
#
# Legacy code relating to namespaces still relies on Gitlab::Shell; it can be
# converted to a module once https://gitlab.com/groups/gitlab-org/-/epics/2320
# is completed. https://gitlab.com/gitlab-org/gitlab/-/issues/25095 tracks it.
class Shell class Shell
Error = Class.new(StandardError) Error = Class.new(StandardError)
...@@ -77,47 +83,6 @@ module Gitlab ...@@ -77,47 +83,6 @@ module Gitlab
end end
end end
# Import wiki repository from external service
#
# @param [Project] project
# @param [Gitlab::LegacyGithubImport::WikiFormatter, Gitlab::BitbucketImport::WikiFormatter] wiki_formatter
# @return [Boolean] whether repository could be imported
def import_wiki_repository(project, wiki_formatter)
import_repository(project.repository_storage, wiki_formatter.disk_path, wiki_formatter.import_url, project.wiki.full_path)
end
# Import project repository from external service
#
# @param [Project] project
# @return [Boolean] whether repository could be imported
def import_project_repository(project)
import_repository(project.repository_storage, project.disk_path, project.import_url, project.full_path)
end
# Import repository
#
# @example Import a repository
# import_repository("nfs-file06", "gitlab/gitlab-ci", "https://gitlab.com/gitlab-org/gitlab-test.git", "gitlab/gitlab-ci")
#
# @param [String] storage project's storage name
# @param [String] disk_path project path on disk
# @param [String] url from external resource to import from
# @param [String] gl_project_path project name
# @return [Boolean] whether repository could be imported
def import_repository(storage, disk_path, url, gl_project_path)
if url.start_with?('.', '/')
raise Error.new("don't use disk paths with import_repository: #{url.inspect}")
end
relative_path = "#{disk_path}.git"
cmd = GitalyGitlabProjects.new(storage, relative_path, gl_project_path)
success = cmd.import_project(url, git_timeout)
raise Error, cmd.output unless success
success
end
# Move or rename a repository # Move or rename a repository
# #
# @example Move/rename a repository # @example Move/rename a repository
...@@ -127,6 +92,8 @@ module Gitlab ...@@ -127,6 +92,8 @@ module Gitlab
# @param [String] disk_path current project path on disk # @param [String] disk_path current project path on disk
# @param [String] new_disk_path new project path on disk # @param [String] new_disk_path new project path on disk
# @return [Boolean] whether repository could be moved/renamed on disk # @return [Boolean] whether repository could be moved/renamed on disk
#
# @deprecated
def mv_repository(storage, disk_path, new_disk_path) def mv_repository(storage, disk_path, new_disk_path)
return false if disk_path.empty? || new_disk_path.empty? return false if disk_path.empty? || new_disk_path.empty?
...@@ -139,17 +106,6 @@ module Gitlab ...@@ -139,17 +106,6 @@ module Gitlab
false false
end end
# Fork repository to new path
#
# @param [Project] source_project forked-from Project
# @param [Project] target_project forked-to Project
def fork_repository(source_project, target_project)
forked_from_relative_path = "#{source_project.disk_path}.git"
fork_args = [target_project.repository_storage, "#{target_project.disk_path}.git", target_project.full_path]
GitalyGitlabProjects.new(source_project.repository_storage, forked_from_relative_path, source_project.full_path).fork_repository(*fork_args)
end
# Removes a repository from the file system, using rm_directory which is an alias # for rm_namespace. Given the underlying implementation removes the name
# for rm_namespace. Given the underlying implementation removes the name # for rm_namespace. Given the underlying implementation removes the name
# passed as second argument on the passed storage. # passed as second argument on the passed storage.
...@@ -159,6 +115,8 @@ module Gitlab ...@@ -159,6 +115,8 @@ module Gitlab
# #
# @param [String] storage project's storage path # @param [String] storage project's storage path
# @param [String] disk_path current project path on disk # @param [String] disk_path current project path on disk
#
# @deprecated
def remove_repository(storage, disk_path) def remove_repository(storage, disk_path)
return false if disk_path.empty? return false if disk_path.empty?
...@@ -179,6 +137,8 @@ module Gitlab ...@@ -179,6 +137,8 @@ module Gitlab
# #
# @param [String] storage project's storage path # @param [String] storage project's storage path
# @param [String] name namespace name # @param [String] name namespace name
#
# @deprecated
def add_namespace(storage, name) def add_namespace(storage, name)
Gitlab::GitalyClient.allow_n_plus_1_calls do Gitlab::GitalyClient.allow_n_plus_1_calls do
Gitlab::GitalyClient::NamespaceService.new(storage).add(name) Gitlab::GitalyClient::NamespaceService.new(storage).add(name)
...@@ -195,6 +155,8 @@ module Gitlab ...@@ -195,6 +155,8 @@ module Gitlab
# #
# @param [String] storage project's storage path # @param [String] storage project's storage path
# @param [String] name namespace name # @param [String] name namespace name
#
# @deprecated
def rm_namespace(storage, name) def rm_namespace(storage, name)
Gitlab::GitalyClient::NamespaceService.new(storage).remove(name) Gitlab::GitalyClient::NamespaceService.new(storage).remove(name)
rescue GRPC::InvalidArgument => e rescue GRPC::InvalidArgument => e
...@@ -210,6 +172,8 @@ module Gitlab ...@@ -210,6 +172,8 @@ module Gitlab
# @param [String] storage project's storage path # @param [String] storage project's storage path
# @param [String] old_name current namespace name # @param [String] old_name current namespace name
# @param [String] new_name new namespace name # @param [String] new_name new namespace name
#
# @deprecated
def mv_namespace(storage, old_name, new_name) def mv_namespace(storage, old_name, new_name)
Gitlab::GitalyClient::NamespaceService.new(storage).rename(old_name, new_name) Gitlab::GitalyClient::NamespaceService.new(storage).rename(old_name, new_name)
rescue GRPC::InvalidArgument => e rescue GRPC::InvalidArgument => e
...@@ -226,67 +190,12 @@ module Gitlab ...@@ -226,67 +190,12 @@ module Gitlab
# @return [Boolean] whether repository exists or not # @return [Boolean] whether repository exists or not
# @param [String] storage project's storage path # @param [String] storage project's storage path
# @param [Object] dir_name repository dir name # @param [Object] dir_name repository dir name
#
# @deprecated
def repository_exists?(storage, dir_name) def repository_exists?(storage, dir_name)
Gitlab::Git::Repository.new(storage, dir_name, nil, nil).exists? Gitlab::Git::Repository.new(storage, dir_name, nil, nil).exists?
rescue GRPC::Internal rescue GRPC::Internal
false false
end end
protected
def full_path(storage, dir_name)
raise ArgumentError.new("Directory name can't be blank") if dir_name.blank?
File.join(Gitlab.config.repositories.storages[storage].legacy_disk_path, dir_name)
end
private
def git_timeout
Gitlab.config.gitlab_shell.git_timeout
end
def wrapped_gitaly_errors
yield
rescue GRPC::NotFound, GRPC::BadStatus => e
# Old Popen code returns [Error, output] to the caller, so we
# need to do the same here...
raise Error, e
end
class GitalyGitlabProjects
attr_reader :shard_name, :repository_relative_path, :output, :gl_project_path
def initialize(shard_name, repository_relative_path, gl_project_path)
@shard_name = shard_name
@repository_relative_path = repository_relative_path
@output = ''
@gl_project_path = gl_project_path
end
def import_project(source, _timeout)
raw_repository = Gitlab::Git::Repository.new(shard_name, repository_relative_path, nil, gl_project_path)
Gitlab::GitalyClient::RepositoryService.new(raw_repository).import_repository(source)
true
rescue GRPC::BadStatus => e
@output = e.message
false
end
def fork_repository(new_shard_name, new_repository_relative_path, new_project_name)
target_repository = Gitlab::Git::Repository.new(new_shard_name, new_repository_relative_path, nil, new_project_name)
raw_repository = Gitlab::Git::Repository.new(shard_name, repository_relative_path, nil, gl_project_path)
Gitlab::GitalyClient::RepositoryService.new(target_repository).fork_repository(raw_repository)
rescue GRPC::BadStatus => e
logger.error "fork-repository failed: #{e.message}"
false
end
def logger
Rails.logger # rubocop:disable Gitlab/RailsLogger
end
end
end end
end end
...@@ -7,6 +7,8 @@ module Quality ...@@ -7,6 +7,8 @@ module Quality
TEST_LEVEL_FOLDERS = { TEST_LEVEL_FOLDERS = {
migration: %w[ migration: %w[
migrations migrations
],
background_migration: %w[
lib/gitlab/background_migration lib/gitlab/background_migration
lib/ee/gitlab/background_migration lib/ee/gitlab/background_migration
], ],
...@@ -70,7 +72,7 @@ module Quality ...@@ -70,7 +72,7 @@ module Quality
case file_path case file_path
# Detect migration first since some background migration tests are under # Detect migration first since some background migration tests are under
# spec/lib/gitlab/background_migration and tests under spec/lib are unit by default # spec/lib/gitlab/background_migration and tests under spec/lib are unit by default
when regexp(:migration) when regexp(:migration), regexp(:background_migration)
:migration :migration
when regexp(:unit) when regexp(:unit)
:unit :unit
...@@ -83,6 +85,10 @@ module Quality ...@@ -83,6 +85,10 @@ module Quality
end end
end end
def background_migration?(file_path)
!!(file_path =~ regexp(:background_migration))
end
private private
def folders_pattern(level) def folders_pattern(level)
......
...@@ -28,7 +28,7 @@ HOOK_DATA = <<~HOOK ...@@ -28,7 +28,7 @@ HOOK_DATA = <<~HOOK
if [ -e "$harness" ] if [ -e "$harness" ]
then then
if [["$url" != *"gitlab-org/security/"*]] if [[ "$url" != *"gitlab-org/security/"* ]]
then then
echo "Pushing to remotes other than gitlab.com/gitlab-org/security has been disabled!" echo "Pushing to remotes other than gitlab.com/gitlab-org/security has been disabled!"
echo "Run scripts/security-harness to disable this check." echo "Run scripts/security-harness to disable this check."
......
...@@ -634,7 +634,8 @@ describe Projects::PipelinesController do ...@@ -634,7 +634,8 @@ describe Projects::PipelinesController do
it 'does not persist a pipeline' do it 'does not persist a pipeline' do
expect { post_request }.not_to change { project.ci_pipelines.count } expect { post_request }.not_to change { project.ci_pipelines.count }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:bad_request)
expect(response).to render_template('new')
end end
end end
......
import Vuex from 'vuex'; import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils'; import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlModal } from '@gitlab/ui'; import { GlButton } from '@gitlab/ui';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue'; import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
import createStore from '~/ci_variable_list/store'; import createStore from '~/ci_variable_list/store';
import mockData from '../services/mock_data'; import mockData from '../services/mock_data';
import ModalStub from '../stubs';
const localVue = createLocalVue(); const localVue = createLocalVue();
localVue.use(Vuex); localVue.use(Vuex);
...@@ -15,12 +16,23 @@ describe('Ci variable modal', () => { ...@@ -15,12 +16,23 @@ describe('Ci variable modal', () => {
const createComponent = () => { const createComponent = () => {
store = createStore(); store = createStore();
wrapper = shallowMount(CiVariableModal, { wrapper = shallowMount(CiVariableModal, {
stubs: {
GlModal: ModalStub,
},
localVue, localVue,
store, store,
}); });
}; };
const findModal = () => wrapper.find(GlModal); const findModal = () => wrapper.find(ModalStub);
const addOrUpdateButton = index =>
findModal()
.findAll(GlButton)
.at(index);
const deleteVariableButton = () =>
findModal()
.findAll(GlButton)
.at(1);
beforeEach(() => { beforeEach(() => {
createComponent(); createComponent();
...@@ -32,7 +44,7 @@ describe('Ci variable modal', () => { ...@@ -32,7 +44,7 @@ describe('Ci variable modal', () => {
}); });
it('button is disabled when no key/value pair are present', () => { it('button is disabled when no key/value pair are present', () => {
expect(findModal().props('actionPrimary').attributes.disabled).toBeTruthy(); expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
}); });
describe('Adding a new variable', () => { describe('Adding a new variable', () => {
...@@ -42,11 +54,11 @@ describe('Ci variable modal', () => { ...@@ -42,11 +54,11 @@ describe('Ci variable modal', () => {
}); });
it('button is enabled when key/value pair are present', () => { it('button is enabled when key/value pair are present', () => {
expect(findModal().props('actionPrimary').attributes.disabled).toBeFalsy(); expect(addOrUpdateButton(1).attributes('disabled')).toBeFalsy();
}); });
it('Add variable button dispatches addVariable action', () => { it('Add variable button dispatches addVariable action', () => {
findModal().vm.$emit('ok'); addOrUpdateButton(1).vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith('addVariable'); expect(store.dispatch).toHaveBeenCalledWith('addVariable');
}); });
...@@ -63,11 +75,11 @@ describe('Ci variable modal', () => { ...@@ -63,11 +75,11 @@ describe('Ci variable modal', () => {
}); });
it('button text is Update variable when updating', () => { it('button text is Update variable when updating', () => {
expect(wrapper.vm.modalActionText).toBe('Update variable'); expect(addOrUpdateButton(2).text()).toBe('Update variable');
}); });
it('Update variable button dispatches updateVariable with correct variable', () => { it('Update variable button dispatches updateVariable with correct variable', () => {
findModal().vm.$emit('ok'); addOrUpdateButton(2).vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith( expect(store.dispatch).toHaveBeenCalledWith(
'updateVariable', 'updateVariable',
store.state.variableBeingEdited, store.state.variableBeingEdited,
...@@ -80,7 +92,7 @@ describe('Ci variable modal', () => { ...@@ -80,7 +92,7 @@ describe('Ci variable modal', () => {
}); });
it('dispatches deleteVariable with correct variable to delete', () => { it('dispatches deleteVariable with correct variable to delete', () => {
findModal().vm.$emit('secondary'); deleteVariableButton().vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith('deleteVariable', mockData.mockVariables[0]); expect(store.dispatch).toHaveBeenCalledWith('deleteVariable', mockData.mockVariables[0]);
}); });
}); });
......
const ModalStub = {
name: 'glmodal-stub',
template: `
<div>
<slot></slot>
<slot name="modal-footer"></slot>
</div>
`,
methods: {
hide: jest.fn(),
},
};
export default ModalStub;
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
require 'spec_helper' require 'spec_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs # rubocop:disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::BackfillProjectRepositories, schema: :latest do describe Gitlab::BackgroundMigration::BackfillProjectRepositories do
let(:group) { create(:group, name: 'foo', path: 'foo') } let(:group) { create(:group, name: 'foo', path: 'foo') }
describe described_class::ShardFinder do describe described_class::ShardFinder do
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
# rubocop: disable RSpec/FactoriesInMigrationSpecs # rubocop: disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::LegacyUploadMover, schema: :latest do describe Gitlab::BackgroundMigration::LegacyUploadMover do
let(:test_dir) { FileUploader.options['storage_path'] } let(:test_dir) { FileUploader.options['storage_path'] }
let(:filename) { 'image.png' } let(:filename) { 'image.png' }
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
# rubocop: disable RSpec/FactoriesInMigrationSpecs # rubocop: disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::LegacyUploadsMigrator, schema: :latest do describe Gitlab::BackgroundMigration::LegacyUploadsMigrator do
let(:test_dir) { FileUploader.options['storage_path'] } let(:test_dir) { FileUploader.options['storage_path'] }
let!(:hashed_project) { create(:project) } let!(:hashed_project) { create(:project) }
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressCheck, schema: :latest do describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressCheck do
context 'rescheduling' do context 'rescheduling' do
context 'when there are ongoing and no dead jobs' do context 'when there are ongoing and no dead jobs' do
it 'reschedules check' do it 'reschedules check' do
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::BackgroundMigration, schema: :latest do describe Gitlab::BackgroundMigration do
describe '.queue' do describe '.queue' do
it 'returns background migration worker queue' do it 'returns background migration worker queue' do
expect(described_class.queue) expect(described_class.queue)
......
...@@ -80,8 +80,7 @@ describe Gitlab::BitbucketImport::Importer do ...@@ -80,8 +80,7 @@ describe Gitlab::BitbucketImport::Importer do
end end
let(:importer) { described_class.new(project) } let(:importer) { described_class.new(project) }
let(:gitlab_shell) { double } let(:sample) { RepoHelpers.sample_compare }
let(:issues_statuses_sample_data) do let(:issues_statuses_sample_data) do
{ {
count: sample_issues_statuses.count, count: sample_issues_statuses.count,
...@@ -89,12 +88,6 @@ describe Gitlab::BitbucketImport::Importer do ...@@ -89,12 +88,6 @@ describe Gitlab::BitbucketImport::Importer do
} }
end end
let(:sample) { RepoHelpers.sample_compare }
before do
allow(importer).to receive(:gitlab_shell) { gitlab_shell }
end
subject { described_class.new(project) } subject { described_class.new(project) }
describe '#import_pull_requests' do describe '#import_pull_requests' do
...@@ -316,7 +309,7 @@ describe Gitlab::BitbucketImport::Importer do ...@@ -316,7 +309,7 @@ describe Gitlab::BitbucketImport::Importer do
describe 'wiki import' do describe 'wiki import' do
it 'is skipped when the wiki exists' do it 'is skipped when the wiki exists' do
expect(project.wiki).to receive(:repository_exists?) { true } expect(project.wiki).to receive(:repository_exists?) { true }
expect(importer.gitlab_shell).not_to receive(:import_wiki_repository) expect(project.wiki.repository).not_to receive(:import_repository)
importer.execute importer.execute
...@@ -325,7 +318,7 @@ describe Gitlab::BitbucketImport::Importer do ...@@ -325,7 +318,7 @@ describe Gitlab::BitbucketImport::Importer do
it 'imports to the project disk_path' do it 'imports to the project disk_path' do
expect(project.wiki).to receive(:repository_exists?) { false } expect(project.wiki).to receive(:repository_exists?) { false }
expect(importer.gitlab_shell).to receive(:import_wiki_repository) expect(project.wiki.repository).to receive(:import_repository)
importer.execute importer.execute
......
...@@ -2138,6 +2138,33 @@ describe Gitlab::Git::Repository, :seed_helper do ...@@ -2138,6 +2138,33 @@ describe Gitlab::Git::Repository, :seed_helper do
end end
end end
describe '#import_repository' do
let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
let(:url) { 'http://invalid.invalid' }
it 'raises an error if an absolute path is provided' do
expect { repository.import_repository('/foo') }.to raise_error(ArgumentError, /disk path/)
end
it 'raises an error if a relative path is provided' do
expect { repository.import_repository('./foo') }.to raise_error(ArgumentError, /disk path/)
end
it 'delegates to Gitaly' do
expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |svc|
expect(svc).to receive(:import_repository).with(url).and_return(nil)
end
repository.import_repository(url)
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RepositoryService, :import_repository do
subject { repository.import_repository('http://invalid.invalid') }
end
end
describe '#replicate' do describe '#replicate' do
let(:new_repository) do let(:new_repository) do
Gitlab::Git::Repository.new('test_second_storage', TEST_REPO_PATH, '', 'group/project') Gitlab::Git::Repository.new('test_second_storage', TEST_REPO_PATH, '', 'group/project')
......
...@@ -11,10 +11,15 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do ...@@ -11,10 +11,15 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
double( double(
:wiki, :wiki,
disk_path: 'foo.wiki', disk_path: 'foo.wiki',
full_path: 'group/foo.wiki' full_path: 'group/foo.wiki',
repository: wiki_repository
) )
end end
let(:wiki_repository) do
double(:wiki_repository)
end
let(:project) do let(:project) do
double( double(
:project, :project,
...@@ -221,17 +226,19 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do ...@@ -221,17 +226,19 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
describe '#import_wiki_repository' do describe '#import_wiki_repository' do
it 'imports the wiki repository' do it 'imports the wiki repository' do
expect(importer.gitlab_shell) expect(wiki_repository)
.to receive(:import_repository) .to receive(:import_repository)
.with('foo', 'foo.wiki', 'foo.wiki.git', 'group/foo.wiki') .with(importer.wiki_url)
.and_return(true)
expect(importer.import_wiki_repository).to eq(true) expect(importer.import_wiki_repository).to eq(true)
end end
it 'marks the import as failed and creates an empty repo if an error was raised' do it 'marks the import as failed and creates an empty repo if an error was raised' do
expect(importer.gitlab_shell) expect(wiki_repository)
.to receive(:import_repository) .to receive(:import_repository)
.and_raise(Gitlab::Shell::Error) .with(importer.wiki_url)
.and_raise(Gitlab::Git::CommandError)
expect(importer) expect(importer)
.to receive(:fail_import) .to receive(:fail_import)
......
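A minimal sketch of the GitHub importer method under test, assuming fail_import returns false and that wiki_url is already defined on the importer:

def import_wiki_repository
  project.wiki.repository.import_repository(wiki_url)

  true
rescue Gitlab::Git::CommandError => e
  fail_import("Failed to import the wiki: #{e.message}")
end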
...@@ -45,7 +45,7 @@ describe Gitlab::LegacyGithubImport::Importer do ...@@ -45,7 +45,7 @@ describe Gitlab::LegacyGithubImport::Importer do
allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new) allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new)
allow_any_instance_of(Octokit::Client).to receive(:rate_limit!).and_raise(Octokit::NotFound) allow_any_instance_of(Octokit::Client).to receive(:rate_limit!).and_raise(Octokit::NotFound)
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_raise(Gitlab::Shell::Error) allow(project.wiki.repository).to receive(:import_repository).and_raise(Gitlab::Git::CommandError)
allow_any_instance_of(Octokit::Client).to receive(:user).and_return(octocat) allow_any_instance_of(Octokit::Client).to receive(:user).and_return(octocat)
allow_any_instance_of(Octokit::Client).to receive(:labels).and_return([label1, label2]) allow_any_instance_of(Octokit::Client).to receive(:labels).and_return([label1, label2])
...@@ -169,7 +169,7 @@ describe Gitlab::LegacyGithubImport::Importer do ...@@ -169,7 +169,7 @@ describe Gitlab::LegacyGithubImport::Importer do
errors: [ errors: [
{ type: :label, url: "#{api_root}/repos/octocat/Hello-World/labels/bug", errors: "Validation failed: Title can't be blank, Title is invalid" }, { type: :label, url: "#{api_root}/repos/octocat/Hello-World/labels/bug", errors: "Validation failed: Title can't be blank, Title is invalid" },
{ type: :issue, url: "#{api_root}/repos/octocat/Hello-World/issues/1348", errors: "Validation failed: Title can't be blank" }, { type: :issue, url: "#{api_root}/repos/octocat/Hello-World/issues/1348", errors: "Validation failed: Title can't be blank" },
{ type: :wiki, errors: "Gitlab::Shell::Error" } { type: :wiki, errors: "Gitlab::Git::CommandError" }
] ]
} }
......
...@@ -9,7 +9,6 @@ describe Gitlab::Shell do ...@@ -9,7 +9,6 @@ describe Gitlab::Shell do
let(:gitlab_shell) { described_class.new } let(:gitlab_shell) { described_class.new }
it { is_expected.to respond_to :remove_repository } it { is_expected.to respond_to :remove_repository }
it { is_expected.to respond_to :fork_repository }
describe '.url_to_repo' do describe '.url_to_repo' do
let(:full_path) { 'diaspora/disaspora-rails' } let(:full_path) { 'diaspora/disaspora-rails' }
...@@ -95,52 +94,6 @@ describe Gitlab::Shell do ...@@ -95,52 +94,6 @@ describe Gitlab::Shell do
expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true) expect(TestEnv.storage_dir_exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true)
end end
end end
describe '#fork_repository' do
let(:target_project) { create(:project) }
subject do
gitlab_shell.fork_repository(project, target_project)
end
it 'returns true when the command succeeds' do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:fork_repository)
.with(repository.raw_repository) { :gitaly_response_object }
is_expected.to be_truthy
end
it 'return false when the command fails' do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:fork_repository)
.with(repository.raw_repository) { raise GRPC::BadStatus, 'bla' }
is_expected.to be_falsy
end
end
describe '#import_repository' do
let(:import_url) { 'https://gitlab.com/gitlab-org/gitlab-foss.git' }
context 'with gitaly' do
it 'returns true when the command succeeds' do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:import_repository).with(import_url)
result = gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url, project.full_path)
expect(result).to be_truthy
end
it 'raises an exception when the command fails' do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:import_repository)
.with(import_url) { raise GRPC::BadStatus, 'bla' }
expect_any_instance_of(Gitlab::Shell::GitalyGitlabProjects).to receive(:output) { 'error'}
expect do
gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url, project.full_path)
end.to raise_error(Gitlab::Shell::Error, "error")
end
end
end
end end
describe 'namespace actions' do describe 'namespace actions' do
......
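With Gitlab::Shell#import_repository and #fork_repository dropped, callers move to the repository model; a hedged before/after sketch (the url variable is a placeholder):

# Before (removed): storage, disk path and full path were passed to Gitlab::Shell.
gitlab_shell.import_repository(project.repository_storage, project.disk_path, url, project.full_path)

# After (assumed caller shape): the repository wraps the Gitaly call itself.
project.repository.import_repository(url)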
...@@ -36,11 +36,13 @@ describe Gitlab::SidekiqCluster::CLI do ...@@ -36,11 +36,13 @@ describe Gitlab::SidekiqCluster::CLI do
end end
it 'allows the special * selector' do it 'allows the special * selector' do
worker_queues = %w(foo bar baz)
expect(Gitlab::SidekiqConfig::CliMethods)
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster) expect(Gitlab::SidekiqCluster)
.to receive(:start).with( .to receive(:start).with([worker_queues], default_options)
[Gitlab::SidekiqConfig::CliMethods.worker_queues],
default_options
)
cli.run(%w(*)) cli.run(%w(*))
end end
...@@ -157,15 +159,17 @@ describe Gitlab::SidekiqCluster::CLI do ...@@ -157,15 +159,17 @@ describe Gitlab::SidekiqCluster::CLI do
.with([['chat_notification'], ['project_export']], default_options) .with([['chat_notification'], ['project_export']], default_options)
.and_return([]) .and_return([])
cli.run(%w(--experimental-queue-selector feature_category=chatops&urgency=high resource_boundary=memory&feature_category=importers)) cli.run(%w(--experimental-queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
end end
it 'allows the special * selector' do it 'allows the special * selector' do
worker_queues = %w(foo bar baz)
expect(Gitlab::SidekiqConfig::CliMethods)
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster) expect(Gitlab::SidekiqCluster)
.to receive(:start).with( .to receive(:start).with([worker_queues], default_options)
[Gitlab::SidekiqConfig::CliMethods.worker_queues],
default_options
)
cli.run(%w(--experimental-queue-selector *)) cli.run(%w(--experimental-queue-selector *))
end end
......
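A minimal sketch of how the special '*' selector could expand to every known queue, matching the worker_queues stub used in both examples; the method name expand_queue_groups is an assumption:

def expand_queue_groups(arguments)
  arguments.map do |group|
    if group == '*'
      Gitlab::SidekiqConfig::CliMethods.worker_queues # every queue GitLab knows about
    else
      group.split(',')
    end
  end
end

# cli.run(%w(*)) then hands [worker_queues] to Gitlab::SidekiqCluster.start.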
...@@ -28,7 +28,14 @@ RSpec.describe Quality::TestLevel do ...@@ -28,7 +28,14 @@ RSpec.describe Quality::TestLevel do
context 'when level is migration' do context 'when level is migration' do
it 'returns a pattern' do it 'returns a pattern' do
expect(subject.pattern(:migration)) expect(subject.pattern(:migration))
.to eq("spec/{migrations,lib/gitlab/background_migration,lib/ee/gitlab/background_migration}{,/**/}*_spec.rb") .to eq("spec/{migrations}{,/**/}*_spec.rb")
end
end
context 'when level is background_migration' do
it 'returns a pattern' do
expect(subject.pattern(:background_migration))
.to eq("spec/{lib/gitlab/background_migration,lib/ee/gitlab/background_migration}{,/**/}*_spec.rb")
end end
end end
...@@ -89,7 +96,14 @@ RSpec.describe Quality::TestLevel do ...@@ -89,7 +96,14 @@ RSpec.describe Quality::TestLevel do
context 'when level is migration' do context 'when level is migration' do
it 'returns a regexp' do it 'returns a regexp' do
expect(subject.regexp(:migration)) expect(subject.regexp(:migration))
.to eq(%r{spec/(migrations|lib/gitlab/background_migration|lib/ee/gitlab/background_migration)}) .to eq(%r{spec/(migrations)})
end
end
context 'when level is background_migration' do
it 'returns a regexp' do
expect(subject.regexp(:background_migration))
.to eq(%r{spec/(lib/gitlab/background_migration|lib/ee/gitlab/background_migration)})
end end
end end
...@@ -160,4 +174,26 @@ RSpec.describe Quality::TestLevel do ...@@ -160,4 +174,26 @@ RSpec.describe Quality::TestLevel do
%r{Test level for spec/unknown/foo_spec.rb couldn't be set. Please rename the file properly or change the test level detection regexes in .+/lib/quality/test_level.rb.}) %r{Test level for spec/unknown/foo_spec.rb couldn't be set. Please rename the file properly or change the test level detection regexes in .+/lib/quality/test_level.rb.})
end end
end end
describe '#background_migration?' do
it 'returns false for a unit test' do
expect(subject.background_migration?('spec/models/abuse_report_spec.rb')).to be(false)
end
it 'returns false for a migration test' do
expect(subject.background_migration?('spec/migrations/add_default_and_free_plans_spec.rb')).to be(false)
end
it 'returns true for a background migration test' do
expect(subject.background_migration?('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to be(true)
end
it 'returns false for a geo migration test' do
expect(described_class.new('ee/').background_migration?('ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb')).to be(false)
end
it 'returns true for an EE-namespaced background migration test' do
expect(described_class.new('ee/').background_migration?('ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb')).to be(true)
end
end
end end
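A minimal predicate consistent with the #background_migration? examples above, assuming it simply reuses the :background_migration regexp:

def background_migration?(file_path)
  file_path.match?(regexp(:background_migration))
end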
...@@ -330,4 +330,46 @@ describe Clusters::Applications::Prometheus do ...@@ -330,4 +330,46 @@ describe Clusters::Applications::Prometheus do
it { is_expected.to be_falsy } it { is_expected.to be_falsy }
end end
end end
describe 'alert manager token' do
subject { create(:clusters_applications_prometheus) }
context 'when not set' do
it 'is empty by default' do
expect(subject.alert_manager_token).to be_nil
expect(subject.encrypted_alert_manager_token).to be_nil
expect(subject.encrypted_alert_manager_token_iv).to be_nil
end
describe '#generate_alert_manager_token!' do
it 'generates a token' do
subject.generate_alert_manager_token!
expect(subject.alert_manager_token).to match(/\A\h{32}\z/)
end
end
end
context 'when set' do
let(:token) { SecureRandom.hex }
before do
subject.update!(alert_manager_token: token)
end
it 'reads the token' do
expect(subject.alert_manager_token).to eq(token)
expect(subject.encrypted_alert_manager_token).not_to be_nil
expect(subject.encrypted_alert_manager_token_iv).not_to be_nil
end
describe '#generate_alert_manager_token!' do
it 'does not re-generate the token' do
subject.generate_alert_manager_token!
expect(subject.alert_manager_token).to eq(token)
end
end
end
end
end end
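A minimal model-side sketch matching the alert manager token examples; the attribute and helper names come from the assertions, while the encryption options and key source are assumptions:

attr_encrypted :alert_manager_token,
  mode: :per_attribute_iv,
  key: Settings.attr_encrypted_db_key_base_truncated, # assumed key source
  algorithm: 'aes-256-gcm'

def generate_alert_manager_token!
  return if alert_manager_token.present? # an existing token is never regenerated

  update!(alert_manager_token: SecureRandom.hex) # 16 random bytes => 32 hex chars, matching /\A\h{32}\z/
end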
...@@ -35,6 +35,18 @@ describe NotePolicy do ...@@ -35,6 +35,18 @@ describe NotePolicy do
end end
end end
context 'when the noteable is a deleted commit' do
let(:commit) { nil }
let(:note) { create(:note_on_commit, commit_id: '12345678', author: user, project: project) }
it 'allows reading the note' do
expect(policy).to be_allowed(:read_note)
expect(policy).to be_disallowed(:admin_note)
expect(policy).to be_disallowed(:resolve_note)
expect(policy).to be_disallowed(:award_emoji)
end
end
context 'when the noteable is a commit' do context 'when the noteable is a commit' do
let(:commit) { project.repository.head_commit } let(:commit) { project.repository.head_commit }
let(:note) { create(:note_on_commit, commit_id: commit.id, author: user, project: project) } let(:note) { create(:note_on_commit, commit_id: commit.id, author: user, project: project) }
......
...@@ -27,7 +27,14 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto ...@@ -27,7 +27,14 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto
end end
context 'user does not have push right to repository' do context 'user does not have push right to repository' do
it_behaves_like 'misconfigured dashboard service response', :forbidden, "You are not allowed to push into this branch. Create another branch or open a merge request." it 'returns an appropriate message and status code', :aggregate_failures do
result = service_call
expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
expect(result[:status]).to eq(:error)
expect(result[:http_status]).to eq(:forbidden)
expect(result[:message]).to eq("You are not allowed to push into this branch. Create another branch or open a merge request.")
end
end end
context 'with rights to push to the repository' do context 'with rights to push to the repository' do
...@@ -39,13 +46,27 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto ...@@ -39,13 +46,27 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto
context 'with a yml extension' do context 'with a yml extension' do
let(:file_name) { 'config/prometheus/../database.yml' } let(:file_name) { 'config/prometheus/../database.yml' }
it_behaves_like 'misconfigured dashboard service response', :bad_request, "A file with this name doesn't exist" it 'returns an appropriate message and status code', :aggregate_failures do
result = service_call
expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
expect(result[:status]).to eq(:error)
expect(result[:http_status]).to eq(:bad_request)
expect(result[:message]).to eq("A file with this name doesn't exist")
end
end end
context 'without a yml extension' do context 'without a yml extension' do
let(:file_name) { '../../..../etc/passwd' } let(:file_name) { '../../..../etc/passwd' }
it_behaves_like 'misconfigured dashboard service response', :bad_request, "The file name should have a .yml extension" it 'returns an appropriate message and status code', :aggregate_failures do
result = service_call
expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
expect(result[:status]).to eq(:error)
expect(result[:http_status]).to eq(:bad_request)
expect(result[:message]).to eq("The file name should have a .yml extension")
end
end end
end end
...@@ -60,7 +81,14 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto ...@@ -60,7 +81,14 @@ describe Metrics::Dashboard::UpdateDashboardService, :use_clean_rails_memory_sto
project.repository.add_branch(user, branch, 'master') project.repository.add_branch(user, branch, 'master')
end end
it_behaves_like 'misconfigured dashboard service response', :bad_request, "There was an error updating the dashboard, branch named: existing_branch already exists." it 'returns an appropriate message and status code', :aggregate_failures do
result = service_call
expect(result.keys).to contain_exactly(:message, :http_status, :status, :last_step)
expect(result[:status]).to eq(:error)
expect(result[:http_status]).to eq(:bad_request)
expect(result[:message]).to eq("There was an error updating the dashboard, branch named: existing_branch already exists.")
end
end end
context 'Files::UpdateService success' do context 'Files::UpdateService success' do
......
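The inlined assertions above all expect the same four-key error hash; a hedged sketch of the response helper they imply (name and signature are assumptions):

def error(message, http_status, last_step: nil)
  { status: :error, message: message, http_status: http_status, last_step: last_step }
end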
...@@ -122,7 +122,7 @@ describe Projects::ImportService do ...@@ -122,7 +122,7 @@ describe Projects::ImportService do
end end
it 'succeeds if repository import is successful' do it 'succeeds if repository import is successful' do
expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true) expect(project.repository).to receive(:import_repository).and_return(true)
expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true) expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(status: :success) expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(status: :success)
...@@ -132,7 +132,9 @@ describe Projects::ImportService do ...@@ -132,7 +132,9 @@ describe Projects::ImportService do
end end
it 'fails if repository import fails' do it 'fails if repository import fails' do
expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_raise(Gitlab::Shell::Error.new('Failed to import the repository /a/b/c')) expect(project.repository)
.to receive(:import_repository)
.and_raise(Gitlab::Git::CommandError, 'Failed to import the repository /a/b/c')
result = subject.execute result = subject.execute
...@@ -144,7 +146,7 @@ describe Projects::ImportService do ...@@ -144,7 +146,7 @@ describe Projects::ImportService do
it 'logs the error' do it 'logs the error' do
error_message = 'error message' error_message = 'error message'
expect_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true) expect(project.repository).to receive(:import_repository).and_return(true)
expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true) expect_any_instance_of(Gitlab::BitbucketImport::Importer).to receive(:execute).and_return(true)
expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(status: :error, message: error_message) expect_any_instance_of(Projects::LfsPointers::LfsImportService).to receive(:execute).and_return(status: :error, message: error_message)
expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}") expect(Gitlab::AppLogger).to receive(:error).with("The Lfs import process failed. #{error_message}")
...@@ -155,7 +157,7 @@ describe Projects::ImportService do ...@@ -155,7 +157,7 @@ describe Projects::ImportService do
context 'when repository import scheduled' do context 'when repository import scheduled' do
before do before do
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository).and_return(true) expect(project.repository).to receive(:import_repository).and_return(true)
allow(subject).to receive(:import_data) allow(subject).to receive(:import_data)
end end
......
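A minimal sketch of the import step Projects::ImportService now exercises, assuming failures are re-raised as the service's own Error with the original message:

def import_repository
  project.repository.import_repository(project.import_url)
rescue Gitlab::Git::CommandError => e
  raise Projects::ImportService::Error, e.message
end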
...@@ -10,13 +10,16 @@ describe SearchService do ...@@ -10,13 +10,16 @@ describe SearchService do
let!(:group_member) { create(:group_member, group: accessible_group, user: user) } let!(:group_member) { create(:group_member, group: accessible_group, user: user) }
let!(:accessible_project) { create(:project, :private, name: 'accessible_project') } let!(:accessible_project) { create(:project, :private, name: 'accessible_project') }
let!(:inaccessible_project) { create(:project, :private, name: 'inaccessible_project') }
let(:note) { create(:note_on_issue, project: accessible_project) } let(:note) { create(:note_on_issue, project: accessible_project) }
let!(:inaccessible_project) { create(:project, :private, name: 'inaccessible_project') }
let(:snippet) { create(:snippet, author: user) } let(:snippet) { create(:snippet, author: user) }
let(:group_project) { create(:project, group: accessible_group, name: 'group_project') } let(:group_project) { create(:project, group: accessible_group, name: 'group_project') }
let(:public_project) { create(:project, :public, name: 'public_project') } let(:public_project) { create(:project, :public, name: 'public_project') }
subject(:search_service) { described_class.new(user, search: search, scope: scope, page: 1) }
before do before do
accessible_project.add_maintainer(user) accessible_project.add_maintainer(user)
end end
...@@ -293,5 +296,70 @@ describe SearchService do ...@@ -293,5 +296,70 @@ describe SearchService do
expect(search_objects.first).to eq public_project expect(search_objects.first).to eq public_project
end end
end end
context 'redacting search results' do
shared_examples 'it redacts incorrect results' do
before do
allow(Ability).to receive(:allowed?).and_return(allowed)
end
context 'when allowed' do
let(:allowed) { true }
it 'does nothing' do
expect(results).not_to be_empty
expect(results).to all(be_an(model_class))
end
end
context 'when disallowed' do
let(:allowed) { false }
it 'redacts the inaccessible results' do
expect(results).to be_empty
end
end
end
context 'issues' do
let(:issue) { create(:issue, project: accessible_project) }
let(:scope) { 'issues' }
let(:model_class) { Issue }
let(:ability) { :read_issue }
let(:search) { issue.title }
let(:results) { subject.search_objects }
it_behaves_like 'it redacts incorrect results'
end
context 'notes' do
let(:note) { create(:note_on_commit, project: accessible_project) }
let(:scope) { 'notes' }
let(:model_class) { Note }
let(:ability) { :read_note }
let(:search) { note.note }
let(:results) do
described_class.new(
user,
project_id: accessible_project.id,
scope: scope,
search: note.note
).search_objects
end
it_behaves_like 'it redacts incorrect results'
end
context 'merge_requests' do
let(:scope) { 'merge_requests' }
let(:model_class) { MergeRequest }
let(:ability) { :read_merge_request }
let(:merge_request) { create(:merge_request, source_project: accessible_project, author: user) }
let(:search) { merge_request.title }
let(:results) { subject.search_objects }
it_behaves_like 'it redacts incorrect results'
end
end
end end
end end
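A minimal sketch of the redaction these shared examples describe, assuming results are filtered one object at a time through Ability.allowed? (to_ability_name is assumed to be available on the models):

def redact_unauthorized_results(results)
  results.select do |object|
    Ability.allowed?(current_user, :"read_#{object.to_ability_name}", object)
  end
end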
...@@ -81,6 +81,11 @@ RSpec.configure do |config| ...@@ -81,6 +81,11 @@ RSpec.configure do |config|
metadata[:migration] = true if metadata[:level] == :migration metadata[:migration] = true if metadata[:level] == :migration
end end
# Do not overwrite schema if it's already set
unless metadata.key?(:schema)
metadata[:schema] = :latest if quality_level.background_migration?(location)
end
# Do not overwrite type if it's already set # Do not overwrite type if it's already set
unless metadata.key?(:type) unless metadata.key?(:type)
match = location.match(%r{/spec/([^/]+)/}) match = location.match(%r{/spec/([^/]+)/})
......
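A short usage note for the new schema metadata hook, with hypothetical spec names: a background migration spec gets schema: :latest unless it sets one itself.

# Inherits schema: :latest from the hook above.
describe Gitlab::BackgroundMigration::ArchiveLegacyTraces do
end

# An explicit tag wins; the hook does not overwrite it (class name and timestamp are hypothetical).
describe Gitlab::BackgroundMigration::SomeOlderMigration, schema: 20200210184420 do
end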
...@@ -272,7 +272,7 @@ describe ObjectStorage do ...@@ -272,7 +272,7 @@ describe ObjectStorage do
end end
it "to raise an error" do it "to raise an error" do
expect { subject }.to raise_error(/Object Storage is not enabled/) expect { subject }.to raise_error(/Object Storage is not enabled for JobArtifactUploader/)
end end
end end
......
...@@ -13,7 +13,6 @@ describe RepositoryForkWorker do ...@@ -13,7 +13,6 @@ describe RepositoryForkWorker do
describe "#perform" do describe "#perform" do
let(:project) { create(:project, :public, :repository) } let(:project) { create(:project, :public, :repository) }
let(:shell) { Gitlab::Shell.new }
let(:forked_project) { create(:project, :repository, :import_scheduled) } let(:forked_project) { create(:project, :repository, :import_scheduled) }
before do before do
...@@ -21,12 +20,17 @@ describe RepositoryForkWorker do ...@@ -21,12 +20,17 @@ describe RepositoryForkWorker do
end end
shared_examples 'RepositoryForkWorker performing' do shared_examples 'RepositoryForkWorker performing' do
before do def expect_fork_repository(success:)
allow(subject).to receive(:gitlab_shell).and_return(shell) allow(::Gitlab::GitalyClient::RepositoryService).to receive(:new).and_call_original
end expect_next_instance_of(::Gitlab::GitalyClient::RepositoryService, forked_project.repository.raw) do |svc|
exp = expect(svc).to receive(:fork_repository).with(project.repository.raw)
def expect_fork_repository
expect(shell).to receive(:fork_repository).with(project, forked_project) if success
exp.and_return(true)
else
exp.and_raise(GRPC::BadStatus, 'Fork failed in tests')
end
end
end end
describe 'when a worker was reset without cleanup' do describe 'when a worker was reset without cleanup' do
...@@ -35,20 +39,20 @@ describe RepositoryForkWorker do ...@@ -35,20 +39,20 @@ describe RepositoryForkWorker do
it 'creates a new repository from a fork' do it 'creates a new repository from a fork' do
allow(subject).to receive(:jid).and_return(jid) allow(subject).to receive(:jid).and_return(jid)
expect_fork_repository.and_return(true) expect_fork_repository(success: true)
perform! perform!
end end
end end
it "creates a new repository from a fork" do it "creates a new repository from a fork" do
expect_fork_repository.and_return(true) expect_fork_repository(success: true)
perform! perform!
end end
it 'protects the default branch' do it 'protects the default branch' do
expect_fork_repository.and_return(true) expect_fork_repository(success: true)
perform! perform!
...@@ -56,7 +60,7 @@ describe RepositoryForkWorker do ...@@ -56,7 +60,7 @@ describe RepositoryForkWorker do
end end
it 'flushes various caches' do it 'flushes various caches' do
expect_fork_repository.and_return(true) expect_fork_repository(success: true)
# Works around https://github.com/rspec/rspec-mocks/issues/910 # Works around https://github.com/rspec/rspec-mocks/issues/910
expect(Project).to receive(:find).with(forked_project.id).and_return(forked_project) expect(Project).to receive(:find).with(forked_project.id).and_return(forked_project)
...@@ -75,13 +79,13 @@ describe RepositoryForkWorker do ...@@ -75,13 +79,13 @@ describe RepositoryForkWorker do
it 'handles bad fork' do it 'handles bad fork' do
error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Failed to create fork repository" error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Failed to create fork repository"
expect_fork_repository.and_return(false) expect_fork_repository(success: false)
expect { perform! }.to raise_error(StandardError, error_message) expect { perform! }.to raise_error(StandardError, error_message)
end end
it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of source project LFS objects' do it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of source project LFS objects' do
expect_fork_repository.and_return(true) expect_fork_repository(success: true)
expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service| expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
expect(service).to receive(:execute).with(project.all_lfs_objects_oids) expect(service).to receive(:execute).with(project.all_lfs_objects_oids)
end end
...@@ -92,7 +96,7 @@ describe RepositoryForkWorker do ...@@ -92,7 +96,7 @@ describe RepositoryForkWorker do
it "handles LFS objects link failure" do it "handles LFS objects link failure" do
error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Source project has too many LFS objects" error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Source project has too many LFS objects"
expect_fork_repository.and_return(true) expect_fork_repository(success: true)
expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service| expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
expect(service).to receive(:execute).and_raise(Projects::LfsPointers::LfsLinkService::TooManyOidsError) expect(service).to receive(:execute).and_raise(Projects::LfsPointers::LfsLinkService::TooManyOidsError)
end end
......
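A minimal worker-side sketch consistent with the Gitaly stubbing above; the helper name and the rescue structure are assumptions, while the failure message mirrors the one asserted in the examples:

def fork_repository(source_project, target_project)
  gitaly = Gitlab::GitalyClient::RepositoryService.new(target_project.repository.raw)
  gitaly.fork_repository(source_project.repository.raw)
rescue GRPC::BadStatus
  raise StandardError, "Unable to fork project #{target_project.id} for repository " \
    "#{source_project.disk_path} -> #{target_project.disk_path}: Failed to create fork repository"
end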
...@@ -796,15 +796,15 @@ ...@@ -796,15 +796,15 @@
dependencies: dependencies:
vue-eslint-parser "^7.0.0" vue-eslint-parser "^7.0.0"
"@gitlab/svgs@^1.111.0": "@gitlab/svgs@^1.113.0":
version "1.111.0" version "1.113.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.111.0.tgz#10c0ba2fef9351a4d5e1f939994e39ebe5cba909" resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.113.0.tgz#0ea9cb3122a479f3ed4bb22943d68a9a38f69948"
integrity sha512-r+PMe4o7F9HAof+J9eF4aR/J068ZywAQRHBA+xU8TdyqX9gtDdBsvHBiMT65s8gr6MgLhduc3N4YlPwZua2QNQ== integrity sha512-upT+sKEnnwZDU7vzI5VSyg2l6IOd/0Icm/MLae6po/nOGbf2vftkUVfbalzISAmk9eYTuJQ1sGmRWdKXPGy1cw==
"@gitlab/ui@^9.28.0": "@gitlab/ui@^9.29.0":
version "9.28.0" version "9.29.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.28.0.tgz#fe307f7fdd9dc203078efb6f4d0590e0b1cb9053" resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.29.0.tgz#2308aec1d3d837392eaaf9d441aecec4b695ed73"
integrity sha512-de2zApLY5dD7YhguLAb/6mDROFMoA0zQ1euiGhBoVbaUwzNwSP1gremE186uPd6uGs9ZKfSp3on/qQYhXrNy9w== integrity sha512-zIY/aChWaU4UBlAjZ95PpBxgUd9VrGiXs9ujVU6Gbi+ZsHbpDvPlBHsHEG9isnUVBE2AD4GPqMLO8K9i+B9O4A==
dependencies: dependencies:
"@babel/standalone" "^7.0.0" "@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0" "@gitlab/vue-toasted" "^1.3.0"
......