Commit bffcdf9b authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 259c0cc0
@@ -11,18 +11,19 @@ import {
   GlModalDirective,
   GlTooltipDirective,
 } from '@gitlab/ui';
+import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
 import { s__ } from '~/locale';
 import createFlash from '~/flash';
 import Icon from '~/vue_shared/components/icon.vue';
 import { getParameterValues, mergeUrlParams, redirectTo } from '~/lib/utils/url_utility';
 import invalidUrl from '~/lib/utils/invalid_url';
-import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
 import DateTimePicker from './date_time_picker/date_time_picker.vue';
 import MonitorTimeSeriesChart from './charts/time_series.vue';
 import MonitorSingleStatChart from './charts/single_stat.vue';
 import GraphGroup from './graph_group.vue';
 import EmptyState from './empty_state.vue';
-import { getTimeDiff, isValidDate } from '../utils';
+import TrackEventDirective from '~/vue_shared/directives/track_event';
+import { getTimeDiff, isValidDate, getAddMetricTrackingOptions } from '../utils';
 export default {
   components: {
@@ -43,6 +44,7 @@ export default {
   directives: {
     GlModal: GlModalDirective,
     GlTooltip: GlTooltipDirective,
+    TrackEvent: TrackEventDirective,
   },
   props: {
     externalDashboardUrl: {
@@ -298,6 +300,7 @@ export default {
     onDateTimePickerApply(timeWindowUrlParams) {
       return redirectTo(mergeUrlParams(timeWindowUrlParams, window.location.href));
     },
+    getAddMetricTrackingOptions,
   },
   addMetric: {
     title: s__('Metrics|Add metric'),
@@ -389,9 +392,10 @@ export default {
           </gl-button>
           <gl-button
             v-if="addingMetricsAvailable"
+            ref="addMetricBtn"
             v-gl-modal="$options.addMetric.modalId"
             variant="outline-success"
-            class="mr-2 mt-1 js-add-metric-button"
+            class="mr-2 mt-1"
           >
             {{ $options.addMetric.title }}
           </gl-button>
@@ -411,6 +415,8 @@ export default {
           <div slot="modal-footer">
             <gl-button @click="hideAddMetricModal">{{ __('Cancel') }}</gl-button>
             <gl-button
+              ref="submitCustomMetricsFormBtn"
+              v-track-event="getAddMetricTrackingOptions()"
               :disabled="!formIsValid"
               variant="success"
               @click="submitCustomMetricsForm"
......
@@ -115,6 +115,7 @@ export const generateLinkToChartOptions = chartLink => {
 /**
  * Tracks snowplow event when user downloads CSV of cluster metric
  * @param {String} chart title that will be sent as a property for the event
+ * @return {Object} config object for event tracking
  */
 export const downloadCSVOptions = title => {
   const isCLusterHealthBoard = isClusterHealthBoard();
@@ -129,6 +130,18 @@
   return { category, action, label: 'Chart title', property: title };
 };
+/**
+ * Generate options for snowplow to track adding a new metric via the dashboard
+ * custom metric modal
+ * @return {Object} config object for event tracking
+ */
+export const getAddMetricTrackingOptions = () => ({
+  category: document.body.dataset.page,
+  action: 'click_button',
+  label: 'add_new_metric',
+  property: 'modal',
+});
 /**
  * This function validates the graph data contains exactly 3 metrics plus
  * value validations from graphDataValidatorForValues.
......
@@ -15,7 +15,6 @@
 }
 .tree-controls {
-  display: flex;
   text-align: right;
   .btn {
......
@@ -55,6 +55,7 @@ class Projects::ErrorTrackingController < Projects::ApplicationController
       render json: {
         errors: serialize_errors(result[:issues]),
+        pagination: result[:pagination],
         external_url: service.external_url
       }
     end
@@ -111,7 +112,7 @@ class Projects::ErrorTrackingController < Projects::ApplicationController
   end
   def list_issues_params
-    params.permit([:search_term, :sort])
+    params.permit(:search_term, :sort, :cursor)
   end
   def list_projects_params
......
@@ -301,7 +301,8 @@ module ApplicationSettingsHelper
       :snowplow_iglu_registry_url,
       :push_event_hooks_limit,
       :push_event_activities_limit,
-      :custom_http_clone_url_root
+      :custom_http_clone_url_root,
+      :snippet_size_limit
     ]
   end
......
@@ -226,6 +226,8 @@ class ApplicationSetting < ApplicationRecord
   validates :push_event_activities_limit,
             numericality: { greater_than_or_equal_to: 0 }
+  validates :snippet_size_limit, numericality: { only_integer: true, greater_than: 0 }
   SUPPORTED_KEY_TYPES.each do |type|
     validates :"#{type}_key_restriction", presence: true, key_restriction: { type: type }
   end
......
@@ -26,7 +26,8 @@ module ApplicationSettingImplementation
     '/users',
     '/users/confirmation',
     '/unsubscribes/',
-    '/import/github/personal_access_token'
+    '/import/github/personal_access_token',
+    '/admin/session'
   ].freeze
   class_methods do
@@ -139,7 +140,8 @@ module ApplicationSettingImplementation
         snowplow_app_id: nil,
         snowplow_iglu_registry_url: nil,
         custom_http_clone_url_root: nil,
-        productivity_analytics_start_date: Time.now
+        productivity_analytics_start_date: Time.now,
+        snippet_size_limit: 50.megabytes
       }
     end
......
@@ -104,7 +104,7 @@ module ErrorTracking
     def calculate_reactive_cache(request, opts)
       case request
       when 'list_issues'
-        { issues: sentry_client.list_issues(**opts.symbolize_keys) }
+        sentry_client.list_issues(**opts.symbolize_keys)
       when 'issue_details'
         {
           issue: sentry_client.issue_details(**opts.symbolize_keys)
......
@@ -170,6 +170,7 @@ class Project < ApplicationRecord
   has_one :microsoft_teams_service
   has_one :packagist_service
   has_one :hangouts_chat_service
+  has_one :unify_circuit_service
   has_one :root_of_fork_network,
     foreign_key: 'root_project_id',
......
# frozen_string_literal: true

class UnifyCircuitService < ChatNotificationService
  def title
    'Unify Circuit'
  end

  def description
    'Receive event notifications in Unify Circuit'
  end

  def self.to_param
    'unify_circuit'
  end

  def help
    'This service sends notifications about projects events to a Unify Circuit conversation.<br />
    To set up this service:
    <ol>
      <li><a href="https://www.circuit.com/unifyportalfaqdetail?articleId=164448">Set up an incoming webhook for your conversation</a>. All notifications will come to this conversation.</li>
      <li>Paste the <strong>Webhook URL</strong> into the field below.</li>
      <li>Select events below to enable notifications.</li>
    </ol>'
  end

  def event_field(event)
  end

  def default_channel_placeholder
  end

  def self.supported_events
    %w[push issue confidential_issue merge_request note confidential_note tag_push
       pipeline wiki_page]
  end

  def default_fields
    [
      { type: 'text', name: 'webhook', placeholder: "e.g. https://circuit.com/rest/v2/webhooks/incoming/…", required: true },
      { type: 'checkbox', name: 'notify_only_broken_pipelines' },
      { type: 'select', name: 'branches_to_be_notified', choices: BRANCH_CHOICES }
    ]
  end

  private

  def notify(message, opts)
    response = Gitlab::HTTP.post(webhook, body: {
      subject: message.project_name,
      text: message.pretext,
      markdown: true
    }.to_json)

    response if response.success?
  end

  def custom_data(data)
    super(data).merge(markdown: true)
  end
end
@@ -289,6 +289,7 @@ class Service < ApplicationRecord
       slack
       teamcity
       microsoft_teams
+      unify_circuit
     ]
     if Rails.env.development?
......
@@ -46,6 +46,18 @@ class Snippet < ApplicationRecord
             length: { maximum: 255 }
   validates :content, presence: true
+  validates :content,
+            length: {
+              maximum: ->(_) { Gitlab::CurrentSettings.snippet_size_limit },
+              message: -> (_, data) do
+                current_value = ActiveSupport::NumberHelper.number_to_human_size(data[:value].size)
+                max_size = ActiveSupport::NumberHelper.number_to_human_size(Gitlab::CurrentSettings.snippet_size_limit)
+                _("is too long (%{current_value}). The maximum size is %{max_size}.") % { current_value: current_value, max_size: max_size }
+              end
+            },
+            if: :content_changed?
   validates :visibility_level, inclusion: { in: Gitlab::VisibilityLevel.values }
   # Scopes
......
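For context, here is a rough sketch of how this new content-size validation behaves from a Rails console. The snippet class, attribute values, and exact error wording are illustrative only and are not part of the commit:

```ruby
# Hypothetical console session (values chosen for illustration).
snippet = PersonalSnippet.new(
  title: 'big snippet',
  content: 'a' * 60.megabytes, # exceeds the default 50 MB limit
  visibility_level: Gitlab::VisibilityLevel::PRIVATE
)

snippet.valid?
# => false
snippet.errors[:content]
# => ["is too long (60 MB). The maximum size is 50 MB."]
```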
@@ -6,37 +6,24 @@ module ErrorTracking
     DEFAULT_LIMIT = 20
     DEFAULT_SORT = 'last_seen'
-    def execute
-      return error('Error Tracking is not enabled') unless enabled?
-      return error('Access denied', :unauthorized) unless can_read?
-      result = project_error_tracking_setting.list_sentry_issues(
-        issue_status: issue_status,
-        limit: limit,
-        search_term: search_term,
-        sort: sort
-      )
-      # our results are not yet ready
-      unless result
-        return error('Not ready. Try again later', :no_content)
-      end
-      if result[:error].present?
-        return error(result[:error], http_status_for(result[:error_type]))
-      end
-      success(issues: result[:issues])
-    end
     def external_url
       project_error_tracking_setting&.sentry_external_url
     end
     private
+    def fetch
+      project_error_tracking_setting.list_sentry_issues(
+        issue_status: issue_status,
+        limit: limit,
+        search_term: params[:search_term].presence,
+        sort: sort,
+        cursor: params[:cursor].presence
+      )
+    end
     def parse_response(response)
-      { issues: response[:issues] }
+      response.slice(:issues, :pagination)
     end
     def issue_status
@@ -47,18 +34,6 @@ module ErrorTracking
       params[:limit] || DEFAULT_LIMIT
     end
-    def search_term
-      params[:search_term].presence
-    end
-    def enabled?
-      project_error_tracking_setting&.enabled?
-    end
-    def can_read?
-      can?(current_user, :read_sentry_issue, project)
-    end
     def sort
       params[:sort] || DEFAULT_SORT
     end
......
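As a hedged illustration of how the reworked service is consumed (the constructor arguments and the exact result shape are assumed from the surrounding diff, not verified against the full codebase):

```ruby
# Hypothetical caller, e.g. from a controller or spec.
result = ErrorTracking::ListIssuesService.new(
  project,
  current_user,
  { search_term: 'NoMethodError', sort: 'last_seen', cursor: params[:cursor] }
).execute

result[:issues]     # list of Sentry errors mapped by the client
result[:pagination] # cursor information parsed from the Sentry response headers
```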
@@ -15,7 +15,7 @@
       %li.breadcrumb-item
         = link_to truncate(title, length: 40), browse_project_job_artifacts_path(@project, @build, path)
-    .tree-controls
+    .tree-controls<
       = link_to download_project_job_artifacts_path(@project, @build),
           rel: 'nofollow', download: '', class: 'btn btn-default download' do
         = sprite_icon('download')
......
@@ -17,21 +17,19 @@
   - else
     = link_to title, project_tree_path(@project, tree_join(@ref, path))
-  .tree-controls
+  .tree-controls<
     = render 'projects/find_file_link'
-# only show normal/blame view links for text files
- if blob.readable_text?
- if blame
= link_to 'Normal view', project_blob_path(@project, @id),
class: 'btn'
- else
= link_to 'Blame', project_blame_path(@project, @id),
class: 'btn js-blob-blame-link' unless blob.empty?
.btn-group{ role: "group" }< = link_to 'History', project_commits_path(@project, @id),
-# only show normal/blame view links for text files class: 'btn'
- if blob.readable_text?
- if blame
= link_to 'Normal view', project_blob_path(@project, @id),
class: 'btn'
- else
= link_to 'Blame', project_blame_path(@project, @id),
class: 'btn js-blob-blame-link' unless blob.empty?
= link_to 'History', project_commits_path(@project, @id), = link_to 'Permalink', project_blob_path(@project,
class: 'btn' tree_join(@commit.sha, @path)), class: 'btn js-data-file-blob-permalink-url'
= link_to 'Permalink', project_blob_path(@project,
tree_join(@commit.sha, @path)), class: 'btn js-data-file-blob-permalink-url'
@@ -7,7 +7,7 @@
 - show_menu = can_create_issue || can_create_project_snippet || can_push_code || create_mr_from_new_fork || merge_project
 - if show_menu
-  .project-action-button.dropdown.inline
+  .project-action-button.dropdown.inline<
     %a.btn.dropdown-toggle.has-tooltip.qa-create-new-dropdown{ href: '#', title: _('Create new...'), 'data-toggle' => 'dropdown', 'data-container' => 'body', 'aria-label' => _('Create new...'), 'data-display' => 'static' }
       = icon('plus')
       = icon("caret-down")
......
@@ -13,7 +13,7 @@
   %ul.breadcrumb.repo-breadcrumb
     = commits_breadcrumbs
-  .tree-controls.d-none.d-sm-none.d-md-block
+  .tree-controls.d-none.d-sm-none.d-md-block<
     - if @merge_request.present?
       .control
         = link_to _("View open merge request"), project_merge_request_path(@project, @merge_request), class: 'btn'
......
@@ -75,7 +75,7 @@
   = link_to new_project_tag_path(@project) do
     #{ _('New tag') }
-  .tree-controls
+  .tree-controls<
     = render_if_exists 'projects/tree/lock_link'
     - if vue_file_list_enabled?
       #js-tree-history-link.d-inline-block{ data: { history_link: project_commits_path(@project, @ref) } }
@@ -85,20 +85,19 @@
     = render 'projects/find_file_link'
     - if can_create_mr_from_fork
= succeed " " do - if can_collaborate || current_user&.already_forked?(@project)
- if can_collaborate || current_user&.already_forked?(@project) - if vue_file_list_enabled?
- if vue_file_list_enabled? #js-tree-web-ide-link.d-inline-block
#js-tree-web-ide-link.d-inline-block
- else
= link_to ide_edit_path(@project, @ref, @path), class: 'btn btn-default qa-web-ide-button' do
= _('Web IDE')
- else - else
= link_to '#modal-confirm-fork', class: 'btn btn-default qa-web-ide-button', data: { target: '#modal-confirm-fork', toggle: 'modal'} do = link_to ide_edit_path(@project, @ref, @path), class: 'btn btn-default qa-web-ide-button' do
= _('Web IDE') = _('Web IDE')
= render 'shared/confirm_fork_modal', fork_path: ide_fork_and_edit_path(@project, @ref, @path) - else
= link_to '#modal-confirm-fork', class: 'btn btn-default qa-web-ide-button', data: { target: '#modal-confirm-fork', toggle: 'modal'} do
= _('Web IDE')
= render 'shared/confirm_fork_modal', fork_path: ide_fork_and_edit_path(@project, @ref, @path)
 - if show_xcode_link?(@project)
-  .project-action-button.project-xcode.inline
+  .project-action-button.project-xcode.inline<
     = render "projects/buttons/xcode_link"
 = render 'projects/buttons/download', project: @project, ref: @ref
---
title: Track adding metric via monitoring dashboard
merge_request: 20818
author:
type: added
---
title: Remove whitespaces between tree-controls elements
merge_request: 20952
author:
type: other
---
title: Add admin mode controller path to Rack::Attack defaults
merge_request: 20735
author: Diego Louzán
type: changed
---
title: Add Unify Circuit project integration service
merge_request: 19849
author: Fabio Huser
type: added
---
title: Add created_before/after filter to group/project audit events
merge_request: 20641
author:
type: added
---
title: Add limit for snippet content size
merge_request: 20346
author:
type: performance
---
title: Upgrade auto-deploy-image for helm default values file
merge_request: 20588
author:
type: changed
# frozen_string_literal: true

class AddSnippetSizeLimitToApplicationSettings < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  def up
    add_column :application_settings, :snippet_size_limit, :bigint, default: 50.megabytes, null: false
  end

  def down
    remove_column :application_settings, :snippet_size_limit
  end
end
# frozen_string_literal: true

class AddAdminModeProtectedPath < ActiveRecord::Migration[5.2]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  ADMIN_MODE_ENDPOINT = '/admin/session'

  OLD_DEFAULT_PROTECTED_PATHS = [
    '/users/password',
    '/users/sign_in',
    '/api/v3/session.json',
    '/api/v3/session',
    '/api/v4/session.json',
    '/api/v4/session',
    '/users',
    '/users/confirmation',
    '/unsubscribes/',
    '/import/github/personal_access_token'
  ]

  NEW_DEFAULT_PROTECTED_PATHS = OLD_DEFAULT_PROTECTED_PATHS.dup << ADMIN_MODE_ENDPOINT

  class ApplicationSetting < ActiveRecord::Base
    self.table_name = 'application_settings'
  end

  def up
    change_column_default :application_settings, :protected_paths, NEW_DEFAULT_PROTECTED_PATHS

    # schema allows nulls for protected_paths
    ApplicationSetting.where.not(protected_paths: nil).each do |application_setting|
      unless application_setting.protected_paths.include?(ADMIN_MODE_ENDPOINT)
        updated_protected_paths = application_setting.protected_paths << ADMIN_MODE_ENDPOINT
        application_setting.update(protected_paths: updated_protected_paths)
      end
    end
  end

  def down
    change_column_default :application_settings, :protected_paths, OLD_DEFAULT_PROTECTED_PATHS

    # schema allows nulls for protected_paths
    ApplicationSetting.where.not(protected_paths: nil).each do |application_setting|
      if application_setting.protected_paths.include?(ADMIN_MODE_ENDPOINT)
        updated_protected_paths = application_setting.protected_paths - [ADMIN_MODE_ENDPOINT]
        application_setting.update(protected_paths: updated_protected_paths)
      end
    end
  end
end
@@ -328,7 +328,7 @@ ActiveRecord::Schema.define(version: 2019_11_25_140458) do
     t.boolean "throttle_protected_paths_enabled", default: false, null: false
     t.integer "throttle_protected_paths_requests_per_period", default: 10, null: false
     t.integer "throttle_protected_paths_period_in_seconds", default: 60, null: false
-    t.string "protected_paths", limit: 255, default: ["/users/password", "/users/sign_in", "/api/v3/session.json", "/api/v3/session", "/api/v4/session.json", "/api/v4/session", "/users", "/users/confirmation", "/unsubscribes/", "/import/github/personal_access_token"], array: true
+    t.string "protected_paths", limit: 255, default: ["/users/password", "/users/sign_in", "/api/v3/session.json", "/api/v3/session", "/api/v4/session.json", "/api/v4/session", "/users", "/users/confirmation", "/unsubscribes/", "/import/github/personal_access_token", "/admin/session"], array: true
     t.boolean "throttle_incident_management_notification_enabled", default: false, null: false
     t.integer "throttle_incident_management_notification_period_in_seconds", default: 3600
     t.integer "throttle_incident_management_notification_per_period", default: 3600
@@ -361,6 +361,7 @@ ActiveRecord::Schema.define(version: 2019_11_25_140458) do
     t.string "encrypted_slack_app_secret_iv", limit: 255
     t.text "encrypted_slack_app_verification_token"
     t.string "encrypted_slack_app_verification_token_iv", limit: 255
+    t.bigint "snippet_size_limit", default: 52428800, null: false
     t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id"
     t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id"
     t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id"
......
@@ -158,6 +158,10 @@ Learn how to install, configure, update, and maintain your GitLab instance.
 - [Shared Runners pipelines quota](../user/admin_area/settings/continuous_integration.md#shared-runners-pipeline-minutes-quota-starter-only): Limit the usage of pipeline minutes for Shared Runners. **(STARTER ONLY)**
 - [Enable/disable Auto DevOps](../topics/autodevops/index.md#enablingdisabling-auto-devops): Enable or disable Auto DevOps for your instance.
+## Snippet settings
+- [Setting snippet content size limit](snippets/index.md): Set a maximum size limit for snippets' content.
 ## Git configuration options
 - [Custom Git hooks](custom_hooks.md): Custom Git hooks (on the filesystem) for when webhooks aren't enough.
......
---
type: reference, howto
---
# Snippets settings **(CORE ONLY)**
Adjust the snippets' settings of your GitLab instance.
## Snippets content size limit
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/31133) in GitLab 12.6.
You can set a maximum content size limit for snippets. This limit can help prevent
abuse of the feature. The default content size limit is **52428800 bytes** (50MB).
### How does it work?
The content size limit is applied when a snippet is created or
updated. To avoid breaking existing snippets, however, the limit
is only enforced on stored snippets when their content is updated.
### Snippets size limit configuration
This setting is not available through the [Admin Area settings](../../user/admin_area/settings/index.md).
In order to configure this setting, use either the Rails console
or the [Application settings API](../../api/settings.md).
NOTE: **IMPORTANT:**
The value of the limit **must** be in Bytes.
#### Through the Rails console
The steps to configure this setting through the Rails console are:
1. Start the Rails console:
```bash
# For Omnibus installations
sudo gitlab-rails console
# For installations from source
sudo -u git -H bundle exec rails console production
```
1. Update the snippets maximum file size:
```ruby
ApplicationSetting.first.update!(snippet_size_limit: 50.megabytes)
```
To retrieve the current value, start the Rails console and run:
```ruby
Gitlab::CurrentSettings.snippet_size_limit
```
#### Through the API
The process to set the snippets size limit through the Application Settings API is
exactly the same as you would do to [update any other setting](../../api/settings.md#change-application-settings).
```bash
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/application/settings?snippet_size_limit=52428800
```
You can also use the API to [retrieve the current value](../../api/settings.md#get-current-application-settings).
```bash
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/application/settings
```
@@ -229,6 +229,51 @@ Get Campfire service settings for a project.
GET /projects/:id/services/campfire
```
## Unify Circuit
Unify Circuit RTC and collaboration tool.
### Create/Edit Unify Circuit service
Set Unify Circuit service for a project.
```
PUT /projects/:id/services/unify-circuit
```
Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `webhook` | string | true | The Unify Circuit webhook. For example, `https://circuit.com/rest/v2/webhooks/incoming/...`. |
| `notify_only_broken_pipelines` | boolean | false | Send notifications for broken pipelines |
| `branches_to_be_notified` | string | false | Branches to send notifications for. Valid options are "all", "default", "protected", and "default_and_protected" |
| `push_events` | boolean | false | Enable notifications for push events |
| `issues_events` | boolean | false | Enable notifications for issue events |
| `confidential_issues_events` | boolean | false | Enable notifications for confidential issue events |
| `merge_requests_events` | boolean | false | Enable notifications for merge request events |
| `tag_push_events` | boolean | false | Enable notifications for tag push events |
| `note_events` | boolean | false | Enable notifications for note events |
| `confidential_note_events` | boolean | false | Enable notifications for confidential note events |
| `pipeline_events` | boolean | false | Enable notifications for pipeline events |
| `wiki_page_events` | boolean | false | Enable notifications for wiki page events |
### Delete Unify Circuit service
Delete Unify Circuit service for a project.
```
DELETE /projects/:id/services/unify-circuit
```
### Get Unify Circuit service settings
Get Unify Circuit service settings for a project.
```
GET /projects/:id/services/unify-circuit
```
## Custom Issue Tracker
Custom issue tracker
@@ -480,6 +525,7 @@ Parameters:
 | `merge_requests_events` | boolean | false | Enable notifications for merge request events |
 | `tag_push_events` | boolean | false | Enable notifications for tag push events |
 | `note_events` | boolean | false | Enable notifications for note events |
+| `confidential_note_events` | boolean | false | Enable notifications for confidential note events |
 | `pipeline_events` | boolean | false | Enable notifications for pipeline events |
 | `wiki_page_events` | boolean | false | Enable notifications for wiki page events |
@@ -1088,6 +1134,7 @@ Parameters:
 | `merge_requests_events` | boolean | false | Enable notifications for merge request events |
 | `tag_push_events` | boolean | false | Enable notifications for tag push events |
 | `note_events` | boolean | false | Enable notifications for note events |
+| `confidential_note_events` | boolean | false | Enable notifications for confidential note events |
 | `pipeline_events` | boolean | false | Enable notifications for pipeline events |
 | `wiki_page_events` | boolean | false | Enable notifications for wiki page events |
 | `push_channel` | string | false | The name of the channel to receive push events notifications |
@@ -1095,6 +1142,7 @@ Parameters:
 | `confidential_issue_channel` | string | false | The name of the channel to receive confidential issues events notifications |
 | `merge_request_channel` | string | false | The name of the channel to receive merge request events notifications |
 | `note_channel` | string | false | The name of the channel to receive note events notifications |
+| `confidential_note_channel` | string | false | The name of the channel to receive confidential note events notifications |
 | `tag_push_channel` | string | false | The name of the channel to receive tag push events notifications |
 | `pipeline_channel` | string | false | The name of the channel to receive pipeline events notifications |
 | `wiki_page_channel` | string | false | The name of the channel to receive wiki page events notifications |
@@ -350,3 +350,4 @@ are listed in the descriptions of the relevant settings.
 | `user_show_add_ssh_key_message` | boolean | no | When set to `false` disable the "You won't be able to pull or push project code via SSH" warning shown to users with no uploaded SSH key. |
 | `version_check_enabled` | boolean | no | Let GitLab inform you when an update is available. |
 | `web_ide_clientside_preview_enabled` | boolean | no | Client side evaluation (allow live previews of JavaScript projects in the Web IDE using CodeSandbox client side evaluation). |
+| `snippet_size_limit` | integer | no | Max snippet content size in **bytes**. Default: 52428800 Bytes (50MB). |
@@ -228,5 +228,5 @@ our very powerful [`only/except` rules system](../yaml/README.md#onlyexcept-basi
 ```yaml
 my_job:
-  only: branches
+  only: [branches]
 ```
@@ -30,7 +30,7 @@ Pipelines for merge requests have the following requirements and limitations:
 ## Configuring pipelines for merge requests
-To configure pipelines for merge requests, add the `only: merge_requests` parameter to
+To configure pipelines for merge requests, add the `only: [merge_requests]` parameter to
 the jobs that you want to run only for merge requests.
 Then, when developers create or update merge requests, a pipeline runs
@@ -68,7 +68,7 @@ After the merge request is updated with new commits:
 - The pipeline fetches the latest code from the source branch and run tests against it.
 In the above example, the pipeline contains only a `test` job.
-Since the `build` and `deploy` jobs don't have the `only: merge_requests` parameter,
+Since the `build` and `deploy` jobs don't have the `only: [merge_requests]` parameter,
 they will not run in the merge request.
 Pipelines tagged with the **detached** badge indicate that they were triggered
@@ -86,7 +86,7 @@ Read the [documentation on Merge Trains](pipelines_for_merged_results/merge_trai
 ## Excluding certain jobs
-The behavior of the `only: merge_requests` parameter is such that _only_ jobs with
+The behavior of the `only: [merge_requests]` parameter is such that _only_ jobs with
 that parameter are run in the context of a merge request; no other jobs will be run.
 However, you may want to reverse this behavior, having all of your jobs to run _except_
......
@@ -780,7 +780,7 @@ it is possible to define a job to be created based on files modified
 in a merge request.
 In order to deduce the correct base SHA of the source branch, we recommend combining
-this keyword with `only: merge_requests`. This way, file differences are correctly
+this keyword with `only: [merge_requests]`. This way, file differences are correctly
 calculated from any further commits, thus all changes in the merge requests are properly
 tested in pipelines.
@@ -802,7 +802,7 @@ either files in `service-one` directory or the `Dockerfile`, GitLab creates
 and triggers the `docker build service one` job.
 Note that if [pipelines for merge requests](../merge_request_pipelines/index.md) is
-combined with `only: change`, but `only: merge_requests` is omitted, there could be
+combined with `only: [change]`, but `only: [merge_requests]` is omitted, there could be
 unwanted behavior.
 For example:
......
@@ -699,7 +699,7 @@ nicely on different mobile devices.
 ## Code blocks
 - Always wrap code added to a sentence in inline code blocks (`` ` ``).
-  E.g., `.gitlab-ci.yml`, `git add .`, `CODEOWNERS`, `only: master`.
+  E.g., `.gitlab-ci.yml`, `git add .`, `CODEOWNERS`, `only: [master]`.
   File names, commands, entries, and anything that refers to code should be added to code blocks.
   To make things easier for the user, always add a full code block for things that can be
   useful to copy and paste, as they can easily do it with the button on code blocks.
......
@@ -165,6 +165,54 @@ can quickly spiral out of control.
 There are some cases where this may be needed. If this is the case this should
 be clearly mentioned in the merge request description.
## Batch process
**Summary:** Calls to external services (e.g. PostgreSQL, Redis, Object Storage)
should be executed in a **batch style** rather than one at a time, in order to reduce connection overhead.
For fetching rows from various tables in a batch style, see the [Eager Loading](#eager-loading) section.
### Example: Delete multiple files from Object Storage
When you delete multiple files from object storage (e.g. GCS),
issuing a separate REST API call per file is quite an expensive
process. Ideally, this should be done in a batch style; for example, S3 provides a
[batch deletion API](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html),
so it's a good idea to consider such an approach.
The `FastDestroyAll` module might help in this situation. It's a
small framework for removing a large number of database rows and their associated data
in a batch style.
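As a rough sketch of the batch-style idea (the `storage_client` object and its `delete_objects` method below are placeholders, not GitLab or vendor APIs):

```ruby
# Illustrative only: collect keys first, then delete them in batches
# instead of issuing one REST call per file.
def destroy_uploaded_files(uploads)
  keys = uploads.map(&:object_storage_key)

  keys.each_slice(1_000) do |batch|
    storage_client.delete_objects(batch) # hypothetical batched delete call
  end
end
```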
## Timeout
**Summary:** You should set a reasonable timeout when the system invokes HTTP calls
to external services (e.g. Kubernetes), and it should be executed in Sidekiq, not
in Puma/Unicorn threads.
Often, GitLab needs to communicate with an external service such as a Kubernetes
cluster. In this case, it's hard to estimate when the external service will finish
the requested process; for example, if it's a user-owned cluster that is inactive for some reason,
GitLab might wait for the response forever ([example](https://gitlab.com/gitlab-org/gitlab/issues/31475)).
This could result in a Puma/Unicorn timeout and should be avoided at all costs.
You should set a reasonable timeout, gracefully handle exceptions, and surface the
errors in the UI or log them internally.
Using [`ReactiveCaching`](https://docs.gitlab.com/ee/development/utilities.html#reactivecaching) is one of the best solutions to fetch external data.
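A minimal sketch of the idea, assuming an HTTParty-style client such as `Gitlab::HTTP`; the method name, URL handling, and rescued error classes are illustrative rather than taken from a real code path:

```ruby
# Illustrative only: cap the wait time and surface failures instead of
# blocking a Puma/Unicorn thread indefinitely.
def fetch_cluster_status(url)
  response = Gitlab::HTTP.get(url, timeout: 10)
  response.parsed_response
rescue Timeout::Error, Net::OpenTimeout, Net::ReadTimeout, SocketError => e
  Gitlab::AppLogger.error("cluster status check failed: #{e.message}")
  nil
end
```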
## Keep database transaction minimal
**Summary:** You should avoid accessing external services (e.g. Gitaly) during database
transactions; otherwise it leads to severe contention problems,
as an open transaction basically blocks the release of a PostgreSQL backend connection.
To keep transactions as short as possible, consider using the `AfterCommitQueue`
module or an `after_commit` AR hook.
Here is [an example](https://gitlab.com/gitlab-org/gitlab/issues/36154#note_247228859)
where a single request to a Gitaly instance during a transaction triggered a P1 issue.
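A minimal sketch of the pattern with a plain ActiveRecord `after_commit` hook (the model and method names are made up for illustration; real code may use the `AfterCommitQueue` module instead):

```ruby
class ExampleRecord < ApplicationRecord
  # Defer the external call until the transaction has committed, so the
  # PostgreSQL connection is not held open while waiting on Gitaly.
  after_commit :notify_external_service, on: :update

  private

  def notify_external_service
    # Talk to Gitaly / Kubernetes / other external services here,
    # outside of any open database transaction.
  end
end
```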
## Eager Loading
**Summary:** always eager load associations when retrieving more than one row.
......
@@ -53,7 +53,8 @@ default['gitlab']['gitlab-rails']['rack_attack_protected_paths'] = [
   '/users',
   '/users/confirmation',
   '/unsubscribes/',
-  '/import/github/personal_access_token'
+  '/import/github/personal_access_token',
+  '/admin/session'
 ]
 ```
......
@@ -651,6 +651,8 @@ procfile exec` to replicate the environment where your application will run.
 #### Workers
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/30628) in GitLab 12.6, `.gitlab/auto-deploy-values.yaml` will be used by default for Helm upgrades.
 Some web applications need to run extra deployments for "worker processes". For
 example, it is common in a Rails application to have a separate worker process
 to run background tasks like sending emails.
@@ -672,13 +674,18 @@ need to:
 - Set a CI variable `K8S_SECRET_REDIS_URL`, which the URL of this instance to
   ensure it's passed into your deployments.
-Once you have configured your worker to respond to health checks, you will
-need to configure a CI variable `HELM_UPGRADE_EXTRA_ARGS` with the value
-`--values helm-values.yaml`.
-Then you can, for example, run a Sidekiq worker for your Rails application
-by adding a file named `helm-values.yaml` to your repository with the following
-content:
+Once you have configured your worker to respond to health checks, run a Sidekiq
+worker for your Rails application. For:
+- GitLab 12.6 and later, either:
+  - Add a file named `.gitlab/auto-deploy-values.yaml` to your repository. It will
+    be automatically used if found.
+  - Add a file with a different name or path to the repository, and override the value of the
+    `HELM_UPGRADE_VALUES_FILE` variable with the path and name.
+- GitLab 12.5 and earlier, run the worker with the `--values` parameter that specifies
+  a file in the repository.
+In any case, the file must contain the following:
 ```yml
 workers:
@@ -983,6 +990,7 @@ applications.
 | `CANARY_PRODUCTION_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md) in the production environment. Takes precedence over `CANARY_REPLICAS`. Defaults to 1. |
 | `CANARY_REPLICAS` | Number of canary replicas to deploy for [Canary Deployments](../../user/project/canary_deployments.md). Defaults to 1. |
 | `HELM_RELEASE_NAME` | From GitLab 12.1, allows the `helm` release name to be overridden. Can be used to assign unique release names when deploying multiple projects to a single namespace. |
+| `HELM_UPGRADE_VALUES_FILE` | From GitLab 12.6, allows the `helm upgrade` values file to be overridden. Defaults to `.gitlab/auto-deploy-values.yaml`. |
 | `HELM_UPGRADE_EXTRA_ARGS` | From GitLab 11.11, allows extra arguments in `helm` commands when deploying the application. Note that using quotes will not prevent word splitting. **Tip:** you can use this variable to [customize the Auto Deploy Helm chart](#custom-helm-chart) by applying custom override values with `--values my-values.yaml`. |
 | `INCREMENTAL_ROLLOUT_MODE` | From GitLab 11.4, if present, can be used to enable an [incremental rollout](#incremental-rollout-to-production-premium) of your application for the production environment. Set to `manual` for manual deployment jobs or `timed` for automatic rollout deployments with a 5 minute delay each one. |
 | `K8S_SECRET_*` | From GitLab 11.7, any variable prefixed with [`K8S_SECRET_`](#application-secret-variables) will be made available by Auto DevOps as environment variables to the deployed application. |
......
@@ -14,7 +14,8 @@ GitLab protects the following paths with Rack Attack by default:
 '/users',
 '/users/confirmation',
 '/unsubscribes/',
-'/import/github/personal_access_token'
+'/import/github/personal_access_token',
+'/admin/session'
 ```
 GitLab responds with HTTP status code `429` to POST requests at protected paths
......
@@ -54,6 +54,7 @@ Click on the service links to see further configuration instructions and details.
 | [Prometheus](prometheus.md) | Monitor the performance of your deployed apps |
 | Pushover | Pushover makes it easy to get real-time notifications on your Android device, iPhone, iPad, and Desktop |
 | [Redmine](redmine.md) | Redmine issue tracker |
+| [Unify Circuit](unify_circuit.md) | Receive event notifications in Unify Circuit |
 | [YouTrack](youtrack.md) | YouTrack issue tracker |
 ## Push hooks limit
......
# Unify Circuit service
The Unify Circuit service sends notifications from GitLab to the conversation for which the webhook was created.
## On Unify Circuit
1. Open the conversation in which you want to see the notifications.
1. From the conversation menu, select **Configure Webhooks**.
1. Click **ADD WEBHOOK** and fill in the name of the bot that will post the messages. Optionally, define an avatar.
1. Click **SAVE** and copy the **Webhook URL** of your webhook.
For more information, see the [Unify Circuit documentation for configuring incoming webhooks](https://www.circuit.com/unifyportalfaqdetail?articleId=164448).
## On GitLab
When you have the **Webhook URL** for your Unify Circuit conversation webhook, you can set up the GitLab service.
1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services) in your project's settings, i.e. **Project > Settings > Integrations**.
1. Select the **Unify Circuit** project service to configure it.
1. Check the **Active** checkbox to turn on the service.
1. Check the checkboxes corresponding to the GitLab events you want to receive in Unify Circuit.
1. Paste the **Webhook URL** that you copied from the Unify Circuit configuration step.
1. Configure the remaining options and click `Save changes`.
Your Unify Circuit conversation will now start receiving GitLab event notifications as configured.
![Unify Circuit configuration](img/unify_circuit_configuration.png)
@@ -69,7 +69,7 @@ For example, to that on merge requests there is always a passing job even though
 ```yaml
 enable_merge:
-  only: merge_requests
+  only: [merge_requests]
   script:
     - echo true
 ```
......
@@ -132,6 +132,12 @@ module API
           type: Boolean,
           desc: 'Enable notifications for note_events'
         },
+        {
+          required: false,
+          name: :confidential_note_events,
+          type: Boolean,
+          desc: 'Enable notifications for confidential_note_events'
+        },
         {
           required: false,
           name: :tag_push_events,
@@ -696,7 +702,16 @@ module API
             type: String,
             desc: 'The password of the user'
           }
-        ]
+        ],
+        'unify-circuit' => [
+          {
+            required: true,
+            name: :webhook,
+            type: String,
+            desc: 'The Unify Circuit webhook. e.g. https://circuit.com/rest/v2/webhooks/incoming/…'
+          },
+          chat_notification_events
+        ].flatten
       }
     end
......
@@ -2,6 +2,8 @@
 module Gitaly
   class Server
+    SHA_VERSION_REGEX = /\A\d+\.\d+\.\d+-\d+-g([a-f0-9]{8})\z/.freeze
     class << self
       def all
         Gitlab.config.repositories.storages.keys.map { |s| Gitaly::Server.new(s) }
@@ -30,9 +32,10 @@ module Gitaly
       info.git_version
     end
-    def up_to_date?
-      server_version == Gitlab::GitalyClient.expected_server_version
+    def expected_version?
+      server_version == Gitlab::GitalyClient.expected_server_version || matches_sha?
     end
+    alias_method :up_to_date?, :expected_version?
     def read_writeable?
       readable? && writeable?
@@ -62,6 +65,13 @@ module Gitaly
       @storage_status ||= info.storage_statuses.find { |s| s.storage_name == storage }
     end
+    def matches_sha?
+      match = server_version.match(SHA_VERSION_REGEX)
+      return false unless match
+      Gitlab::GitalyClient.expected_server_version.start_with?(match[1])
+    end
     def info
       @info ||=
         begin
......
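For reference, a worked example of what `SHA_VERSION_REGEX` accepts (the version strings below are made up for illustration):

```ruby
SHA_VERSION_REGEX = /\A\d+\.\d+\.\d+-\d+-g([a-f0-9]{8})\z/.freeze

# A build from an untagged commit carries a short SHA suffix:
'1.72.0-135-g16c10878'.match(SHA_VERSION_REGEX)[1]
# => "16c10878" -- compared via start_with? against the expected server version

# A plain release version does not match, so the exact version comparison applies:
'1.72.0'.match(SHA_VERSION_REGEX)
# => nil
```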
 .auto-deploy:
-  image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.7.0"
+  image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.8.0"
 review:
   extends: .auto-deploy
......
@@ -155,6 +155,7 @@ module Gitlab
     # column - The name of the column to create the foreign key on.
     # on_delete - The action to perform when associated data is removed,
     #             defaults to "CASCADE".
+    # name - The name of the foreign key.
     #
     # rubocop:disable Gitlab/RailsLogger
     def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade, name: nil)
@@ -164,25 +165,31 @@
         raise 'add_concurrent_foreign_key can not be run inside a transaction'
       end
-      on_delete = 'SET NULL' if on_delete == :nullify
-      key_name = name || concurrent_foreign_key_name(source, column)
-      unless foreign_key_exists?(source, target, column: column)
-        Rails.logger.warn "Foreign key not created because it exists already " \
-          "(this may be due to an aborted migration or similar): " \
-          "source: #{source}, target: #{target}, column: #{column}"
+      options = {
+        column: column,
+        on_delete: on_delete,
+        name: name.presence || concurrent_foreign_key_name(source, column)
+      }
+      if foreign_key_exists?(source, target, options)
+        warning_message = "Foreign key not created because it exists already " \
+          "(this may be due to an aborted migration or similar): " \
+          "source: #{source}, target: #{target}, column: #{options[:column]}, "\
+          "name: #{options[:name]}, on_delete: #{options[:on_delete]}"
+        Rails.logger.warn warning_message
+      else
         # Using NOT VALID allows us to create a key without immediately
         # validating it. This means we keep the ALTER TABLE lock only for a
         # short period of time. The key _is_ enforced for any newly created
        # data.
         execute <<-EOF.strip_heredoc
         ALTER TABLE #{source}
-        ADD CONSTRAINT #{key_name}
-        FOREIGN KEY (#{column})
+        ADD CONSTRAINT #{options[:name]}
+        FOREIGN KEY (#{options[:column]})
         REFERENCES #{target} (id)
-        #{on_delete ? "ON DELETE #{on_delete.upcase}" : ''}
+        #{on_delete_statement(options[:on_delete])}
         NOT VALID;
         EOF
       end
@@ -193,18 +200,15 @@
       #
       # Note this is a no-op in case the constraint is VALID already
       disable_statement_timeout do
-        execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};")
+        execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{options[:name]};")
       end
     end
     # rubocop:enable Gitlab/RailsLogger
-    def foreign_key_exists?(source, target = nil, column: nil)
-      foreign_keys(source).any? do |key|
-        if column
-          key.options[:column].to_s == column.to_s
-        else
-          key.to_table.to_s == target.to_s
-        end
+    def foreign_key_exists?(source, target = nil, **options)
+      foreign_keys(source).any? do |foreign_key|
+        tables_match?(target.to_s, foreign_key.to_table.to_s) &&
+          options_match?(foreign_key.options, options)
       end
     end
@@ -1050,6 +1054,21 @@ into similar problems in the future (e.g. when new tables are created).
     private
+    def tables_match?(target_table, foreign_key_table)
+      target_table.blank? || foreign_key_table == target_table
+    end
+    def options_match?(foreign_key_options, options)
+      options.all? { |k, v| foreign_key_options[k].to_s == v.to_s }
+    end
+    def on_delete_statement(on_delete)
+      return '' if on_delete.blank?
+      return 'ON DELETE SET NULL' if on_delete == :nullify
+      "ON DELETE #{on_delete.upcase}"
+    end
     def create_column_from(table, old, new, type: nil)
       old_col = column_for(table, old)
       new_type = type || old_col.type
......
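The hunks above extend add_concurrent_foreign_key with an optional name: argument and make foreign_key_exists? compare column, name and on_delete together. As a rough illustration of how a migration could call the updated helper, here is a hypothetical sketch; the table, column and constraint name are examples only and are not taken from this commit:

# frozen_string_literal: true

# Hypothetical migration sketch; names are illustrative, not from this commit.
class AddExampleForeignKey < ActiveRecord::Migration[5.2]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  # add_concurrent_foreign_key raises if it runs inside a transaction.
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :merge_requests, :projects,
      column: :target_project_id,
      on_delete: :cascade,
      name: :fk_merge_requests_target_project_id # new optional name: argument
  end

  def down
    # foreign_key_exists? now also accepts name/on_delete; column alone is enough here.
    return unless foreign_key_exists?(:merge_requests, :projects, column: :target_project_id)

    remove_foreign_key :merge_requests, column: :target_project_id
  end
end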
...@@ -34,12 +34,18 @@ module Sentry ...@@ -34,12 +34,18 @@ module Sentry
end end
def list_issues(**keyword_args) def list_issues(**keyword_args)
issues = get_issues(keyword_args) response = get_issues(keyword_args)
issues = response[:issues]
pagination = response[:pagination]
validate_size(issues) validate_size(issues)
handle_mapping_exceptions do handle_mapping_exceptions do
map_to_errors(issues) {
issues: map_to_errors(issues),
pagination: pagination
}
end end
end end
...@@ -83,36 +89,40 @@ module Sentry ...@@ -83,36 +89,40 @@ module Sentry
end end
def get_issues(**keyword_args) def get_issues(**keyword_args)
http_get( response = http_get(
issues_api_url, issues_api_url,
query: list_issue_sentry_query(keyword_args) query: list_issue_sentry_query(keyword_args)
) )
{
issues: response[:body],
pagination: Sentry::PaginationParser.parse(response[:headers])
}
end end
def list_issue_sentry_query(issue_status:, limit:, sort: nil, search_term: '') def list_issue_sentry_query(issue_status:, limit:, sort: nil, search_term: '', cursor: nil)
unless SENTRY_API_SORT_VALUE_MAP.key?(sort) unless SENTRY_API_SORT_VALUE_MAP.key?(sort)
raise BadRequestError, 'Invalid value for sort param' raise BadRequestError, 'Invalid value for sort param'
end end
query_params = { {
query: "is:#{issue_status} #{search_term}".strip, query: "is:#{issue_status} #{search_term}".strip,
limit: limit, limit: limit,
sort: SENTRY_API_SORT_VALUE_MAP[sort] sort: SENTRY_API_SORT_VALUE_MAP[sort],
} cursor: cursor
}.compact
query_params.compact
end end
def get_issue(issue_id:) def get_issue(issue_id:)
http_get(issue_api_url(issue_id)) http_get(issue_api_url(issue_id))[:body]
end end
def get_issue_latest_event(issue_id:) def get_issue_latest_event(issue_id:)
http_get(issue_latest_event_api_url(issue_id)) http_get(issue_latest_event_api_url(issue_id))[:body]
end end
def get_projects def get_projects
http_get(projects_api_url) http_get(projects_api_url)[:body]
end end
def handle_request_exceptions def handle_request_exceptions
...@@ -138,7 +148,7 @@ module Sentry ...@@ -138,7 +148,7 @@ module Sentry
raise_error "Sentry response status code: #{response.code}" raise_error "Sentry response status code: #{response.code}"
end end
response.parsed_response { body: response.parsed_response, headers: response.headers }
end end
def raise_error(message) def raise_error(message)
......
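With these changes list_issues returns a hash of issues plus pagination cursors instead of a bare array, and accepts a cursor keyword that is forwarded to Sentry. A hedged sketch of how calling code might page through results; the client construction arguments are placeholders, not values from this commit:

# Illustrative consumer of the new return shape; URL and token are placeholders.
client = Sentry::Client.new('https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/', 'token')

result = client.list_issues(issue_status: 'unresolved', limit: 20, sort: 'last_seen')
errors     = result[:issues]      # array of Gitlab::ErrorTracking::Error objects
pagination = result[:pagination]  # e.g. { 'next' => { 'cursor' => '1572959139000:0:0' } }

# Passing the cursor back fetches the next page.
if (next_cursor = pagination.dig('next', 'cursor'))
  client.list_issues(issue_status: 'unresolved', limit: 20, sort: 'last_seen', cursor: next_cursor)
end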
# frozen_string_literal: true
module Sentry
module PaginationParser
PATTERN = /rel=\"(?<direction>\w+)\";\sresults=\"(?<results>\w+)\";\scursor=\"(?<cursor>.+)\"/.freeze
def self.parse(headers)
links = headers['link'].to_s.split(',')
links.map { |link| parse_link(link) }.compact.to_h
end
def self.parse_link(link)
match = link.match(PATTERN)
return unless match
return if match['results'] != "true"
[match['direction'], { 'cursor' => match['cursor'] }]
end
private_class_method :parse_link
end
end
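For reference, a worked example of the parser against a Sentry-style Link header; the values mirror the fixture used in the spec later in this commit:

headers = {
  'link' => '<https://sentrytest.gitlab.com>; rel="previous"; results="false"; cursor="1573556671000:0:1", ' \
            '<https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
}

Sentry::PaginationParser.parse(headers)
# => { "next" => { "cursor" => "1572959139000:0:0" } }
# The "previous" link is dropped because its results flag is "false".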
...@@ -3361,6 +3361,9 @@ msgstr "" ...@@ -3361,6 +3361,9 @@ msgstr ""
msgid "Clear" msgid "Clear"
msgstr "" msgstr ""
msgid "Clear chart filters"
msgstr ""
msgid "Clear input" msgid "Clear input"
msgstr "" msgstr ""
...@@ -20934,6 +20937,9 @@ msgstr "" ...@@ -20934,6 +20937,9 @@ msgstr ""
msgid "is not an email you own" msgid "is not an email you own"
msgstr "" msgstr ""
msgid "is too long (%{current_value}). The maximum size is %{max_size}."
msgstr ""
msgid "is too long (maximum is 100 entries)" msgid "is too long (maximum is 100 entries)"
msgstr "" msgstr ""
......
...@@ -2,8 +2,8 @@ source 'https://rubygems.org' ...@@ -2,8 +2,8 @@ source 'https://rubygems.org'
gem 'gitlab-qa' gem 'gitlab-qa'
gem 'activesupport', '5.2.3' # This should stay in sync with the root's Gemfile gem 'activesupport', '5.2.3' # This should stay in sync with the root's Gemfile
gem 'capybara', '~> 2.16.1' gem 'capybara', '~> 3.29.0'
gem 'capybara-screenshot', '~> 1.0.18' gem 'capybara-screenshot', '~> 1.0.23'
gem 'rake', '~> 12.3.0' gem 'rake', '~> 12.3.0'
gem 'rspec', '~> 3.7' gem 'rspec', '~> 3.7'
gem 'selenium-webdriver', '~> 3.12' gem 'selenium-webdriver', '~> 3.12'
......
...@@ -6,8 +6,8 @@ GEM ...@@ -6,8 +6,8 @@ GEM
i18n (>= 0.7, < 2) i18n (>= 0.7, < 2)
minitest (~> 5.1) minitest (~> 5.1)
tzinfo (~> 1.1) tzinfo (~> 1.1)
addressable (2.5.2) addressable (2.7.0)
public_suffix (>= 2.0.2, < 4.0) public_suffix (>= 2.0.2, < 5.0)
airborne (0.2.13) airborne (0.2.13)
activesupport activesupport
rack rack
...@@ -15,15 +15,16 @@ GEM ...@@ -15,15 +15,16 @@ GEM
rest-client (>= 1.7.3, < 3.0) rest-client (>= 1.7.3, < 3.0)
rspec (~> 3.1) rspec (~> 3.1)
byebug (9.1.0) byebug (9.1.0)
capybara (2.16.1) capybara (3.29.0)
addressable addressable
mini_mime (>= 0.1.3) mini_mime (>= 0.1.3)
nokogiri (>= 1.3.3) nokogiri (~> 1.8)
rack (>= 1.0.0) rack (>= 1.6.0)
rack-test (>= 0.5.4) rack-test (>= 0.6.3)
xpath (~> 2.0) regexp_parser (~> 1.5)
capybara-screenshot (1.0.18) xpath (~> 3.2)
capybara (>= 1.0, < 3) capybara-screenshot (1.0.23)
capybara (>= 1.0, < 4)
launchy launchy
childprocess (3.0.0) childprocess (3.0.0)
coderay (1.1.2) coderay (1.1.2)
...@@ -50,7 +51,7 @@ GEM ...@@ -50,7 +51,7 @@ GEM
mime-types (3.1) mime-types (3.1)
mime-types-data (~> 3.2015) mime-types-data (~> 3.2015)
mime-types-data (3.2016.0521) mime-types-data (3.2016.0521)
mini_mime (1.0.0) mini_mime (1.0.2)
mini_portile2 (2.4.0) mini_portile2 (2.4.0)
minitest (5.11.3) minitest (5.11.3)
netrc (0.11.0) netrc (0.11.0)
...@@ -65,11 +66,12 @@ GEM ...@@ -65,11 +66,12 @@ GEM
pry-byebug (3.5.1) pry-byebug (3.5.1)
byebug (~> 9.1) byebug (~> 9.1)
pry (~> 0.10) pry (~> 0.10)
public_suffix (3.0.1) public_suffix (4.0.1)
rack (2.0.6) rack (2.0.7)
rack-test (0.8.2) rack-test (0.8.3)
rack (>= 1.0, < 3) rack (>= 1.0, < 3)
rake (12.3.3) rake (12.3.0)
regexp_parser (1.6.0)
rest-client (2.0.2) rest-client (2.0.2)
http-cookie (>= 1.0.2, < 2.0) http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0) mime-types (>= 1.16, < 4.0)
...@@ -103,8 +105,8 @@ GEM ...@@ -103,8 +105,8 @@ GEM
unf (0.1.4) unf (0.1.4)
unf_ext unf_ext
unf_ext (0.0.7.4) unf_ext (0.0.7.4)
xpath (2.1.0) xpath (3.2.0)
nokogiri (~> 1.3) nokogiri (~> 1.8)
PLATFORMS PLATFORMS
ruby ruby
...@@ -112,8 +114,8 @@ PLATFORMS ...@@ -112,8 +114,8 @@ PLATFORMS
DEPENDENCIES DEPENDENCIES
activesupport (= 5.2.3) activesupport (= 5.2.3)
airborne (~> 0.2.13) airborne (~> 0.2.13)
capybara (~> 2.16.1) capybara (~> 3.29.0)
capybara-screenshot (~> 1.0.18) capybara-screenshot (~> 1.0.23)
debase (~> 0.2.4.1) debase (~> 0.2.4.1)
faker (~> 1.6, >= 1.6.6) faker (~> 1.6, >= 1.6.6)
gitlab-qa gitlab-qa
......
...@@ -141,6 +141,10 @@ module QA ...@@ -141,6 +141,10 @@ module QA
page.has_no_text? text page.has_no_text? text
end end
def has_normalized_ws_text?(text, wait: Capybara.default_max_wait_time)
page.has_text?(text.gsub(/\s+/, " "), wait: wait)
end
def finished_loading? def finished_loading?
has_no_css?('.fa-spinner', wait: Capybara.default_max_wait_time) has_no_css?('.fa-spinner', wait: Capybara.default_max_wait_time)
end end
......

...@@ -14,11 +14,7 @@ module QA::Page ...@@ -14,11 +14,7 @@ module QA::Page
def click_on_latest_pipeline def click_on_latest_pipeline
css = '.js-pipeline-url-link' css = '.js-pipeline-url-link'
link = wait(reload: false) do first(css, wait: 60).click
first(css)
end
link.click
end end
def wait_for_latest_pipeline_success def wait_for_latest_pipeline_success
......
...@@ -132,6 +132,10 @@ module QA ...@@ -132,6 +132,10 @@ module QA
config.default_max_wait_time = CAPYBARA_MAX_WAIT_TIME config.default_max_wait_time = CAPYBARA_MAX_WAIT_TIME
# https://github.com/mattheworiordan/capybara-screenshot/issues/164 # https://github.com/mattheworiordan/capybara-screenshot/issues/164
config.save_path = ::File.expand_path('../../tmp', __dir__) config.save_path = ::File.expand_path('../../tmp', __dir__)
# Capybara 3 does not normalize text by default, so older tests
# fail because of unexpected line breaks and other white space
config.default_normalize_ws = true
end end
end end
......
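The Capybara 3 upgrade is the reason for both default_normalize_ws above and the new has_normalized_ws_text? page helper: Capybara 2 collapsed whitespace when matching text, Capybara 3 does not. A small illustration with a hypothetical sample string:

# Rendered template content keeps its newlines and indentation:
expected = "stages:\n  - build\n  - test"

# Under Capybara 3 a verbatim page.has_text?(expected) can fail on the line
# breaks; collapsing runs of whitespace is what has_normalized_ws_text? does:
expected.gsub(/\s+/, " ")
# => "stages: - build - test"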
...@@ -52,16 +52,16 @@ module QA ...@@ -52,16 +52,16 @@ module QA
Page::Project::Show.perform(&:create_new_file!) Page::Project::Show.perform(&:create_new_file!)
Page::File::Form.perform do |form| Page::File::Form.perform do |form|
form.select_template template[:file_name], template[:name] form.select_template template[:file_name], template[:name]
end
expect(page).to have_content(content[0..100]) expect(form).to have_normalized_ws_text(content[0..100])
Page::File::Form.perform(&:commit_changes) form.commit_changes
expect(page).to have_content('The file has been successfully created.') expect(form).to have_content('The file has been successfully created.')
expect(page).to have_content(template[:file_name]) expect(form).to have_content(template[:file_name])
expect(page).to have_content('Add new file') expect(form).to have_content('Add new file')
expect(page).to have_content(content[0..100]) expect(form).to have_normalized_ws_text(content[0..100])
end
end end
end end
end end
......
...@@ -54,15 +54,15 @@ module QA ...@@ -54,15 +54,15 @@ module QA
ide.create_new_file_from_template template[:file_name], template[:name] ide.create_new_file_from_template template[:file_name], template[:name]
expect(ide.has_file?(template[:file_name])).to be_truthy expect(ide.has_file?(template[:file_name])).to be_truthy
end
expect(page).to have_button('Undo') expect(ide).to have_button('Undo')
expect(page).to have_content(content[0..100]) expect(ide).to have_normalized_ws_text(content[0..100])
Page::Project::WebIDE::Edit.perform(&:commit_changes) ide.commit_changes
expect(page).to have_content(template[:file_name]) expect(ide).to have_content(template[:file_name])
expect(page).to have_content(content[0..100]) expect(ide).to have_normalized_ws_text(content[0..100])
end
end end
end end
end end
......
...@@ -50,8 +50,6 @@ describe Projects::ErrorTrackingController do ...@@ -50,8 +50,6 @@ describe Projects::ErrorTrackingController do
let(:external_url) { 'http://example.com' } let(:external_url) { 'http://example.com' }
context 'no data' do context 'no data' do
let(:params) { project_params(format: :json) }
let(:permitted_params) do let(:permitted_params) do
ActionController::Parameters.new({}).permit! ActionController::Parameters.new({}).permit!
end end
...@@ -72,11 +70,13 @@ describe Projects::ErrorTrackingController do ...@@ -72,11 +70,13 @@ describe Projects::ErrorTrackingController do
end end
end end
context 'with a search_term and sort params' do context 'with extra params' do
let(:params) { project_params(format: :json, search_term: 'something', sort: 'last_seen') } let(:cursor) { '1572959139000:0:0' }
let(:search_term) { 'something' }
let(:sort) { 'last_seen' }
let(:params) { project_params(format: :json, search_term: search_term, sort: sort, cursor: cursor) }
let(:permitted_params) do let(:permitted_params) do
ActionController::Parameters.new(search_term: 'something', sort: 'last_seen').permit! ActionController::Parameters.new(search_term: search_term, sort: sort, cursor: cursor).permit!
end end
before do before do
...@@ -88,7 +88,7 @@ describe Projects::ErrorTrackingController do ...@@ -88,7 +88,7 @@ describe Projects::ErrorTrackingController do
context 'service result is successful' do context 'service result is successful' do
before do before do
expect(list_issues_service).to receive(:execute) expect(list_issues_service).to receive(:execute)
.and_return(status: :success, issues: [error]) .and_return(status: :success, issues: [error], pagination: {})
expect(list_issues_service).to receive(:external_url) expect(list_issues_service).to receive(:external_url)
.and_return(external_url) .and_return(external_url)
end end
...@@ -100,13 +100,16 @@ describe Projects::ErrorTrackingController do ...@@ -100,13 +100,16 @@ describe Projects::ErrorTrackingController do
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('error_tracking/index') expect(response).to match_response_schema('error_tracking/index')
expect(json_response['external_url']).to eq(external_url) expect(json_response).to eq(
expect(json_response['errors']).to eq([error].as_json) 'errors' => [error].as_json,
'pagination' => {},
'external_url' => external_url
)
end end
end end
end end
context 'without params' do context 'without extra params' do
before do before do
expect(ErrorTracking::ListIssuesService) expect(ErrorTracking::ListIssuesService)
.to receive(:new).with(project, user, {}) .to receive(:new).with(project, user, {})
...@@ -116,7 +119,7 @@ describe Projects::ErrorTrackingController do ...@@ -116,7 +119,7 @@ describe Projects::ErrorTrackingController do
context 'service result is successful' do context 'service result is successful' do
before do before do
expect(list_issues_service).to receive(:execute) expect(list_issues_service).to receive(:execute)
.and_return(status: :success, issues: [error]) .and_return(status: :success, issues: [error], pagination: {})
expect(list_issues_service).to receive(:external_url) expect(list_issues_service).to receive(:external_url)
.and_return(external_url) .and_return(external_url)
end end
...@@ -128,8 +131,11 @@ describe Projects::ErrorTrackingController do ...@@ -128,8 +131,11 @@ describe Projects::ErrorTrackingController do
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('error_tracking/index') expect(response).to match_response_schema('error_tracking/index')
expect(json_response['external_url']).to eq(external_url) expect(json_response).to eq(
expect(json_response['errors']).to eq([error].as_json) 'errors' => [error].as_json,
'pagination' => {},
'external_url' => external_url
)
end end
end end
......
...@@ -2,6 +2,7 @@ ...@@ -2,6 +2,7 @@
"type": "object", "type": "object",
"required": [ "required": [
"external_url", "external_url",
"pagination",
"errors" "errors"
], ],
"properties": { "properties": {
...@@ -9,6 +10,9 @@ ...@@ -9,6 +10,9 @@
"errors": { "errors": {
"type": "array", "type": "array",
"items": { "$ref": "error.json" } "items": { "$ref": "error.json" }
},
"pagination": {
"type": "object"
} }
}, },
"additionalProperties": false "additionalProperties": false
......
...@@ -20,6 +20,7 @@ describe('pipeline graph action component', () => { ...@@ -20,6 +20,7 @@ describe('pipeline graph action component', () => {
actionIcon: 'cancel', actionIcon: 'cancel',
}, },
sync: false, sync: false,
attachToDocument: true,
}); });
}); });
......
import { mount } from '@vue/test-utils'; import { shallowMount } from '@vue/test-utils';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import pipelineTriggerer from '~/pipelines/components/pipeline_triggerer.vue'; import pipelineTriggerer from '~/pipelines/components/pipeline_triggerer.vue';
describe('Pipelines Triggerer', () => { describe('Pipelines Triggerer', () => {
let wrapper; let wrapper;
const expectComponentWithProps = (Component, props = {}) => {
const componentWrapper = wrapper.find(Component);
expect(componentWrapper.isVisible()).toBe(true);
expect(componentWrapper.props()).toEqual(expect.objectContaining(props));
};
const mockData = { const mockData = {
pipeline: { pipeline: {
user: { user: {
...@@ -15,9 +22,10 @@ describe('Pipelines Triggerer', () => { ...@@ -15,9 +22,10 @@ describe('Pipelines Triggerer', () => {
}; };
const createComponent = () => { const createComponent = () => {
wrapper = mount(pipelineTriggerer, { wrapper = shallowMount(pipelineTriggerer, {
propsData: mockData, propsData: mockData,
sync: false, sync: false,
attachToDocument: true,
}); });
}; };
...@@ -33,14 +41,12 @@ describe('Pipelines Triggerer', () => { ...@@ -33,14 +41,12 @@ describe('Pipelines Triggerer', () => {
expect(wrapper.contains('.table-section')).toBe(true); expect(wrapper.contains('.table-section')).toBe(true);
}); });
it('should render triggerer information when triggerer is provided', () => { it('should pass triggerer information when triggerer is provided', () => {
const link = wrapper.find('.js-pipeline-url-user'); expectComponentWithProps(UserAvatarLink, {
linkHref: mockData.pipeline.user.path,
expect(link.attributes('href')).toEqual(mockData.pipeline.user.path); tooltipText: mockData.pipeline.user.name,
expect(link.find('.js-user-avatar-image-toolip').text()).toEqual(mockData.pipeline.user.name); imgSrc: mockData.pipeline.user.avatar_url,
expect(link.find('img.avatar').attributes('src')).toEqual( });
`${mockData.pipeline.user.avatar_url}?width=26`,
);
}); });
it('should render "API" when no triggerer is provided', () => { it('should render "API" when no triggerer is provided', () => {
...@@ -50,7 +56,7 @@ describe('Pipelines Triggerer', () => { ...@@ -50,7 +56,7 @@ describe('Pipelines Triggerer', () => {
}, },
}); });
wrapper.vm.$nextTick(() => { return wrapper.vm.$nextTick(() => {
expect(wrapper.find('.js-pipeline-url-api').text()).toEqual('API'); expect(wrapper.find('.js-pipeline-url-api').text()).toEqual('API');
}); });
}); });
......
...@@ -65,4 +65,26 @@ describe Gitaly::Server do ...@@ -65,4 +65,26 @@ describe Gitaly::Server do
end end
end end
end end
describe '#expected_version?' do
using RSpec::Parameterized::TableSyntax
where(:expected_version, :server_version, :result) do
'1.1.1' | '1.1.1' | true
'1.1.2' | '1.1.1' | false
'1.73.0' | '1.73.0-18-gf756ebe2' | false
'594c3ea3e0e5540e5915bd1c49713a0381459dd6' | '1.55.6-45-g594c3ea3' | true
'594c3ea3e0e5540e5915bd1c49713a0381459dd6' | '1.55.6-46-gabc123ff' | false
'594c3ea3e0e5540e5915bd1c49713a0381459dd6' | '1.55.6' | false
end
with_them do
it do
allow(Gitlab::GitalyClient).to receive(:expected_server_version).and_return(expected_version)
allow(server).to receive(:server_version).and_return(server_version)
expect(server.expected_version?).to eq(result)
end
end
end
end end
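The table above covers two comparison modes: an exact match when the expected Gitaly version is a release tag, and a prefix match against the abbreviated commit SHA embedded in the reported version otherwise. A standalone sketch consistent with these cases, not necessarily the code in lib/gitaly/server.rb:

# Hedged sketch only; it mirrors the spec table, not the production method.
def expected_version_matches?(expected, reported)
  if expected.match?(/\A\d+\.\d+\.\d+\z/)
    # Release tag: the reported version must match exactly.
    reported == expected
  else
    # Commit SHA: compare against the short SHA in "1.55.6-45-g594c3ea3".
    match = reported.match(/\A\d+\.\d+\.\d+-\d+-g(?<sha>[0-9a-f]{8,})\z/)
    match ? expected.start_with?(match[:sha]) : false
  end
end

expected_version_matches?('1.1.1', '1.1.1')                                                  # => true
expected_version_matches?('594c3ea3e0e5540e5915bd1c49713a0381459dd6', '1.55.6-45-g594c3ea3') # => true
expected_version_matches?('594c3ea3e0e5540e5915bd1c49713a0381459dd6', '1.55.6')              # => false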
...@@ -212,44 +212,118 @@ describe Gitlab::Database::MigrationHelpers do ...@@ -212,44 +212,118 @@ describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:transaction_open?).and_return(false) allow(model).to receive(:transaction_open?).and_return(false)
end end
it 'creates a concurrent foreign key and validates it' do context 'ON DELETE statements' do
expect(model).to receive(:disable_statement_timeout).and_call_original context 'on_delete: :nullify' do
expect(model).to receive(:execute).with(/statement_timeout/) it 'appends ON DELETE SET NULL statement' do
expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).with(/RESET ALL/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
expect(model).to receive(:execute).with(/ON DELETE SET NULL/)
model.add_concurrent_foreign_key(:projects, :users,
column: :user_id,
on_delete: :nullify)
end
end
model.add_concurrent_foreign_key(:projects, :users, column: :user_id) context 'on_delete: :cascade' do
end it 'appends ON DELETE CASCADE statement' do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
expect(model).to receive(:execute).with(/ON DELETE CASCADE/)
model.add_concurrent_foreign_key(:projects, :users,
column: :user_id,
on_delete: :cascade)
end
end
context 'on_delete: nil' do
it 'appends no ON DELETE statement' do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
it 'appends a valid ON DELETE statement' do expect(model).not_to receive(:execute).with(/ON DELETE/)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).with(/ON DELETE SET NULL/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users, model.add_concurrent_foreign_key(:projects, :users,
column: :user_id, column: :user_id,
on_delete: :nullify) on_delete: nil)
end
end
end end
it 'does not create a foreign key if it exists already' do context 'when no custom key name is supplied' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true) it 'creates a concurrent foreign key and validates it' do
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/) expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
it 'does not create a foreign key if it exists already' do
name = model.concurrent_foreign_key_name(:projects, :user_id)
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
column: :user_id,
on_delete: :cascade,
name: name).and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id) model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
end end
it 'allows the use of a custom key name' do context 'when a custom key name is supplied' do
expect(model).to receive(:disable_statement_timeout).and_call_original context 'for creating a new foreign key for a column that does not presently exist' do
expect(model).to receive(:execute).with(/statement_timeout/) it 'creates a new foreign key' do
expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/) expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).with(/RESET ALL/) expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
end
end
context 'for creating a duplicate foreign key for a column that presently exists' do
context 'when the supplied key name is the same as the existing foreign key name' do
it 'does not create a new foreign key' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
name: :foo,
on_delete: :cascade,
column: :user_id).and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
end
end
context 'when the supplied key name is different from the existing foreign key name' do
it 'creates a new foreign key' do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+bar/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :bar)
end
end
end
end end
end end
end end
...@@ -266,23 +340,61 @@ describe Gitlab::Database::MigrationHelpers do ...@@ -266,23 +340,61 @@ describe Gitlab::Database::MigrationHelpers do
describe '#foreign_key_exists?' do describe '#foreign_key_exists?' do
before do before do
key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id }) key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id, name: :fk_projects_users_non_standard_id, on_delete: :cascade })
allow(model).to receive(:foreign_keys).with(:projects).and_return([key]) allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
end end
it 'finds existing foreign keys by column' do shared_examples_for 'foreign key checks' do
expect(model.foreign_key_exists?(:projects, :users, column: :non_standard_id)).to be_truthy it 'finds existing foreign keys by column' do
expect(model.foreign_key_exists?(:projects, target_table, column: :non_standard_id)).to be_truthy
end
it 'finds existing foreign keys by name' do
expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id)).to be_truthy
end
it 'finds existing foreign_keys by name and column' do
expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id)).to be_truthy
end
it 'finds existing foreign_keys by name, column and on_delete' do
expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id, on_delete: :cascade)).to be_truthy
end
it 'finds existing foreign keys by target table only' do
expect(model.foreign_key_exists?(:projects, target_table)).to be_truthy
end
it 'compares by column name if given' do
expect(model.foreign_key_exists?(:projects, target_table, column: :user_id)).to be_falsey
end
it 'compares by foreign key name if given' do
expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name)).to be_falsey
end
it 'compares by foreign key name and column if given' do
expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey
end
it 'compares by foreign key name, column and on_delete if given' do
expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id, on_delete: :nullify)).to be_falsey
end
end end
it 'finds existing foreign keys by target table only' do context 'without specifying a target table' do
expect(model.foreign_key_exists?(:projects, :users)).to be_truthy let(:target_table) { nil }
it_behaves_like 'foreign key checks'
end end
it 'compares by column name if given' do context 'specifying a target table' do
expect(model.foreign_key_exists?(:projects, :users, column: :user_id)).to be_falsey let(:target_table) { :users }
it_behaves_like 'foreign key checks'
end end
it 'compares by target if no column given' do it 'compares by target table if no column given' do
expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey
end end
end end
......
...@@ -290,6 +290,7 @@ project: ...@@ -290,6 +290,7 @@ project:
- microsoft_teams_service - microsoft_teams_service
- mattermost_service - mattermost_service
- hangouts_chat_service - hangouts_chat_service
- unify_circuit_service
- buildkite_service - buildkite_service
- bamboo_service - bamboo_service
- teamcity_service - teamcity_service
......
...@@ -54,10 +54,20 @@ describe Sentry::Client do ...@@ -54,10 +54,20 @@ describe Sentry::Client do
end end
end end
shared_examples 'issues has correct return type' do |klass|
it "returns objects of type #{klass}" do
expect(subject[:issues]).to all( be_a(klass) )
end
end
shared_examples 'has correct length' do |length| shared_examples 'has correct length' do |length|
it { expect(subject.length).to eq(length) } it { expect(subject.length).to eq(length) }
end end
shared_examples 'issues has correct length' do |length|
it { expect(subject[:issues].length).to eq(length) }
end
# Requires sentry_api_request and subject to be defined # Requires sentry_api_request and subject to be defined
shared_examples 'calls sentry api' do shared_examples 'calls sentry api' do
it 'calls sentry api' do it 'calls sentry api' do
...@@ -95,26 +105,44 @@ describe Sentry::Client do ...@@ -95,26 +105,44 @@ describe Sentry::Client do
let(:issue_status) { 'unresolved' } let(:issue_status) { 'unresolved' }
let(:limit) { 20 } let(:limit) { 20 }
let(:search_term) { '' } let(:search_term) { '' }
let(:cursor) { nil }
let(:sort) { 'last_seen' }
let(:sentry_api_response) { issues_sample_response } let(:sentry_api_response) { issues_sample_response }
let(:sentry_request_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' } let(:sentry_request_url) { sentry_url + '/issues/?limit=20&query=is:unresolved' }
let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) } let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
subject { client.list_issues(issue_status: issue_status, limit: limit, search_term: search_term, sort: 'last_seen') } subject { client.list_issues(issue_status: issue_status, limit: limit, search_term: search_term, sort: sort, cursor: cursor) }
it_behaves_like 'calls sentry api' it_behaves_like 'calls sentry api'
it_behaves_like 'has correct return type', Gitlab::ErrorTracking::Error it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
it_behaves_like 'has correct length', 1 it_behaves_like 'issues has correct length', 1
shared_examples 'has correct external_url' do shared_examples 'has correct external_url' do
context 'external_url' do context 'external_url' do
it 'is constructed correctly' do it 'is constructed correctly' do
expect(subject[0].external_url).to eq('https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/11') expect(subject[:issues][0].external_url).to eq('https://sentrytest.gitlab.com/sentry-org/sentry-project/issues/11')
end end
end end
end end
context 'when response has a pagination info' do
let(:headers) do
{
link: '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
}
end
let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response, headers: headers) }
it 'parses the pagination' do
expect(subject[:pagination]).to eq(
'previous' => { 'cursor' => '1573556671000:0:1' },
'next' => { 'cursor' => '1572959139000:0:0' }
)
end
end
context 'error object created from sentry response' do context 'error object created from sentry response' do
using RSpec::Parameterized::TableSyntax using RSpec::Parameterized::TableSyntax
...@@ -137,7 +165,7 @@ describe Sentry::Client do ...@@ -137,7 +165,7 @@ describe Sentry::Client do
end end
with_them do with_them do
it { expect(subject[0].public_send(error_object)).to eq(sentry_api_response[0].dig(*sentry_response)) } it { expect(subject[:issues][0].public_send(error_object)).to eq(sentry_api_response[0].dig(*sentry_response)) }
end end
it_behaves_like 'has correct external_url' it_behaves_like 'has correct external_url'
...@@ -210,8 +238,8 @@ describe Sentry::Client do ...@@ -210,8 +238,8 @@ describe Sentry::Client do
it_behaves_like 'calls sentry api' it_behaves_like 'calls sentry api'
it_behaves_like 'has correct return type', Gitlab::ErrorTracking::Error it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
it_behaves_like 'has correct length', 1 it_behaves_like 'issues has correct length', 1
it_behaves_like 'has correct external_url' it_behaves_like 'has correct external_url'
end end
...@@ -240,13 +268,23 @@ describe Sentry::Client do ...@@ -240,13 +268,23 @@ describe Sentry::Client do
it_behaves_like 'maps exceptions' it_behaves_like 'maps exceptions'
context 'when search term is present' do context 'when search term is present' do
let(:search_term) { 'NoMethodError'} let(:search_term) { 'NoMethodError' }
let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&query=is:unresolved NoMethodError" } let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&query=is:unresolved NoMethodError" }
it_behaves_like 'calls sentry api' it_behaves_like 'calls sentry api'
it_behaves_like 'has correct return type', Gitlab::ErrorTracking::Error it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
it_behaves_like 'has correct length', 1 it_behaves_like 'issues has correct length', 1
end
context 'when cursor is present' do
let(:cursor) { '1572959139000:0:0' }
let(:sentry_request_url) { "#{sentry_url}/issues/?limit=20&cursor=#{cursor}&query=is:unresolved" }
it_behaves_like 'calls sentry api'
it_behaves_like 'issues has correct return type', Gitlab::ErrorTracking::Error
it_behaves_like 'issues has correct length', 1
end end
end end
......
# frozen_string_literal: true
require 'fast_spec_helper'
require 'support/helpers/fixture_helpers'
describe Sentry::PaginationParser do
include FixtureHelpers
describe '.parse' do
subject { described_class.parse(headers) }
context 'when headers do not have "link" param' do
let(:headers) { {} }
it 'returns empty hash' do
is_expected.to eq({})
end
end
context 'when headers.link has previous and next pages' do
let(:headers) do
{
'link' => '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
}
end
it 'returns info about both pages' do
is_expected.to eq(
'previous' => { 'cursor' => '1573556671000:0:1' },
'next' => { 'cursor' => '1572959139000:0:0' }
)
end
end
context 'when headers.link has only next page' do
let(:headers) do
{
'link' => '<https://sentrytest.gitlab.com>; rel="previous"; results="false"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
}
end
it 'returns only info about the next page' do
is_expected.to eq(
'next' => { 'cursor' => '1572959139000:0:0' }
)
end
end
context 'when headers.link has only previous page' do
let(:headers) do
{
'link' => '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="false"; cursor="1572959139000:0:0"'
}
end
it 'returns only info about the previous page' do
is_expected.to eq(
'previous' => { 'cursor' => '1573556671000:0:1' }
)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20191125114345_add_admin_mode_protected_path.rb')
describe AddAdminModeProtectedPath, :migration do
ADMIN_MODE_ENDPOINT = '/admin/session'
subject(:migration) { described_class.new }
let(:application_settings) { table(:application_settings) }
context 'no settings available' do
it 'makes no changes' do
expect { migrate! }.not_to change { application_settings.count }
end
end
context 'protected_paths is null' do
before do
application_settings.create!(protected_paths: nil)
end
it 'makes no changes' do
expect { migrate! }.not_to change { application_settings.first.protected_paths }
end
end
it 'appends admin mode endpoint' do
application_settings.create!(protected_paths: '{a,b,c}')
protected_paths_before = %w[a b c]
protected_paths_after = protected_paths_before.dup << ADMIN_MODE_ENDPOINT
expect { migrate! }.to change { application_settings.first.protected_paths }.from(protected_paths_before).to(protected_paths_after)
end
it 'new default includes admin mode endpoint' do
settings_before = application_settings.create!
expect(settings_before.protected_paths).not_to include(ADMIN_MODE_ENDPOINT)
migrate!
application_settings.reset_column_information
settings_after = application_settings.create!
expect(settings_after.protected_paths).to include(ADMIN_MODE_ENDPOINT)
end
end
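The spec exercises three behaviours: a no-op when no settings row exists, a no-op when protected_paths is NULL, and appending /admin/session to existing rows; the "new default" example also implies the column default changes, checked via reset_column_information. A minimal sketch of a migration with that shape; the shipped 20191125114345 migration may differ, and the updated default list is only hinted at here:

# frozen_string_literal: true

# Rough sketch matching the spec above, not the shipped migration.
class AddAdminModeProtectedPath < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  ADMIN_MODE_ENDPOINT = '/admin/session'

  def up
    # The spec's "new default" example implies the column default is also
    # updated to include the endpoint; that default list is omitted here.
    execute <<~SQL
      UPDATE application_settings
      SET protected_paths = array_append(protected_paths, '#{ADMIN_MODE_ENDPOINT}')
      WHERE protected_paths IS NOT NULL
        AND NOT ('#{ADMIN_MODE_ENDPOINT}' = ANY(protected_paths))
    SQL
  end

  def down
    # Intentionally a no-op in this sketch.
  end
end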
...@@ -66,6 +66,8 @@ describe ApplicationSetting do ...@@ -66,6 +66,8 @@ describe ApplicationSetting do
it { is_expected.not_to allow_value('three').for(:push_event_activities_limit) } it { is_expected.not_to allow_value('three').for(:push_event_activities_limit) }
it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) } it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
context 'when snowplow is enabled' do context 'when snowplow is enabled' do
before do before do
setting.snowplow_enabled = true setting.snowplow_enabled = true
......
...@@ -153,9 +153,9 @@ describe ErrorTracking::ProjectErrorTrackingSetting do ...@@ -153,9 +153,9 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
it 'returns cached issues' do it 'returns cached issues' do
expect(sentry_client).to receive(:list_issues).with(opts) expect(sentry_client).to receive(:list_issues).with(opts)
.and_return(issues) .and_return(issues: issues, pagination: {})
expect(result).to eq(issues: issues) expect(result).to eq(issues: issues, pagination: {})
end end
end end
......
# frozen_string_literal: true
require "spec_helper"
describe UnifyCircuitService do
it_behaves_like "chat service", "Unify Circuit" do
let(:client_arguments) { webhook_url }
let(:content_key) { :subject }
end
end
...@@ -33,6 +33,7 @@ describe Project do ...@@ -33,6 +33,7 @@ describe Project do
it { is_expected.to have_one(:microsoft_teams_service) } it { is_expected.to have_one(:microsoft_teams_service) }
it { is_expected.to have_one(:mattermost_service) } it { is_expected.to have_one(:mattermost_service) }
it { is_expected.to have_one(:hangouts_chat_service) } it { is_expected.to have_one(:hangouts_chat_service) }
it { is_expected.to have_one(:unify_circuit_service) }
it { is_expected.to have_one(:packagist_service) } it { is_expected.to have_one(:packagist_service) }
it { is_expected.to have_one(:pushover_service) } it { is_expected.to have_one(:pushover_service) }
it { is_expected.to have_one(:asana_service) } it { is_expected.to have_one(:asana_service) }
......
...@@ -31,6 +31,62 @@ describe Snippet do ...@@ -31,6 +31,62 @@ describe Snippet do
it { is_expected.to validate_presence_of(:content) } it { is_expected.to validate_presence_of(:content) }
it { is_expected.to validate_inclusion_of(:visibility_level).in_array(Gitlab::VisibilityLevel.values) } it { is_expected.to validate_inclusion_of(:visibility_level).in_array(Gitlab::VisibilityLevel.values) }
it do
allow(Gitlab::CurrentSettings).to receive(:snippet_size_limit).and_return(1)
is_expected
.to validate_length_of(:content)
.is_at_most(Gitlab::CurrentSettings.snippet_size_limit)
.with_message("is too long (2 Bytes). The maximum size is 1 Byte.")
end
context 'content validations' do
context 'with existing snippets' do
let(:snippet) { create(:personal_snippet, content: 'This is a valid content at the time of creation') }
before do
expect(snippet).to be_valid
stub_application_setting(snippet_size_limit: 2)
end
it 'does not raise a validation error if the content is not changed' do
snippet.title = 'new title'
expect(snippet).to be_valid
end
it 'raises an error if the content is changed and the size is bigger than limit' do
snippet.content = snippet.content + "test"
expect(snippet).not_to be_valid
end
end
context 'with new snippets' do
let(:limit) { 15 }
before do
stub_application_setting(snippet_size_limit: limit)
end
it 'is valid when content is smaller than the limit' do
snippet = build(:personal_snippet, content: 'Valid Content')
expect(snippet).to be_valid
end
it 'raises error when content is bigger than setting limit' do
snippet = build(:personal_snippet, content: 'This is an invalid content')
aggregate_failures do
expect(snippet).not_to be_valid
expect(snippet.errors[:content]).to include("is too long (#{snippet.content.size} Bytes). The maximum size is #{limit} Bytes.")
end
end
end
end
end end
describe '#to_reference' do describe '#to_reference' do
......
...@@ -36,6 +36,7 @@ describe API::Settings, 'Settings' do ...@@ -36,6 +36,7 @@ describe API::Settings, 'Settings' do
expect(json_response['allow_local_requests_from_system_hooks']).to be(true) expect(json_response['allow_local_requests_from_system_hooks']).to be(true)
expect(json_response).not_to have_key('performance_bar_allowed_group_path') expect(json_response).not_to have_key('performance_bar_allowed_group_path')
expect(json_response).not_to have_key('performance_bar_enabled') expect(json_response).not_to have_key('performance_bar_enabled')
expect(json_response['snippet_size_limit']).to eq(50.megabytes)
end end
end end
...@@ -85,7 +86,8 @@ describe API::Settings, 'Settings' do ...@@ -85,7 +86,8 @@ describe API::Settings, 'Settings' do
allow_local_requests_from_web_hooks_and_services: true, allow_local_requests_from_web_hooks_and_services: true,
allow_local_requests_from_system_hooks: false, allow_local_requests_from_system_hooks: false,
push_event_hooks_limit: 2, push_event_hooks_limit: 2,
push_event_activities_limit: 2 push_event_activities_limit: 2,
snippet_size_limit: 5
} }
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
...@@ -121,6 +123,7 @@ describe API::Settings, 'Settings' do ...@@ -121,6 +123,7 @@ describe API::Settings, 'Settings' do
expect(json_response['allow_local_requests_from_system_hooks']).to eq(false) expect(json_response['allow_local_requests_from_system_hooks']).to eq(false)
expect(json_response['push_event_hooks_limit']).to eq(2) expect(json_response['push_event_hooks_limit']).to eq(2)
expect(json_response['push_event_activities_limit']).to eq(2) expect(json_response['push_event_activities_limit']).to eq(2)
expect(json_response['snippet_size_limit']).to eq(5)
end end
end end
......
...@@ -5,13 +5,14 @@ require 'spec_helper' ...@@ -5,13 +5,14 @@ require 'spec_helper'
describe ErrorTracking::ListIssuesService do describe ErrorTracking::ListIssuesService do
set(:user) { create(:user) } set(:user) { create(:user) }
set(:project) { create(:project) } set(:project) { create(:project) }
let(:params) { { search_term: 'something', sort: 'last_seen' } } let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
let(:list_sentry_issues_args) do let(:list_sentry_issues_args) do
{ {
issue_status: 'unresolved', issue_status: 'unresolved',
limit: 20, limit: 20,
search_term: params[:search_term], search_term: 'something',
sort: params[:sort] sort: 'last_seen',
cursor: 'some-cursor'
} }
end end
...@@ -40,11 +41,11 @@ describe ErrorTracking::ListIssuesService do ...@@ -40,11 +41,11 @@ describe ErrorTracking::ListIssuesService do
expect(error_tracking_setting) expect(error_tracking_setting)
.to receive(:list_sentry_issues) .to receive(:list_sentry_issues)
.with(list_sentry_issues_args) .with(list_sentry_issues_args)
.and_return(issues: issues) .and_return(issues: issues, pagination: {})
end end
it 'returns the issues' do it 'returns the issues' do
expect(result).to eq(status: :success, issues: issues) expect(result).to eq(status: :success, pagination: {}, issues: issues)
end end
end end
......
...@@ -80,7 +80,7 @@ shared_examples_for "chat service" do |service_name| ...@@ -80,7 +80,7 @@ shared_examples_for "chat service" do |service_name|
it_behaves_like "triggered #{service_name} service" it_behaves_like "triggered #{service_name} service"
it "specifies the webhook when it is configured" do it "specifies the webhook when it is configured", if: defined?(client) do
expect(client).to receive(:new).with(client_arguments).and_return(double(:chat_service).as_null_object) expect(client).to receive(:new).with(client_arguments).and_return(double(:chat_service).as_null_object)
subject.execute(sample_data) subject.execute(sample_data)
......