Commit 2c72daf2 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 4e9acbfb
<script>
import { GlToggle, GlFormGroup, GlFormTextarea, GlButton } from '@gitlab/ui';
export default {
name: 'MaintenanceModeSettingsApp',
components: {
GlToggle,
GlFormGroup,
GlFormTextarea,
GlButton,
},
data() {
return {
inMaintenanceMode: false,
bannerMessage: '',
};
},
};
</script>
<template>
<article>
<div class="d-flex align-items-center mb-3">
<gl-toggle v-model="inMaintenanceMode" class="mb-0" />
<div class="ml-2">
<p class="mb-0">{{ __('Enable maintenance mode') }}</p>
<p class="mb-0 text-secondary-500">
{{
__('Non-admin users can sign in with read-only access and make read-only API requests.')
}}
</p>
</div>
</div>
<gl-form-group label="Banner Message" label-for="maintenanceBannerMessage">
<gl-form-textarea
id="maintenanceBannerMessage"
v-model="bannerMessage"
:placeholder="__(`GitLab is undergoing maintenance and is operating in a read-only mode.`)"
/>
</gl-form-group>
<div class="mt-4">
<gl-button variant="success">{{ __('Save changes') }}</gl-button>
</div>
</article>
</template>
import Vue from 'vue';
import Translate from '~/vue_shared/translate';
import MaintenanceModeSettingsApp from './components/app.vue';
Vue.use(Translate);
export default () => {
const el = document.getElementById('js-maintenance-mode-settings');
return new Vue({
el,
components: {
MaintenanceModeSettingsApp,
},
render(createElement) {
return createElement('maintenance-mode-settings-app');
},
});
};
import initSettingsPanels from '~/settings_panels';
import projectSelect from '~/project_select';
import selfMonitor from '~/self_monitor';
import maintenanceModeSettings from '~/maintenance_mode_settings';
document.addEventListener('DOMContentLoaded', () => {
selfMonitor();
maintenanceModeSettings();
// Initialize expandable settings panels
initSettingsPanels();
projectSelect();
......
......@@ -37,7 +37,8 @@ export default {
text() {
return sprintf(
s__(`Labels|Promoting %{labelTitle} will make it available for all projects inside %{groupName}.
Existing project labels with the same title will be merged. This action cannot be reversed.`),
Existing project labels with the same title will be merged. If a group label with the same title exists,
it will also be merged. This action cannot be reversed.`),
{
labelTitle: this.labelTitle,
groupName: this.groupName,
......
......@@ -483,6 +483,34 @@ img.emoji {
}
/** COMMON SPACING CLASSES **/
/**
🚨 Do not use these classes — they are deprecated and being removed. 🚨
See https://gitlab.com/gitlab-org/gitlab/issues/36857 for more details.
Instead, if you need a spacing class, add it below using the following values.
$gl-spacing-scale-0: 0;
$gl-spacing-scale-1: 2px;
$gl-spacing-scale-2: 4px;
$gl-spacing-scale-3: 8px;
$gl-spacing-scale-4: 12px;
$gl-spacing-scale-5: 16px;
$gl-spacing-scale-6: 24px;
$gl-spacing-scale-7: 32px;
$gl-spacing-scale-8: 40px;
$gl-spacing-scale-9: 48px;
$gl-spacing-scale-10: 56px;
$gl-spacing-scale-11: 64px;
$gl-spacing-scale-12: 80px;
$gl-spacing-scale-13: 96px;
E.g., a padding top of 96px can be added using:
.gl-shim-pt-13 {
padding-top: 96px;
}
Please use -shim- so it can be differentiated from the old scale classes.
These will be replaced when the Gitlab UI utilities are included.
**/
@each $index, $padding in $spacing-scale {
#{'.gl-p-#{$index}'} { padding: $padding; }
#{'.gl-pl-#{$index}'} { padding-left: $padding; }
......@@ -583,13 +611,11 @@ img.emoji {
.gl-font-size-large { font-size: $gl-font-size-large; }
.gl-line-height-24 { line-height: $gl-line-height-24; }
.gl-line-height-14 { line-height: $gl-line-height-14; }
.gl-font-size-0 { font-size: 0; }
.gl-font-size-12 { font-size: $gl-font-size-12; }
.gl-font-size-14 { font-size: $gl-font-size-14; }
.gl-font-size-16 { font-size: $gl-font-size-16; }
.gl-font-size-20 { font-size: $gl-font-size-20; }
.gl-font-size-28 { font-size: $gl-font-size-28; }
.gl-font-size-42 { font-size: $gl-font-size-42; }
......@@ -599,3 +625,10 @@ img.emoji {
border-top: 1px solid $border-color;
}
/**
🚨 Do not use these classes — they clash with the Gitlab UI design system and will be removed. 🚨
See https://gitlab.com/gitlab-org/gitlab/issues/36857 for more details.
**/
.gl-line-height-14 { line-height: $gl-line-height-14; }
.gl-font-size-20 { font-size: $gl-font-size-20; }
......@@ -11,6 +11,38 @@ $default-transition-duration: 0.15s;
$contextual-sidebar-width: 220px;
$contextual-sidebar-collapsed-width: 50px;
$toggle-sidebar-height: 48px;
/**
🚨 Do not use this spacing scale — it is deprecated and being removed. 🚨
See https://gitlab.com/gitlab-org/gitlab/issues/36857 for more details.
Instead, if you need a spacing class, add it to app/assets/stylesheets/framework/common.scss,
using the following values.
$gl-spacing-scale-0: 0;
$gl-spacing-scale-1: 2px;
$gl-spacing-scale-2: 4px;
$gl-spacing-scale-3: 8px;
$gl-spacing-scale-4: 12px;
$gl-spacing-scale-5: 16px;
$gl-spacing-scale-6: 24px;
$gl-spacing-scale-7: 32px;
$gl-spacing-scale-8: 40px;
$gl-spacing-scale-9: 48px;
$gl-spacing-scale-10: 56px;
$gl-spacing-scale-11: 64px;
$gl-spacing-scale-12: 80px;
$gl-spacing-scale-13: 96px;
E.g., a padding top of 96px can be added using:
.gl-shim-pt-13 {
padding-top: 96px;
}
Please use -shim- so it can be differentiated from the old scale classes.
These will be replaced when the Gitlab UI utilities are included.
**/
$spacing-scale: (
0: 0,
1: #{0.5 * $grid-size},
......
......@@ -67,7 +67,8 @@
.gl-bg-purple-light { background-color: $purple-light; }
// Classes using mixins coming from @gitlab-ui
// can be removed once https://gitlab.com/gitlab-org/gitlab/merge_requests/19021 has been merged
// can be removed once the mixins are added.
// See https://gitlab.com/gitlab-org/gitlab/issues/36857 for more details.
.gl-bg-blue-50 { @include gl-bg-blue-50; }
.gl-bg-red-100 { @include gl-bg-red-100; }
.gl-bg-orange-100 { @include gl-bg-orange-100; }
......
......@@ -8,16 +8,16 @@ module SendsBlob
include SendFileUpload
end
def send_blob(repository, blob, params = {})
def send_blob(repository, blob, inline: true, allow_caching: false)
if blob
headers['X-Content-Type-Options'] = 'nosniff'
return if cached_blob?(blob)
return if cached_blob?(blob, allow_caching: allow_caching)
if blob.stored_externally?
send_lfs_object(blob)
send_lfs_object(blob, repository.project)
else
send_git_blob(repository, blob, params)
send_git_blob(repository, blob, inline: inline)
end
else
render_404
......@@ -26,11 +26,11 @@ module SendsBlob
private
def cached_blob?(blob)
def cached_blob?(blob, allow_caching: false)
stale = stale?(etag: blob.id) # The #stale? method sets cache headers.
# Because we are opinionated we set the cache headers ourselves.
response.cache_control[:public] = project.public?
response.cache_control[:public] = allow_caching
response.cache_control[:max_age] =
if @ref && @commit && @ref == @commit.id # rubocop:disable Gitlab/ModuleWithInstanceVariables
......@@ -48,7 +48,7 @@ module SendsBlob
!stale
end
def send_lfs_object(blob)
def send_lfs_object(blob, project)
lfs_object = find_lfs_object(blob)
if lfs_object && lfs_object.project_allowed_access?(project)
......
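For orientation, here is a minimal sketch of a caller after this refactor: the opaque `params` hash is replaced by explicit keyword arguments, and public cache headers become an explicit opt-in per caller. The controller name, route values, and helper methods below are illustrative, not part of this commit.

```ruby
# Hypothetical caller (not in this diff); the `repository` and `project` helpers are assumed.
class ExampleBlobsController < ApplicationController
  include SendsBlob

  def show
    blob = repository.blob_at(params[:ref], params[:path])

    send_blob(
      repository,
      blob,
      inline: params[:inline] != 'false',  # false => 'attachment' Content-Disposition
      allow_caching: project.public?       # drives response.cache_control[:public]
    )
  end
end
```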
......@@ -2,6 +2,7 @@
module SnippetsActions
extend ActiveSupport::Concern
include SendsBlob
def edit
# We need to load some info from the existing blob
......@@ -12,16 +13,26 @@ module SnippetsActions
end
def raw
disposition = params[:inline] == 'false' ? 'attachment' : 'inline'
workhorse_set_content_type!
# Until all snippets are migrated to versioned
# snippets, we need to support the old
# `SnippetBlob` blobs
if defined?(blob.snippet)
send_data(
convert_line_endings(blob.data),
type: 'text/plain; charset=utf-8',
disposition: disposition,
disposition: content_disposition,
filename: Snippet.sanitized_file_name(blob.name)
)
else
send_blob(
snippet.repository,
blob,
inline: content_disposition == 'inline',
allow_caching: snippet.public?
)
end
end
def js_request?
......@@ -30,6 +41,10 @@ module SnippetsActions
private
def content_disposition
@disposition ||= params[:inline] == 'false' ? 'attachment' : 'inline'
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def blob
return unless snippet
......
......@@ -8,7 +8,7 @@ class Projects::AvatarsController < Projects::ApplicationController
def show
@blob = @repository.blob_at_branch(@repository.root_ref, @project.avatar_in_git)
send_blob(@repository, @blob)
send_blob(@repository, @blob, allow_caching: @project.public?)
end
def destroy
......
......@@ -16,9 +16,8 @@ module Projects
end
def import
import_state = @project.import_state || @project.create_import_state
schedule_import(jira_import_params) unless import_state.in_progress?
response = ::JiraImport::StartImportService.new(current_user, @project, jira_import_params[:jira_project_key]).execute
flash[:notice] = response.message if response.message.present?
redirect_to project_import_jira_path(@project)
end
......@@ -39,21 +38,6 @@ module Projects
redirect_to project_issues_path(@project)
end
def schedule_import(params)
import_data = @project.create_or_update_import_data(data: {}).becomes(JiraImportData)
jira_project_details = JiraImportData::JiraProjectDetails.new(
params[:jira_project_key],
Time.now.strftime('%Y-%m-%d %H:%M:%S'),
{ user_id: current_user.id, name: current_user.name }
)
import_data << jira_project_details
import_data.force_import!
@project.import_type = 'jira'
@project.import_state.schedule if @project.save
end
def jira_import_params
params.permit(:jira_project_key)
end
......
......@@ -17,7 +17,7 @@ class Projects::RawController < Projects::ApplicationController
def show
@blob = @repository.blob_at(@commit.id, @path)
send_blob(@repository, @blob, inline: (params[:inline] != 'false'))
send_blob(@repository, @blob, inline: (params[:inline] != 'false'), allow_caching: @project.public?)
end
private
......
......@@ -45,7 +45,7 @@ class Projects::WikisController < Projects::ApplicationController
render 'show'
elsif file_blob
send_blob(@project_wiki.repository, file_blob)
send_blob(@project_wiki.repository, file_blob, allow_caching: @project.public?)
elsif show_create_form?
# Assign a title to the WikiPage unless `id` is a randomly generated slug from #new
title = params[:id] unless params[:random_title].present?
......
......@@ -51,6 +51,9 @@ module Types
description: "Timestamp of the note's resolution"
field :position, Types::Notes::DiffPositionType, null: true,
description: 'The position of this note on a diff'
field :confidential, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if this note is confidential',
method: :confidential?
end
end
end
......@@ -70,6 +70,16 @@ module Ci
joins(:runner_projects).where(ci_runner_projects: { project_id: project_id })
}
scope :belonging_to_group, -> (group_id, include_ancestors: false) {
groups = ::Group.where(id: group_id)
if include_ancestors
groups = Gitlab::ObjectHierarchy.new(groups).base_and_ancestors
end
joins(:runner_namespaces).where(ci_runner_namespaces: { namespace_id: groups })
}
scope :belonging_to_parent_group_of_project, -> (project_id) {
project_groups = ::Group.joins(:projects).where(projects: { id: project_id })
hierarchy_groups = Gitlab::ObjectHierarchy.new(project_groups).base_and_ancestors
......
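A minimal usage sketch of the new scope may help here (the group lookup is illustrative; the group runners API endpoint added later in this commit calls it with `user_group.id`):

```ruby
# Minimal sketch, e.g. from a Rails console in this codebase; 'my-group' is a placeholder.
group = Group.find_by_full_path('my-group')

# Runners registered directly to the group:
Ci::Runner.belonging_to_group(group.id)

# Runners registered to the group or to any of its ancestor groups,
# as used by the new GET /groups/:id/runners endpoint:
Ci::Runner.belonging_to_group(group.id, include_ancestors: true)
```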
......@@ -48,6 +48,7 @@ class DiscussionEntity < Grape::Entity
expose :for_commit?, as: :for_commit
expose :commit_id
expose :confidential?, as: :confidential
private
......
# frozen_string_literal: true
module JiraImport
class StartImportService
attr_reader :user, :project, :jira_project_key
def initialize(user, project, jira_project_key)
@user = user
@project = project
@jira_project_key = jira_project_key
end
def execute
validation_response = validate
return validation_response if validation_response&.error?
create_and_schedule_import
end
private
def create_and_schedule_import
import_data = project.create_or_update_import_data(data: {}).becomes(JiraImportData)
jira_project_details = JiraImportData::JiraProjectDetails.new(
jira_project_key,
Time.now.strftime('%Y-%m-%d %H:%M:%S'),
{ user_id: user.id, name: user.name }
)
import_data << jira_project_details
import_data.force_import!
project.import_type = 'jira'
project.import_state.schedule if project.save!
ServiceResponse.success(payload: { import_data: import_data } )
rescue => ex
# in case project.save! raises an error
Gitlab::ErrorTracking.track_exception(ex, project_id: project.id)
build_error_response(ex.message)
end
def validate
return build_error_response(_('Jira import feature is disabled.')) unless Feature.enabled?(:jira_issue_import, project)
return build_error_response(_('You do not have permissions to run the import.')) unless user.can?(:admin_project, project)
return build_error_response(_('Jira integration not configured.')) unless project.jira_service&.active?
return build_error_response(_('Unable to find Jira project to import data from.')) if jira_project_key.blank?
return build_error_response(_('Jira import is already running.')) if import_in_progress?
end
def build_error_response(message)
import_data = JiraImportData.new(project: project)
import_data.errors.add(:base, message)
ServiceResponse.error(
message: import_data.errors.full_messages.to_sentence,
http_status: 400,
payload: { import_data: import_data }
)
end
def import_in_progress?
import_state = project.import_state || project.create_import_state
import_state.in_progress?
end
end
end
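As a rough sketch of how a caller consumes the `ServiceResponse` returned by this service (the Jira project key and the flash handling are illustrative; the controller change above simply surfaces `response.message`):

```ruby
# Minimal sketch; 'JIRAKEY', `current_user` and `project` are placeholders.
response = ::JiraImport::StartImportService
  .new(current_user, project, 'JIRAKEY')
  .execute

if response.error?
  # e.g. 'Jira import is already running.' or 'Jira integration not configured.'
  flash[:alert] = response.message
else
  import_data = response.payload[:import_data] # the scheduled JiraImportData record
end
```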
......@@ -36,8 +36,8 @@ module PagesDomains
when 'valid'
save_certificate(acme_order.private_key, api_order)
acme_order.destroy!
# when 'invalid'
# TODO: implement error handling
when 'invalid'
save_order_error(acme_order, api_order)
end
end
......@@ -47,5 +47,28 @@ module PagesDomains
certificate = api_order.certificate
pages_domain.update!(gitlab_provided_key: private_key, gitlab_provided_certificate: certificate)
end
def save_order_error(acme_order, api_order)
log_error(api_order)
return unless Feature.enabled?(:pages_letsencrypt_errors, pages_domain.project)
pages_domain.assign_attributes(auto_ssl_failed: true)
pages_domain.save!(validate: false)
acme_order.destroy!
end
def log_error(api_order)
Gitlab::AppLogger.error(
message: "Failed to obtain Let's Encrypt certificate",
acme_error: api_order.challenge_error,
project_id: pages_domain.project_id,
pages_domain: pages_domain.domain
)
rescue => e
# getting authorizations is an additional network request which can raise errors
Gitlab::ErrorTracking.track_exception(e)
end
end
end
......@@ -104,6 +104,18 @@
= f.submit _('Save changes'), class: "btn btn-success"
- if Feature.enabled?(:maintenance_mode)
%section.settings.no-animate#js-maintenance-mode-toggle{ class: ('expanded' if expanded_by_default?) }
.settings-header
%h4
= _('Maintenance mode')
%button.btn.btn-default.js-settings-toggle{ type: 'button' }
= expanded_by_default? ? _('Collapse') : _('Expand')
%p
= _('Prevent users from performing write operations on GitLab while performing maintenance.')
.settings-content
#js-maintenance-mode-settings
- if Feature.enabled?(:instance_level_integrations)
= render_if_exists 'admin/application_settings/elasticsearch_form'
= render 'admin/application_settings/plantuml'
......
......@@ -10,6 +10,11 @@ class PagesDomainSslRenewalCronWorker # rubocop:disable Scalability/IdempotentWo
return unless ::Gitlab::LetsEncrypt.enabled?
PagesDomain.need_auto_ssl_renewal.with_logging_info.find_each do |domain|
# Ideally this should be handled in the PagesDomain.need_auto_ssl_renewal scope,
# but it's hard to make a scope work with feature flags.
# Once we remove the feature flag we can modify the scope to implement this behaviour
next if Feature.enabled?(:pages_letsencrypt_errors, domain.project) && domain.auto_ssl_failed
with_context(project: domain.project) do
PagesDomainSslRenewalWorker.perform_async(domain.id)
end
......
---
title: Improve message when promoting project labels
merge_request: 28265
author:
type: other
---
title: Render snippet repository blobs
merge_request: 28085
author:
type: changed
---
title: Add API endpoint to list runners for a group
merge_request: 26328
author:
type: added
---
title: Migrate the database to activate projects prometheus service integration for
projects with prometheus installed on shared k8s cluster.
merge_request: 24684
author:
type: fixed
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class FixProjectsWithoutPrometheusService < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 50_000
MIGRATION = 'FixProjectsWithoutPrometheusService'
disable_ddl_transaction!
class Project < ActiveRecord::Base
include EachBatch
end
def up
queue_background_migration_jobs_by_range_at_intervals(Project, MIGRATION, 2.minutes, batch_size: BATCH_SIZE)
end
def down
# no-op
end
end
......@@ -12747,6 +12747,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200219184219
20200219193058
20200219193117
20200220115023
20200220180944
20200221023320
20200221074028
......
......@@ -5335,6 +5335,11 @@ type Note {
"""
bodyHtml: String
"""
Indicates if this note is confidential
"""
confidential: Boolean
"""
Timestamp of the note creation
"""
......@@ -6433,6 +6438,12 @@ type Project {
state: [VulnerabilityState!]
): VulnerabilityConnection
"""
Counts for each severity of vulnerability of the project. Available only when
feature flag `first_class_vulnerabilities` is enabled
"""
vulnerabilitySeveritiesCount: VulnerabilitySeveritiesCount
"""
Web URL of the project
"""
......@@ -9110,6 +9121,41 @@ enum VulnerabilityReportType {
SAST
}
"""
Represents vulnerability counts by severity
"""
type VulnerabilitySeveritiesCount {
"""
Number of vulnerabilities of CRITICAL severity of the project
"""
critical: Int
"""
Number of vulnerabilities of HIGH severity of the project
"""
high: Int
"""
Number of vulnerabilities of INFO severity of the project
"""
info: Int
"""
Number of vulnerabilities of LOW severity of the project
"""
low: Int
"""
Number of vulnerabilities of MEDIUM severity of the project
"""
medium: Int
"""
Number of vulnerabilities of UNKNOWN severity of the project
"""
unknown: Int
}
"""
The severity of the vulnerability.
"""
......
......@@ -16149,6 +16149,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "confidential",
"description": "Indicates if this note is confidential",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "createdAt",
"description": "Timestamp of the note creation",
......@@ -19171,6 +19185,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "vulnerabilitySeveritiesCount",
"description": "Counts for each severity of vulnerability of the project. Available only when feature flag `first_class_vulnerabilities` is enabled",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "VulnerabilitySeveritiesCount",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "webUrl",
"description": "Web URL of the project",
......@@ -27431,6 +27459,103 @@
],
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "VulnerabilitySeveritiesCount",
"description": "Represents vulnerability counts by severity",
"fields": [
{
"name": "critical",
"description": "Number of vulnerabilities of CRITICAL severity of the project",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "high",
"description": "Number of vulnerabilities of HIGH severity of the project",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "info",
"description": "Number of vulnerabilities of INFO severity of the project",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "low",
"description": "Number of vulnerabilities of LOW severity of the project",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "medium",
"description": "Number of vulnerabilities of MEDIUM severity of the project",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "unknown",
"description": "Number of vulnerabilities of UNKNOWN severity of the project",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "VulnerabilitySeverity",
......
......@@ -810,6 +810,7 @@ Represents a milestone.
| `author` | User! | User who wrote this note |
| `body` | String! | Content of the note |
| `bodyHtml` | String | The GitLab Flavored Markdown rendering of `note` |
| `confidential` | Boolean | Indicates if this note is confidential |
| `createdAt` | Time! | Timestamp of the note creation |
| `discussion` | Discussion | The discussion this note is a part of |
| `id` | ID! | ID of the note |
......@@ -925,6 +926,7 @@ Information about pagination in a connection.
| `tagList` | String | List of project tags |
| `userPermissions` | ProjectPermissions! | Permissions for the current user on the resource |
| `visibility` | String | Visibility of the project |
| `vulnerabilitySeveritiesCount` | VulnerabilitySeveritiesCount | Counts for each severity of vulnerability of the project. Available only when feature flag `first_class_vulnerabilities` is enabled |
| `webUrl` | String | Web URL of the project |
| `wikiEnabled` | Boolean | (deprecated) Does this project have wiki enabled?. Use `wiki_access_level` instead |
......@@ -1442,3 +1444,16 @@ Represents a vulnerability.
| `state` | VulnerabilityState | State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED) |
| `title` | String | Title of the vulnerability |
| `vulnerabilityPath` | String | URL to the vulnerability's details page |
## VulnerabilitySeveritiesCount
Represents vulnerability counts by severity
| Name | Type | Description |
| --- | ---- | ---------- |
| `critical` | Int | Number of vulnerabilities of CRITICAL severity of the project |
| `high` | Int | Number of vulnerabilities of HIGH severity of the project |
| `info` | Int | Number of vulnerabilities of INFO severity of the project |
| `low` | Int | Number of vulnerabilities of LOW severity of the project |
| `medium` | Int | Number of vulnerabilities of MEDIUM severity of the project |
| `unknown` | Int | Number of vulnerabilities of UNKNOWN severity of the project |
......@@ -54,7 +54,8 @@ GET /projects/:id/issues/:issue_iid/notes?sort=asc&order_by=updated_at
"noteable_id": 377,
"noteable_type": "Issue",
"noteable_iid": 377,
"resolvable": false
"resolvable": false,
"confidential": false
},
{
"id": 305,
......@@ -74,7 +75,8 @@ GET /projects/:id/issues/:issue_iid/notes?sort=asc&order_by=updated_at
"noteable_id": 121,
"noteable_type": "Issue",
"noteable_iid": 121,
"resolvable": false
"resolvable": false,
"confidential": true
}
]
```
......@@ -332,7 +334,8 @@ Parameters:
"noteable_id": 2,
"noteable_type": "MergeRequest",
"noteable_iid": 2,
"resolvable": false
"resolvable": false,
"confidential": false
}
```
......@@ -449,7 +452,8 @@ Parameters:
},
"expires_at": null,
"updated_at": "2013-10-02T07:34:20Z",
"created_at": "2013-10-02T07:34:20Z"
"created_at": "2013-10-02T07:34:20Z",
"confidential": false
}
```
......
......@@ -471,6 +471,66 @@ DELETE /projects/:id/runners/:runner_id
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/9/runners/9"
```
## List group's runners
List all runners (specific and shared) available in the group as well as its ancestor groups.
Shared runners are listed if at least one shared runner is defined.
```plaintext
GET /groups/:id/runners
GET /groups/:id/runners?type=group_type
GET /groups/:id/runners?status=active
GET /groups/:id/runners?tag_list=tag1,tag2
```
| Attribute | Type | Required | Description |
|------------|----------------|----------|---------------------|
| `id` | integer | yes | The ID of the group owned by the authenticated user |
| `type` | string | no | The type of runners to show, one of: `instance_type`, `group_type`, `project_type` |
| `status` | string | no | The status of runners to show, one of: `active`, `paused`, `online`, `offline` |
| `tag_list` | string array | no | List of the runner's tags |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/9/runners"
```
Example response:
```json
[
{
"id": 3,
"description": "Shared",
"ip_address": "127.0.0.1",
"active": true,
"is_shared": true,
"name": "gitlab-runner",
"online": null,
"status": "not_connected"
},
{
"id": 6,
"description": "Test",
"ip_address": "127.0.0.1",
"active": true,
"is_shared": true,
"name": "gitlab-runner",
"online": false,
"status": "offline"
},
{
"id": 8,
"description": "Test 2",
"ip_address": "127.0.0.1",
"active": true,
"is_shared": false,
"name": "gitlab-runner",
"online": null,
"status": "not_connected"
}
]
```
## Register a new Runner
Register a new Runner for the instance.
......
......@@ -74,9 +74,14 @@ and selecting **Delete**.
#### Promote a project label to a group label
If you previously created a project label and now want to make it available for other
projects within the same group, you can promote it to a group label. If other projects
in the same group have a label with the same name, they will all be overwritten by
the new group label.
projects within the same group, you can promote it to a group label.
If other projects in the same group have a label with the same title, they will all be
merged with the new group label. If a group label with the same title exists, it will
also be merged.
All issues, merge requests, issue board lists, issue board filters, and label subscriptions
with the old labels will be assigned to the new group label.
WARNING: **Caution:**
Promoting a label is a permanent action, and cannot be reversed.
......
......@@ -23,6 +23,8 @@ module API
expose :resolved?, as: :resolved, if: ->(note, options) { note.resolvable? }
expose :resolved_by, using: Entities::UserBasic, if: ->(note, options) { note.resolvable? }
expose :confidential?, as: :confidential
# Avoid N+1 queries as much as possible
expose(:noteable_iid) { |note| note.noteable.iid if NOTEABLE_TYPES_WITH_IID.include?(note.noteable_type) }
......
......@@ -249,6 +249,10 @@ module API
authorize! :admin_project, user_project
end
def authorize_admin_group
authorize! :admin_group, user_group
end
def authorize_read_builds!
authorize! :read_build, user_project
end
......
......@@ -150,10 +150,10 @@ module API
end
get ':id/runners' do
runners = Ci::Runner.owned_or_instance_wide(user_project.id)
# The `scope` param is deprecated (for project runners), however the API documentation still supports it.
# It is not included in the `apply_filter` method as it is not supported for group runners
runners = filter_runners(runners, params[:scope])
runners = filter_runners(runners, params[:type], allowed_scopes: Ci::Runner::AVAILABLE_TYPES)
runners = filter_runners(runners, params[:status], allowed_scopes: Ci::Runner::AVAILABLE_STATUSES)
runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
runners = apply_filter(runners, params)
present paginate(runners), with: Entities::Runner
end
......@@ -194,6 +194,31 @@ module API
# rubocop: enable CodeReuse/ActiveRecord
end
params do
requires :id, type: String, desc: 'The ID of a group'
end
resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
before { authorize_admin_group }
desc 'Get runners available for group' do
success Entities::Runner
end
params do
optional :type, type: String, values: Ci::Runner::AVAILABLE_TYPES,
desc: 'The type of the runners to show'
optional :status, type: String, values: Ci::Runner::AVAILABLE_STATUSES,
desc: 'The status of the runners to show'
optional :tag_list, type: Array[String], desc: 'The tags of the runners to show'
use :pagination
end
get ':id/runners' do
runners = Ci::Runner.belonging_to_group(user_group.id, include_ancestors: true)
runners = apply_filter(runners, params)
present paginate(runners), with: Entities::Runner
end
end
helpers do
def filter_runners(runners, scope, allowed_scopes: ::Ci::Runner::AVAILABLE_SCOPES)
return runners unless scope.present?
......@@ -210,6 +235,14 @@ module API
runners.public_send(scope) # rubocop:disable GitlabSecurity/PublicSend
end
def apply_filter(runners, params)
runners = filter_runners(runners, params[:type], allowed_scopes: Ci::Runner::AVAILABLE_TYPES)
runners = filter_runners(runners, params[:status], allowed_scopes: Ci::Runner::AVAILABLE_STATUSES)
runners = runners.tagged_with(params[:tag_list]) if params[:tag_list]
runners
end
def get_runner(id)
runner = Ci::Runner.find(id)
not_found!('Runner') unless runner
......
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This migration creates missing services records
# for the projects within the given range of ids
class FixProjectsWithoutPrometheusService
# There is an important inconsistency between the single query timeout (15s) and the background migration worker's minimum lease (2 minutes).
# To address that, the scheduled ids range (sized for at least 2 minutes of processing) should be inserted in smaller portions to fit under the 15s limit.
# https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/9064#note_279857215
MAX_BATCH_SIZE = 1_000
DEFAULTS = {
'active' => true,
'properties' => "'{}'",
'type' => "'PrometheusService'",
'template' => false,
'push_events' => true,
'issues_events' => true,
'merge_requests_events' => true,
'tag_push_events' => true,
'note_events' => true,
'category' => "'monitoring'",
'default' => false,
'wiki_page_events' => true,
'pipeline_events' => true,
'confidential_issues_events' => true,
'commit_events' => true,
'job_events' => true,
'confidential_note_events' => true
}.freeze
module Migratable
module Applications
# Migration model namespace isolated from application code.
class Prometheus
def self.statuses
{
errored: -1,
installed: 3,
updated: 5
}
end
end
end
# Migration model namespace isolated from application code.
class Cluster < ActiveRecord::Base
self.table_name = 'clusters'
enum cluster_type: {
instance_type: 1,
group_type: 2
}
def self.has_prometheus_application?
joins("INNER JOIN clusters_applications_prometheus ON clusters_applications_prometheus.cluster_id = clusters.id
AND clusters_applications_prometheus.status IN (#{Applications::Prometheus.statuses[:installed]}, #{Applications::Prometheus.statuses[:updated]})").exists?
end
end
# Migration model namespace isolated from application code.
class PrometheusService < ActiveRecord::Base
self.inheritance_column = :_type_disabled
self.table_name = 'services'
default_scope { where(type: type) }
def self.type
'PrometheusService'
end
def self.template
find_by(template: true)
end
def self.values
(template&.attributes_for_insert || DEFAULTS).merge('template' => false, 'active' => true).values
end
def attributes_for_insert
slice(DEFAULTS.keys).transform_values do |v|
v.is_a?(String) ? "'#{v}'" : v
end
end
end
# Migration model namespace isolated from application code.
class Project < ActiveRecord::Base
self.table_name = 'projects'
scope :select_for_insert, -> {
select('id')
.select(PrometheusService.values.join(','))
.select("TIMEZONE('UTC', NOW()) as created_at", "TIMEZONE('UTC', NOW()) as updated_at")
}
scope :with_prometheus_services, ->(from_id, to_id) {
joins("LEFT JOIN services ON services.project_id = projects.id AND services.project_id BETWEEN #{Integer(from_id)} AND #{Integer(to_id)}
AND services.type = '#{PrometheusService.type}'")
}
scope :with_group_prometheus_installed, -> {
joins("INNER JOIN cluster_groups ON cluster_groups.group_id = projects.namespace_id")
.joins("INNER JOIN clusters_applications_prometheus ON clusters_applications_prometheus.cluster_id = cluster_groups.cluster_id
AND clusters_applications_prometheus.status IN (#{Applications::Prometheus.statuses[:installed]}, #{Applications::Prometheus.statuses[:updated]})")
}
end
end
def perform(from_id, to_id)
(from_id..to_id).each_slice(MAX_BATCH_SIZE) do |batch|
process_batch(batch.first, batch.last)
end
end
private
def process_batch(from_id, to_id)
update_inconsistent(from_id, to_id)
create_missing(from_id, to_id)
end
def create_missing(from_id, to_id)
result = ActiveRecord::Base.connection.select_one(create_sql(from_id, to_id))
return unless result
logger.info(message: "#{self.class}: created missing services for #{result['number_of_created_records']} projects in id=#{from_id}...#{to_id}")
end
def update_inconsistent(from_id, to_id)
result = ActiveRecord::Base.connection.select_one(update_sql(from_id, to_id))
return unless result
logger.info(message: "#{self.class}: updated inconsistent services for #{result['number_of_updated_records']} projects in id=#{from_id}...#{to_id}")
end
# there is no uniq constraint on project_id and type pair, which prevents us from using ON CONFLICT
def create_sql(from_id, to_id)
<<~SQL
WITH created_records AS (
INSERT INTO services (project_id, #{DEFAULTS.keys.map { |key| %("#{key}")}.join(',')}, created_at, updated_at)
#{select_insert_values_sql(from_id, to_id)}
RETURNING *
)
SELECT COUNT(*) as number_of_created_records
FROM created_records
SQL
end
# there is no uniq constraint on project_id and type pair, which prevents us from using ON CONFLICT
def update_sql(from_id, to_id)
<<~SQL
WITH updated_records AS (
UPDATE services SET active = TRUE
WHERE services.project_id BETWEEN #{Integer(from_id)} AND #{Integer(to_id)} AND services.properties = '{}' AND services.type = '#{Migratable::PrometheusService.type}'
AND #{group_cluster_condition(from_id, to_id)} AND services.active = FALSE
RETURNING *
)
SELECT COUNT(*) as number_of_updated_records
FROM updated_records
SQL
end
def group_cluster_condition(from_id, to_id)
return '1 = 1' if migrate_instance_cluster?
<<~SQL
EXISTS (
#{Migratable::Project.select(1).with_group_prometheus_installed.where("projects.id BETWEEN ? AND ?", Integer(from_id), Integer(to_id)).to_sql}
)
SQL
end
def select_insert_values_sql(from_id, to_id)
scope = Migratable::Project
.select_for_insert
.with_prometheus_services(from_id, to_id)
.where("projects.id BETWEEN ? AND ? AND services.id IS NULL", Integer(from_id), Integer(to_id))
return scope.to_sql if migrate_instance_cluster?
scope.with_group_prometheus_installed.to_sql
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
def migrate_instance_cluster?
if instance_variable_defined?('@migrate_instance_cluster')
@migrate_instance_cluster
else
@migrate_instance_cluster = Migratable::Cluster.instance_type.has_prometheus_application?
end
end
end
end
end
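For orientation, a minimal sketch of how a single batch of this background migration runs (the id range is illustrative; in production the post-deploy migration above schedules these jobs through the background migration worker):

```ruby
# Minimal sketch: invoke one batch directly, as the specs below do.
migration = Gitlab::BackgroundMigration::FixProjectsWithoutPrometheusService.new
migration.perform(1, 50_000) # the from_id..to_id range is split into slices of MAX_BATCH_SIZE
```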
......@@ -4,7 +4,7 @@
# Entrypoint is also needed as image by default set `terraform` binary as an
# entrypoint.
image:
name: hashicorp/terraform:light
name: registry.gitlab.com/gitlab-org/gitlab-build-images:terraform
entrypoint:
- '/usr/bin/env'
- 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
......@@ -18,6 +18,7 @@ cache:
- .terraform
before_script:
- alias convert_report="jq -r '([.resource_changes[].change.actions?]|flatten)|{\"create\":(map(select(.==\"create\"))|length),\"update\":(map(select(.==\"update\"))|length),\"delete\":(map(select(.==\"delete\"))|length)}'"
- terraform --version
- terraform init
......@@ -36,6 +37,7 @@ plan:
stage: build
script:
- terraform plan -out=$PLAN
- "terraform show --json $PLAN | convert_report > tfplan.json"
artifacts:
name: plan
paths:
......
......@@ -7,7 +7,7 @@ module Gitlab
@acme_challenge = acme_challenge
end
delegate :token, :file_content, :status, :request_validation, to: :acme_challenge
delegate :token, :file_content, :status, :request_validation, :error, to: :acme_challenge
private
......
......@@ -8,7 +8,6 @@ module Gitlab
end
def new_challenge
authorization = @acme_order.authorizations.first
challenge = authorization.http
::Gitlab::LetsEncrypt::Challenge.new(challenge)
end
......@@ -22,11 +21,19 @@ module Gitlab
acme_order.finalize(csr: csr)
end
def challenge_error
authorization.challenges.first&.error
end
delegate :url, :status, :expires, :certificate, to: :acme_order
private
attr_reader :acme_order
def authorization
@acme_order.authorizations.first
end
end
end
end
......@@ -17,11 +17,12 @@ module Gitlab
end
def group_name_regex
project_name_regex
@group_name_regex ||= /\A[\p{Alnum}\u{00A9}-\u{1f9ff}_][\p{Alnum}\p{Pd}\u{00A9}-\u{1f9ff}_()\. ]*\z/.freeze
end
def group_name_regex_message
project_name_regex_message
"can contain only letters, digits, emojis, '_', '.', dash, space, parenthesis. " \
"It must start with letter, digit, emoji or '_'."
end
##
......
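Since `group_name_regex` no longer delegates to `project_name_regex`, a quick standalone sketch of what the new pattern accepts and rejects (the sample names are illustrative):

```ruby
# Minimal sketch; the pattern is copied from the change above.
group_name_regex = /\A[\p{Alnum}\u{00A9}-\u{1f9ff}_][\p{Alnum}\p{Pd}\u{00A9}-\u{1f9ff}_()\. ]*\z/

["GitLab", "Dev Team (EMEA)", "_infra-1.0"].each do |name|
  puts "#{name.inspect} accepted? #{name.match?(group_name_regex)}" # => true
end

["-leading-dash", "bad/slash"].each do |name|
  puts "#{name.inspect} accepted? #{name.match?(group_name_regex)}" # => false
end
```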
......@@ -225,8 +225,8 @@ module Gitlab
def gitaly_server_hash(repository)
{
address: Gitlab::GitalyClient.address(repository.project.repository_storage),
token: Gitlab::GitalyClient.token(repository.project.repository_storage),
address: Gitlab::GitalyClient.address(repository.container.repository_storage),
token: Gitlab::GitalyClient.token(repository.container.repository_storage),
features: Feature::Gitaly.server_feature_flags
}
end
......
......@@ -7491,6 +7491,9 @@ msgstr ""
msgid "Enable header and footer in emails"
msgstr ""
msgid "Enable maintenance mode"
msgstr ""
msgid "Enable mirror configuration"
msgstr ""
......@@ -9633,6 +9636,9 @@ msgstr ""
msgid "GitLab is obtaining a Let's Encrypt SSL certificate for this domain. This process can take some time. Please try again later."
msgstr ""
msgid "GitLab is undergoing maintenance and is operating in a read-only mode."
msgstr ""
msgid "GitLab member or Email address"
msgstr ""
......@@ -11305,6 +11311,9 @@ msgstr ""
msgid "Jira import feature is disabled."
msgstr ""
msgid "Jira import is already running."
msgstr ""
msgid "Jira integration not configured."
msgstr ""
......@@ -11590,7 +11599,7 @@ msgstr ""
msgid "Labels|Promote Label"
msgstr ""
msgid "Labels|Promoting %{labelTitle} will make it available for all projects inside %{groupName}. Existing project labels with the same title will be merged. This action cannot be reversed."
msgid "Labels|Promoting %{labelTitle} will make it available for all projects inside %{groupName}. Existing project labels with the same title will be merged. If a group label with the same title exists, it will also be merged. This action cannot be reversed."
msgstr ""
msgid "Labels|and %{count} more"
......@@ -12136,6 +12145,9 @@ msgstr ""
msgid "Made this issue confidential."
msgstr ""
msgid "Maintenance mode"
msgstr ""
msgid "Make and review changes in the browser with the Web IDE"
msgstr ""
......@@ -13486,6 +13498,9 @@ msgstr ""
msgid "Nodes"
msgstr ""
msgid "Non-admin users can sign in with read-only access and make read-only API requests."
msgstr ""
msgid "None"
msgstr ""
......@@ -14859,6 +14874,9 @@ msgstr ""
msgid "Prevent users from modifing merge request approvers list"
msgstr ""
msgid "Prevent users from performing write operations on GitLab while performing maintenance."
msgstr ""
msgid "Preview"
msgstr ""
......@@ -23157,6 +23175,9 @@ msgstr ""
msgid "You do not have permission to run the Web Terminal. Please contact a project administrator."
msgstr ""
msgid "You do not have permissions to run the import."
msgstr ""
msgid "You do not have the correct permissions to override the settings from the LDAP group sync."
msgstr ""
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::AvatarsController do
let(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
before do
controller.instance_variable_set(:@project, project)
......@@ -34,15 +34,18 @@ describe Projects::AvatarsController do
expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Disposition']).to eq('inline')
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
end
it_behaves_like 'project cache control headers'
end
context 'when the avatar is stored in lfs' do
it_behaves_like 'a controller that can serve LFS files' do
let(:filename) { 'lfs_object.iso' }
let(:filepath) { "files/lfs/#{filename}" }
end
it_behaves_like 'a controller that can serve LFS files'
it_behaves_like 'project cache control headers'
end
end
end
......
......@@ -118,10 +118,9 @@ describe Projects::Import::JiraController do
end
it 'uses the existing import data' do
expect(controller).not_to receive(:schedule_import)
post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'New Project' }
expect(flash[:notice]).to eq('Jira import is already running.')
expect(response).to redirect_to(project_import_jira_path(project))
end
end
......@@ -153,8 +152,6 @@ describe Projects::Import::JiraController do
end
it 'uses the existing import data' do
expect(controller).to receive(:schedule_import).and_call_original
post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'New Project' }
project.reload
......
......@@ -1377,7 +1377,7 @@ describe Projects::IssuesController do
it 'returns discussion json' do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable resolved resolved_at resolved_by resolved_by_push commit_id for_commit project_id])
expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes diff_discussion discussion_path individual_note resolvable resolved resolved_at resolved_by resolved_by_push commit_id for_commit project_id confidential])
end
it 'renders the author status html if there is a status' do
......
......@@ -6,6 +6,7 @@ describe Projects::RawController do
include RepoHelpers
let(:project) { create(:project, :public, :repository) }
let(:inline) { nil }
describe 'GET #show' do
subject do
......@@ -13,7 +14,8 @@ describe Projects::RawController do
params: {
namespace_id: project.namespace,
project_id: project,
id: filepath
id: filepath,
inline: inline
})
end
......@@ -25,10 +27,12 @@ describe Projects::RawController do
expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition']).to eq('inline')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
end
it_behaves_like 'project cache control headers'
it_behaves_like 'content disposition headers'
end
context 'image header' do
......@@ -38,15 +42,20 @@ describe Projects::RawController do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Disposition']).to eq('inline')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
end
it_behaves_like 'project cache control headers'
it_behaves_like 'content disposition headers'
end
it_behaves_like 'a controller that can serve LFS files' do
context 'with LFS files' do
let(:filename) { 'lfs_object.iso' }
let(:filepath) { "be93687/files/lfs/#{filename}" }
it_behaves_like 'a controller that can serve LFS files'
it_behaves_like 'project cache control headers'
end
context 'when the endpoint receives requests above the limit', :clean_gitlab_redis_cache do
......
......@@ -449,27 +449,33 @@ describe Projects::SnippetsController do
end
describe 'GET #raw' do
let(:inline) { nil }
let(:line_ending) { nil }
let(:params) do
{
namespace_id: project.namespace,
project_id: project,
id: project_snippet.to_param,
inline: inline,
line_ending: line_ending
}
end
subject { get :raw, params: params }
context 'when repository is empty' do
let(:content) { "first line\r\nsecond line\r\nthird line" }
let(:formatted_content) { content.gsub(/\r\n/, "\n") }
let(:project_snippet) do
create(
:project_snippet, :public, :repository,
:project_snippet, :public, :empty_repo,
project: project,
author: user,
content: content
)
end
let(:blob) { project_snippet.blobs.first }
context 'CRLF line ending' do
let(:params) do
{
namespace_id: project.namespace,
project_id: project,
id: project_snippet.to_param
}
end
before do
allow_next_instance_of(Blob) do |instance|
allow(instance).to receive(:data).and_return(content)
......@@ -477,18 +483,44 @@ describe Projects::SnippetsController do
end
it 'returns LF line endings by default' do
get :raw, params: params
subject
expect(response.body).to eq(formatted_content)
end
it 'does not convert line endings when parameter present' do
get :raw, params: params.merge(line_ending: :raw)
context 'when line_ending parameter present' do
let(:line_ending) { :raw }
it 'does not convert line endings' do
subject
expect(response.body).to eq(content)
end
end
end
end
context 'when repository is not empty' do
let(:project_snippet) do
create(
:project_snippet, :public, :repository,
project: project,
author: user
)
end
it 'sends the blob' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
end
it_behaves_like 'project cache control headers'
it_behaves_like 'content disposition headers'
end
end
describe 'DELETE #destroy' do
let!(:snippet) { create(:project_snippet, :private, project: project, author: user) }
......
......@@ -144,14 +144,12 @@ describe Projects::WikisController do
let(:id) { upload_file_to_wiki(project, user, file_name) }
before do
subject
end
context 'when file is an image' do
let(:file_name) { 'dk.png' }
it 'delivers the image' do
subject
expect(response.headers['Content-Disposition']).to match(/^inline/)
expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
......@@ -160,19 +158,27 @@ describe Projects::WikisController do
let(:file_name) { 'unsanitized.svg' }
it 'delivers the image' do
subject
expect(response.headers['Content-Disposition']).to match(/^inline/)
expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
end
it_behaves_like 'project cache control headers'
end
context 'when file is a pdf' do
let(:file_name) { 'git-cheat-sheet.pdf' }
it 'sets the content response headers' do
subject
expect(response.headers['Content-Disposition']).to match(/^inline/)
expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
it_behaves_like 'project cache control headers'
end
end
end
......
......@@ -501,6 +501,11 @@ describe SnippetsController do
end
describe "GET #raw" do
let(:inline) { nil }
let(:params) { { id: snippet.to_param, inline: inline } }
subject { get :raw, params: params }
shared_examples '200 status' do
before do
subject
......@@ -511,11 +516,6 @@ describe SnippetsController do
expect(response).to have_gitlab_http_status(:ok)
end
it 'has expected headers' do
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition']).to match(/inline/)
end
it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
end
......@@ -551,12 +551,20 @@ describe SnippetsController do
shared_examples 'successful response' do
it_behaves_like '200 status'
it_behaves_like 'CRLF line ending'
it 'returns snippet first blob data' do
it 'has expected blob headers' do
subject
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
end
it_behaves_like 'content disposition headers'
it 'sets cache_control public header based on snippet visibility' do
subject
expect(response.body).to eq snippet.blobs.first.data
expect(response.cache_control[:public]).to eq snippet.public?
end
context 'when feature flag version_snippets is disabled' do
......@@ -571,12 +579,33 @@ describe SnippetsController do
subject
expect(response.body).to eq snippet.content
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
end
it_behaves_like 'content disposition headers'
end
context 'when snippet repository is empty' do
before do
allow_any_instance_of(Repository).to receive(:empty?).and_return(true)
end
it_behaves_like '200 status'
it_behaves_like 'CRLF line ending'
it 'returns snippet database content' do
subject
expect(response.body).to eq snippet.content
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
end
it_behaves_like 'content disposition headers'
end
end
context 'when the personal snippet is private' do
let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository, author: user) }
let_it_be(:snippet) { create(:personal_snippet, :private, :repository, author: user) }
context 'when signed in' do
before do
......@@ -595,18 +624,13 @@ describe SnippetsController do
end
context 'when signed in user is the author' do
it_behaves_like 'successful response' do
let(:snippet) { personal_snippet }
let(:params) { { id: snippet.to_param } }
subject { get :raw, params: params }
end
it_behaves_like 'successful response'
end
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :raw, params: { id: personal_snippet.to_param }
subject
expect(response).to redirect_to(new_user_session_path)
end
......@@ -614,24 +638,19 @@ describe SnippetsController do
end
context 'when the personal snippet is internal' do
let_it_be(:personal_snippet) { create(:personal_snippet, :internal, :repository, author: user) }
let_it_be(:snippet) { create(:personal_snippet, :internal, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it_behaves_like 'successful response' do
let(:snippet) { personal_snippet }
let(:params) { { id: snippet.to_param } }
subject { get :raw, params: params }
end
it_behaves_like 'successful response'
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :raw, params: { id: personal_snippet.to_param }
subject
expect(response).to redirect_to(new_user_session_path)
end
......@@ -639,26 +658,21 @@ describe SnippetsController do
end
context 'when the personal snippet is public' do
let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository, author: user) }
let_it_be(:snippet) { create(:personal_snippet, :public, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it_behaves_like 'successful response' do
let(:snippet) { personal_snippet }
let(:params) { { id: snippet.to_param } }
subject { get :raw, params: params }
end
it_behaves_like 'successful response'
end
context 'when not signed in' do
it 'responds with status 200' do
get :raw, params: { id: personal_snippet.to_param }
subject
expect(assigns(:snippet)).to eq(personal_snippet)
expect(assigns(:snippet)).to eq(snippet)
expect(response).to have_gitlab_http_status(:ok)
end
end
......
......@@ -99,6 +99,30 @@ FactoryBot.define do
trait :repository_source do
config_source { Ci::Pipeline.config_sources[:repository_source] }
end
trait :detached_merge_request_pipeline do
merge_request
source { :merge_request_event }
project { merge_request.source_project }
sha { merge_request.source_branch_sha }
ref { merge_request.ref_path }
end
trait :legacy_detached_merge_request_pipeline do
detached_merge_request_pipeline
ref { merge_request.source_branch }
end
trait :merged_result_pipeline do
detached_merge_request_pipeline
sha { 'test-merge-sha' }
ref { merge_request.merge_ref_path }
source_sha { merge_request.source_branch_sha }
target_sha { merge_request.target_branch_sha }
end
end
end
end
......@@ -147,23 +147,13 @@ FactoryBot.define do
trait :with_legacy_detached_merge_request_pipeline do
after(:create) do |merge_request|
merge_request.pipelines_for_merge_request << create(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.source_branch_sha)
create(:ci_pipeline, :legacy_detached_merge_request_pipeline, merge_request: merge_request)
end
end
trait :with_detached_merge_request_pipeline do
after(:create) do |merge_request|
merge_request.pipelines_for_merge_request << create(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request,
project: merge_request.source_project,
ref: merge_request.ref_path,
sha: merge_request.source_branch_sha)
create(:ci_pipeline, :detached_merge_request_pipeline, merge_request: merge_request)
end
end
......@@ -175,14 +165,12 @@ FactoryBot.define do
end
after(:create) do |merge_request, evaluator|
merge_request.pipelines_for_merge_request << create(:ci_pipeline,
source: :merge_request_event,
create(:ci_pipeline, :merged_result_pipeline,
merge_request: merge_request,
project: merge_request.source_project,
ref: merge_request.merge_ref_path,
sha: evaluator.merge_sha,
source_sha: evaluator.source_sha,
target_sha: evaluator.target_sha)
target_sha: evaluator.target_sha
)
end
end
......
......@@ -55,7 +55,8 @@
"human_access": { "type": ["string", "null"] },
"toggle_award_path": { "type": "string" },
"path": { "type": "string" },
"commands_changes": { "type": "object", "additionalProperties": true }
"commands_changes": { "type": "object", "additionalProperties": true },
"confidential": { "type": ["boolean", "null"] }
},
"required": [
"id", "attachment", "author", "created_at", "updated_at",
......
......@@ -28,7 +28,8 @@
"noteable_type": { "type": "string" },
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] }
"resolved_by": { "type": ["string", "null"] },
"confidential": { "type": ["boolean", "null"] }
},
"required": [
"id", "body", "attachment", "author", "created_at", "updated_at",
......
import { shallowMount } from '@vue/test-utils';
import MaintenanceModeSettingsApp from '~/maintenance_mode_settings/components/app.vue';
import { GlToggle, GlFormTextarea, GlButton } from '@gitlab/ui';
describe('MaintenanceModeSettingsApp', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMount(MaintenanceModeSettingsApp);
};
afterEach(() => {
wrapper.destroy();
});
const findMaintenanceModeSettingsContainer = () => wrapper.find('article');
const findGlToggle = () => wrapper.find(GlToggle);
const findGlFormTextarea = () => wrapper.find(GlFormTextarea);
const findGlButton = () => wrapper.find(GlButton);
describe('template', () => {
beforeEach(() => {
createComponent();
});
it('renders the Maintenance Mode Settings container', () => {
expect(findMaintenanceModeSettingsContainer().exists()).toBe(true);
});
it('renders the GlToggle', () => {
expect(findGlToggle().exists()).toBe(true);
});
it('renders the GlFormTextarea', () => {
expect(findGlFormTextarea().exists()).toBe(true);
});
it('renders the GlButton', () => {
expect(findGlButton().exists()).toBe(true);
});
});
});
......@@ -5,7 +5,7 @@ describe GitlabSchema.types['Note'] do
it 'exposes the expected fields' do
expected_fields = [:id, :project, :author, :body, :created_at,
:updated_at, :discussion, :resolvable, :position, :user_permissions,
:resolved_by, :resolved_at, :system, :body_html]
:resolved_by, :resolved_at, :system, :body_html, :confidential]
expect(described_class).to have_graphql_fields(*expected_fields)
end
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::FixProjectsWithoutPrometheusService, :migration, schema: 2020_02_20_115023 do
def service_params_for(project_id, params = {})
{
project_id: project_id,
active: false,
properties: '{}',
type: 'PrometheusService',
template: false,
push_events: true,
issues_events: true,
merge_requests_events: true,
tag_push_events: true,
note_events: true,
category: 'monitoring',
default: false,
wiki_page_events: true,
pipeline_events: true,
confidential_issues_events: true,
commit_events: true,
job_events: true,
confidential_note_events: true,
deployment_events: false
}.merge(params)
end
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
let(:clusters) { table(:clusters) }
let(:cluster_groups) { table(:cluster_groups) }
let(:clusters_applications_prometheus) { table(:clusters_applications_prometheus) }
let(:namespace) { namespaces.create(name: 'user', path: 'user') }
let(:project) { projects.create(namespace_id: namespace.id) }
let(:application_statuses) do
{
errored: -1,
installed: 3,
updated: 5
}
end
let(:cluster_types) do
{
instance_type: 1,
group_type: 2,
project_type: 3
}
end
let(:columns) do
%w(project_id active properties type template push_events
issues_events merge_requests_events tag_push_events
note_events category default wiki_page_events pipeline_events
confidential_issues_events commit_events job_events
confidential_note_events deployment_events)
end
describe '#perform' do
shared_examples 'fix services entries state' do
it 'is idempotent' do
expect { subject.perform(project.id, project.id + 1) }.to change { services.order(:id).map { |row| row.attributes } }
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
context 'non prometheus services' do
it 'does not change them' do
other_type = 'SomeOtherService'
services.create(service_params_for(project.id, active: true, type: other_type))
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.where(type: other_type).order(:id).map { |row| row.attributes } }
end
end
context 'prometheus integration services do not exist' do
it 'creates missing services entries', :aggregate_failures do
expect { subject.perform(project.id, project.id + 1) }.to change { services.count }.by(1)
expect([service_params_for(project.id, active: true)]).to eq services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
end
context 'template is present for prometheus services' do
it 'creates missing services entries', :aggregate_failures do
services.create(service_params_for(nil, template: true, properties: { 'from_template' => true }.to_json))
expect { subject.perform(project.id, project.id + 1) }.to change { services.count }.by(1)
updated_rows = services.where(template: false).order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
expect([service_params_for(project.id, active: true, properties: { 'from_template' => true }.to_json)]).to eq updated_rows
end
end
end
context 'prometheus integration services exist' do
context 'in active state' do
it 'does not change them' do
services.create(service_params_for(project.id, active: true))
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
context 'not in active state' do
it 'sets active attribute to true' do
service = services.create(service_params_for(project.id, active: false))
expect { subject.perform(project.id, project.id + 1) }.to change { service.reload.active? }.from(false).to(true)
end
context 'prometheus services are configured manually' do
it 'does not change them' do
properties = '{"api_url":"http://test.dev","manual_configuration":"1"}'
services.create(service_params_for(project.id, properties: properties, active: false))
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
end
end
end
context 'k8s cluster shared on instance level' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:instance_type]) }
context 'with installed prometheus application' do
before do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
end
it_behaves_like 'fix services entries state'
end
context 'with updated prometheus application' do
before do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:updated], version: '123')
end
it_behaves_like 'fix services entries state'
end
context 'with errored prometheus application' do
before do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:errored], version: '123')
end
it 'does not change services entries' do
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
end
context 'k8s cluster shared on group level' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:group_type]) }
before do
cluster_groups.create(cluster_id: cluster.id, group_id: project.namespace_id)
end
context 'with installed prometheus application' do
before do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
end
it_behaves_like 'fix services entries state'
context 'second k8s cluster without application available' do
let(:namespace_2) { namespaces.create(name: 'namespace2', path: 'namespace2') }
let(:project_2) { projects.create(namespace_id: namespace_2.id) }
before do
cluster_2 = clusters.create(name: 'cluster2', cluster_type: cluster_types[:group_type])
cluster_groups.create(cluster_id: cluster_2.id, group_id: project_2.namespace_id)
end
it 'changed only affected services entries' do
expect { subject.perform(project.id, project_2.id + 1) }.to change { services.count }.by(1)
expect([service_params_for(project.id, active: true)]).to eq services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
end
end
end
context 'with updated prometheus application' do
before do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:updated], version: '123')
end
it_behaves_like 'fix services entries state'
end
context 'with errored prometheus application' do
before do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:errored], version: '123')
end
it 'does not change services entries' do
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
context 'with missing prometheus application' do
it 'does not change services entries' do
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
context 'with inactive service' do
it 'does not change services entries' do
services.create(service_params_for(project.id))
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
end
end
context 'k8s cluster for single project' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:project_type]) }
let(:cluster_projects) { table(:cluster_projects) }
context 'with installed prometheus application' do
before do
cluster_projects.create(cluster_id: cluster.id, project_id: project.id)
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
end
it 'does not change services entries' do
expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
end
end
end
......@@ -38,4 +38,23 @@ describe ::Gitlab::LetsEncrypt::Order do
order.request_certificate(domain: 'example.com', private_key: private_key)
end
end
describe '#challenge_error' do
it 'returns error if challenge has errors' do
challenge = acme_challenge_double
# error just to give an example
error = {
"type" => "urn:ietf:params:acme:error:dns",
"detail" => "No valid IP addresses found for test.example.com",
"status" => 400
}
allow(challenge).to receive(:error).and_return(error)
acme_order = acme_order_double(authorizations: [acme_authorization_double(challenge)])
expect(described_class.new(acme_order).challenge_error).to eq(error)
end
end
end
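For orientation, a minimal sketch of what #challenge_error is assumed to do here: the spec stubs the challenge's error and expects the order wrapper to hand it back unchanged (illustrative only, not the actual implementation).
# Sketch under that assumption — Order delegates to the ACME challenge it wraps.
def challenge_error
  challenge&.error
end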
......@@ -13,10 +13,6 @@ describe Gitlab::Regex do
it { is_expected.not_to match('?gitlab') }
end
shared_examples_for 'project/group name error message' do
it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'.") }
end
describe '.project_name_regex' do
subject { described_class.project_name_regex }
......@@ -27,18 +23,26 @@ describe Gitlab::Regex do
subject { described_class.group_name_regex }
it_behaves_like 'project/group name regex'
it 'allows parenthesis' do
is_expected.to match('Group One (Test)')
end
it 'does not start with parenthesis' do
is_expected.not_to match('(Invalid Group name)')
end
end
describe '.project_name_regex_message' do
subject { described_class.project_name_regex_message }
it_behaves_like 'project/group name error message'
it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'.") }
end
describe '.group_name_regex_message' do
subject { described_class.group_name_regex_message }
it_behaves_like 'project/group name error message'
it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space, parenthesis. It must start with letter, digit, emoji or '_'.") }
end
describe '.environment_name_regex' do
......
# frozen_string_literal: true
#
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200220115023_fix_projects_without_prometheus_service.rb')
describe FixProjectsWithoutPrometheusService, :migration do
let(:namespace) { table(:namespaces).create(name: 'gitlab', path: 'gitlab-org') }
let!(:projects) do
[
table(:projects).create(namespace_id: namespace.id, name: 'foo 1'),
table(:projects).create(namespace_id: namespace.id, name: 'foo 2'),
table(:projects).create(namespace_id: namespace.id, name: 'foo 3')
]
end
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
around do |example|
Sidekiq::Testing.fake! do
Timecop.freeze do
example.call
end
end
end
it 'schedules jobs for ranges of projects' do
migrate!
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[1].id)
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(4.minutes, projects[2].id, projects[2].id)
end
it 'schedules jobs according to the configured batch size' do
expect { migrate! }.to change { BackgroundMigrationWorker.jobs.size }.by(2)
end
end
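For context, a hedged sketch of the post-deploy migration this spec exercises; the helper call and constants are assumptions based on common GitLab migration patterns, not the verbatim file.
# Sketch only — illustrates the batched scheduling the expectations above assert.
class FixProjectsWithoutPrometheusService < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INTERVAL = 2.minutes
  BATCH_SIZE = 10_000
  MIGRATION = 'FixProjectsWithoutPrometheusService'

  disable_ddl_transaction!

  def up
    # With BATCH_SIZE stubbed to 2 in the spec, projects are queued in id ranges:
    # the first range at +2.minutes, the next at +4.minutes, and so on.
    queue_background_migration_jobs_by_range_at_intervals(
      Project, MIGRATION, INTERVAL, batch_size: BATCH_SIZE)
  end

  def down
    # no-op: the background job only repairs existing service rows
  end
end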
......@@ -838,4 +838,33 @@ describe Ci::Runner do
it { is_expected.to eq(contacted_at_stored) }
end
describe '.belonging_to_group' do
it 'returns the specific group runner' do
group = create(:group)
runner = create(:ci_runner, :group, groups: [group])
unrelated_group = create(:group)
create(:ci_runner, :group, groups: [unrelated_group])
expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner)
end
context 'runner belonging to parent group' do
let_it_be(:parent_group) { create(:group) }
let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) }
let_it_be(:group) { create(:group, parent: parent_group) }
context 'when include_parent option is passed' do
it 'returns the group runner from the parent group' do
expect(described_class.belonging_to_group(group.id, include_ancestors: true)).to contain_exactly(parent_runner)
end
end
context 'when include_parent option is not passed' do
it 'does not return the group runner from the parent group' do
expect(described_class.belonging_to_group(group.id)).to be_empty
end
end
end
end
end
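To make the new scope concrete, a hedged sketch of the kind of scope these examples exercise; the join table and hierarchy helper are assumptions, not the verbatim model code.
# Sketch only — a group-runner scope that optionally widens the lookup to ancestor groups.
scope :belonging_to_group, -> (group_id, include_ancestors: false) {
  groups = ::Group.where(id: group_id)
  groups = Gitlab::ObjectHierarchy.new(groups).base_and_ancestors if include_ancestors
  joins(:runner_namespaces).where(ci_runner_namespaces: { namespace_id: groups })
}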
......@@ -12,7 +12,6 @@ describe API::Runners do
let(:project2) { create(:project, creator_id: user.id) }
let(:group) { create(:group).tap { |group| group.add_owner(user) } }
let(:group2) { create(:group).tap { |group| group.add_owner(user) } }
let!(:shared_runner) { create(:ci_runner, :instance, description: 'Shared runner') }
let!(:project_runner) { create(:ci_runner, :project, description: 'Project runner', projects: [project]) }
......@@ -734,6 +733,24 @@ describe API::Runners do
end
end
shared_examples_for 'unauthorized access to runners list' do
context 'authorized user without maintainer privileges' do
it "does not return group's runners" do
get api("/#{entity_type}/#{entity.id}/runners", user2)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
it "does not return project's runners" do
get api("/#{entity_type}/#{entity.id}/runners")
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
describe 'GET /projects/:id/runners' do
context 'authorized user with maintainer privileges' do
it 'returns response status and headers' do
......@@ -813,21 +830,78 @@ describe API::Runners do
end
end
context 'authorized user without maintainer privileges' do
it "does not return project's runners" do
get api("/projects/#{project.id}/runners", user2)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
it "does not return project's runners" do
get api("/projects/#{project.id}/runners")
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
it_behaves_like 'unauthorized access to runners list' do
let(:entity_type) { 'projects' }
let(:entity) { project }
end
end
describe 'GET /groups/:id/runners' do
context 'authorized user with maintainer privileges' do
it 'returns all runners' do
get api("/groups/#{group.id}/runners", user)
expect(json_response).to match_array([
a_hash_including('description' => 'Group runner')
])
end
context 'filter by type' do
it 'returns record when valid and present' do
get api("/groups/#{group.id}/runners?type=group_type", user)
expect(json_response).to match_array([
a_hash_including('description' => 'Group runner')
])
end
it 'returns empty result when type does not match' do
get api("/groups/#{group.id}/runners?type=project_type", user)
expect(json_response).to be_empty
end
it 'does not filter by invalid type' do
get api("/groups/#{group.id}/runners?type=bogus", user)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'filter runners by status' do
it 'returns runners by valid status' do
create(:ci_runner, :group, :inactive, description: 'Inactive group runner', groups: [group])
get api("/groups/#{group.id}/runners?status=paused", user)
expect(json_response).to match_array([
a_hash_including('description' => 'Inactive group runner')
])
end
it 'does not filter by invalid status' do
get api("/groups/#{group.id}/runners?status=bogus", user)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
it 'filters runners by tag_list' do
create(:ci_runner, :group, description: 'Runner tagged with tag1 and tag2', groups: [group], tag_list: %w[tag1 tag2])
create(:ci_runner, :group, description: 'Runner tagged with tag2', groups: [group], tag_list: %w[tag2])
get api("/groups/#{group.id}/runners?tag_list=tag1,tag2", user)
expect(json_response).to match_array([
a_hash_including('description' => 'Runner tagged with tag1 and tag2')
])
end
end
it_behaves_like 'unauthorized access to runners list' do
let(:entity_type) { 'groups' }
let(:entity) { group }
end
end
describe 'POST /projects/:id/runners' do
......
......@@ -3,23 +3,23 @@
require 'spec_helper'
describe API::Todos do
let(:group) { create(:group) }
let(:project_1) { create(:project, :repository, group: group) }
let(:project_2) { create(:project) }
let(:author_1) { create(:user) }
let(:author_2) { create(:user) }
let(:john_doe) { create(:user, username: 'john_doe') }
let(:merge_request) { create(:merge_request, source_project: project_1) }
let!(:merge_request_todo) { create(:todo, project: project_1, author: author_2, user: john_doe, target: merge_request) }
let!(:pending_1) { create(:todo, :mentioned, project: project_1, author: author_1, user: john_doe) }
let!(:pending_2) { create(:todo, project: project_2, author: author_2, user: john_doe) }
let!(:pending_3) { create(:on_commit_todo, project: project_1, author: author_2, user: john_doe) }
let!(:done) { create(:todo, :done, project: project_1, author: author_1, user: john_doe) }
let!(:award_emoji_1) { create(:award_emoji, awardable: merge_request, user: author_1, name: 'thumbsup') }
let!(:award_emoji_2) { create(:award_emoji, awardable: pending_1.target, user: author_1, name: 'thumbsup') }
let!(:award_emoji_3) { create(:award_emoji, awardable: pending_2.target, user: author_2, name: 'thumbsdown') }
before do
let_it_be(:group) { create(:group) }
let_it_be(:project_1) { create(:project, :repository, group: group) }
let_it_be(:project_2) { create(:project) }
let_it_be(:author_1) { create(:user) }
let_it_be(:author_2) { create(:user) }
let_it_be(:john_doe) { create(:user, username: 'john_doe') }
let_it_be(:merge_request) { create(:merge_request, source_project: project_1) }
let_it_be(:merge_request_todo) { create(:todo, project: project_1, author: author_2, user: john_doe, target: merge_request) }
let_it_be(:pending_1) { create(:todo, :mentioned, project: project_1, author: author_1, user: john_doe) }
let_it_be(:pending_2) { create(:todo, project: project_2, author: author_2, user: john_doe) }
let_it_be(:pending_3) { create(:on_commit_todo, project: project_1, author: author_2, user: john_doe) }
let_it_be(:done) { create(:todo, :done, project: project_1, author: author_1, user: john_doe) }
let_it_be(:award_emoji_1) { create(:award_emoji, awardable: merge_request, user: author_1, name: 'thumbsup') }
let_it_be(:award_emoji_2) { create(:award_emoji, awardable: pending_1.target, user: author_1, name: 'thumbsup') }
let_it_be(:award_emoji_3) { create(:award_emoji, awardable: pending_2.target, user: author_2, name: 'thumbsdown') }
before_all do
project_1.add_developer(john_doe)
project_2.add_developer(john_doe)
end
......@@ -29,7 +29,7 @@ describe API::Todos do
it 'returns authentication error' do
get api('/todos')
expect(response.status).to eq(401)
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
......@@ -37,7 +37,7 @@ describe API::Todos do
it 'returns an array of pending todos for current user' do
get api('/todos', john_doe)
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(4)
......@@ -74,7 +74,7 @@ describe API::Todos do
it 'filters based on author_id param' do
get api('/todos', john_doe), params: { author_id: author_2.id }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(3)
......@@ -87,7 +87,7 @@ describe API::Todos do
get api('/todos', john_doe), params: { type: 'MergeRequest' }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(2)
......@@ -98,7 +98,7 @@ describe API::Todos do
it 'filters based on state param' do
get api('/todos', john_doe), params: { state: 'done' }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
......@@ -109,7 +109,7 @@ describe API::Todos do
it 'filters based on project_id param' do
get api('/todos', john_doe), params: { project_id: project_2.id }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
......@@ -120,7 +120,7 @@ describe API::Todos do
it 'filters based on project_id param' do
get api('/todos', john_doe), params: { group_id: group.id, sort: :target_id }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(3)
......@@ -131,7 +131,7 @@ describe API::Todos do
it 'filters based on action param' do
get api('/todos', john_doe), params: { action: 'mentioned' }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.length).to eq(1)
......@@ -157,7 +157,7 @@ describe API::Todos do
create(:on_commit_todo, project: project_3, author: author_1, user: john_doe)
expect { get api('/todos', john_doe) }.not_to exceed_query_limit(control)
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
end
end
......@@ -189,7 +189,7 @@ describe API::Todos do
it 'returns 404 if the todo does not belong to the current user' do
post api("/todos/#{pending_1.id}/mark_as_done", author_1)
expect(response.status).to eq(404)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
......@@ -225,7 +225,7 @@ describe API::Todos do
it 'creates a todo on an issuable' do
post api("/projects/#{project_1.id}/#{issuable_type}/#{issuable.iid}/todo", john_doe)
expect(response.status).to eq(201)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['project']).to be_a Hash
expect(json_response['author']).to be_a Hash
expect(json_response['target_type']).to eq(issuable.class.name)
......@@ -242,13 +242,15 @@ describe API::Todos do
post api("/projects/#{project_1.id}/#{issuable_type}/#{issuable.iid}/todo", john_doe)
expect(response.status).to eq(304)
expect(response).to have_gitlab_http_status(:not_modified)
end
it 'returns 404 if the issuable is not found' do
post api("/projects/#{project_1.id}/#{issuable_type}/123/todo", john_doe)
unknown_id = 0
post api("/projects/#{project_1.id}/#{issuable_type}/#{unknown_id}/todo", john_doe)
expect(response.status).to eq(404)
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns an error if the issuable is not accessible' do
......@@ -268,13 +270,17 @@ describe API::Todos do
describe 'POST :id/issuable_type/:issueable_id/todo' do
context 'for an issue' do
it_behaves_like 'an issuable', 'issues' do
let(:issuable) { create(:issue, :confidential, author: author_1, project: project_1) }
let_it_be(:issuable) do
create(:issue, :confidential, author: author_1, project: project_1)
end
end
end
context 'for a merge request' do
it_behaves_like 'an issuable', 'merge_requests' do
let(:issuable) { create(:merge_request, :simple, source_project: project_1) }
let_it_be(:issuable) do
create(:merge_request, :simple, source_project: project_1)
end
end
end
end
......
......@@ -34,7 +34,8 @@ describe DiscussionEntity do
:discussion_path,
:resolved_at,
:for_commit,
:commit_id
:commit_id,
:confidential
)
end
......
# frozen_string_literal: true
require 'spec_helper'
describe JiraImport::StartImportService do
let_it_be(:user) { create(:user) }
let(:project) { create(:project) }
subject { described_class.new(user, project, '').execute }
context 'when feature flag disabled' do
before do
stub_feature_flags(jira_issue_import: false)
end
it_behaves_like 'responds with error', 'Jira import feature is disabled.'
end
context 'when feature flag enabled' do
before do
stub_feature_flags(jira_issue_import: true)
end
context 'when user does not have permissions to run the import' do
before do
project.add_developer(user)
end
it_behaves_like 'responds with error', 'You do not have permissions to run the import.'
end
context 'when user has permission to run import' do
before do
project.add_maintainer(user)
end
context 'when Jira service was not setup' do
it_behaves_like 'responds with error', 'Jira integration not configured.'
end
context 'when Jira service exists' do
let!(:jira_service) { create(:jira_service, project: project, active: true) }
context 'when Jira project key is not provided' do
it_behaves_like 'responds with error', 'Unable to find Jira project to import data from.'
end
context 'when correct data provided' do
subject { described_class.new(user, project, 'some-key').execute }
context 'when import is already running' do
let!(:import_state) { create(:import_state, project: project, status: :started) }
it_behaves_like 'responds with error', 'Jira import is already running.'
end
it 'returns success response' do
expect(subject).to be_a(ServiceResponse)
expect(subject).to be_success
end
it 'schedules jira import' do
subject
expect(project.import_state.status).to eq('scheduled')
end
it 'creates jira import data' do
subject
jira_import_data = project.import_data.becomes(JiraImportData)
expect(jira_import_data.force_import?).to be true
imported_project_data = jira_import_data.projects.last
expect(imported_project_data.key).to eq('some-key')
expect(imported_project_data.scheduled_by['user_id']).to eq(user.id)
end
end
end
end
end
end
......@@ -163,4 +163,22 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
expect(PagesDomainAcmeOrder.find_by_id(existing_order.id)).to be_nil
end
end
context 'when order is invalid' do
let(:existing_order) do
create(:pages_domain_acme_order, pages_domain: pages_domain)
end
let!(:api_order) do
stub_lets_encrypt_order(existing_order.url, 'invalid')
end
it 'saves error to domain and deletes acme order' do
expect do
service.execute
end.to change { pages_domain.reload.auto_ssl_failed }.from(false).to(true)
expect(PagesDomainAcmeOrder.find_by_id(existing_order.id)).to be_nil
end
end
end
......@@ -11,7 +11,8 @@ module LetsEncryptHelpers
status: 'pending',
token: 'tokenvalue',
file_content: 'hereisfilecontent',
request_validation: true
request_validation: true,
error: nil
}.freeze
def stub_lets_encrypt_settings
......@@ -43,16 +44,17 @@ module LetsEncryptHelpers
challenge
end
def acme_authorization_double
def acme_authorization_double(challenge = acme_challenge_double)
authorization = instance_double('Acme::Client::Resources::Authorization')
allow(authorization).to receive(:http).and_return(acme_challenge_double)
allow(authorization).to receive(:http).and_return(challenge)
allow(authorization).to receive(:challenges).and_return([challenge])
authorization
end
def acme_order_double(attributes = {})
acme_order = instance_double('Acme::Client::Resources::Order')
allow(acme_order).to receive_messages(ACME_ORDER_METHODS.merge(attributes))
allow(acme_order).to receive(:authorizations).and_return([acme_authorization_double])
allow(acme_order).to receive(:authorizations).and_return([acme_authorization_double]) unless attributes[:authorizations]
allow(acme_order).to receive(:finalize)
acme_order
end
......
# frozen_string_literal: true
RSpec.shared_examples 'project cache control headers' do
before do
project.update(visibility_level: visibility_level)
end
context 'when project is public' do
let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
it 'sets the cache_control public header to true' do
subject
expect(response.cache_control[:public]).to be_truthy
end
end
context 'when project is private' do
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
it 'does not set the cache_control public header to true' do
subject
expect(response.cache_control[:public]).to be_falsey
end
end
context 'when project is internal' do
let(:visibility_level) { Gitlab::VisibilityLevel::INTERNAL }
it 'does not set the cache_control public header to true' do
subject
expect(response.cache_control[:public]).to be_falsey
end
end
end
# frozen_string_literal: true
RSpec.shared_examples 'content disposition headers' do
it 'sets content disposition to inline' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Disposition']).to match(/inline/)
end
context 'when inline param is false' do
let(:inline) { 'false' }
it 'sets content disposition to attachment' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.header['Content-Disposition']).to match(/attachment/)
end
end
end
# frozen_string_literal: true
shared_examples 'responds with error' do |message|
it 'returns error' do
expect(subject).to be_a(ServiceResponse)
expect(subject).to be_error
expect(subject.message).to eq(message)
end
end
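For reference, the return value this shared example expects — a hedged sketch of an #execute built on GitLab's ServiceResponse helpers; the guard condition shown is illustrative only.
# Sketch only — any #execute returning ServiceResponse.error(message: ...) with the
# expected message satisfies 'responds with error'.
def execute
  return ServiceResponse.error(message: 'Jira integration not configured.') unless project.jira_service&.active?

  ServiceResponse.success
end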
......@@ -33,4 +33,32 @@ describe 'admin/application_settings/general.html.haml' do
end
end
end
describe 'Maintenance mode' do
let(:maintenance_mode_flag) { true }
before do
assign(:application_setting, app_settings)
stub_feature_flags(maintenance_mode: maintenance_mode_flag)
allow(view).to receive(:current_user).and_return(user)
end
context 'when maintenance_mode feature is enabled' do
it 'shows the Maintenance mode section' do
render
expect(rendered).to have_css('#js-maintenance-mode-toggle')
end
end
context 'when maintenance_mode feature is disabled' do
let(:maintenance_mode_flag) { false }
it 'hides the Maintenance mode section' do
render
expect(rendered).not_to have_css('#js-maintenance-mode-toggle')
end
end
end
end
......@@ -21,6 +21,10 @@ describe PagesDomainSslRenewalCronWorker do
let!(:domain_without_auto_certificate) do
create(:pages_domain, :without_certificate, :without_key, project: project, auto_ssl_enabled: true)
end
let!(:domain_with_failed_auto_ssl) do
create(:pages_domain, :without_certificate, :without_key, project: project,
auto_ssl_enabled: true, auto_ssl_failed: true)
end
let!(:domain_with_expired_auto_ssl) do
create(:pages_domain, :letsencrypt, :with_expired_certificate, project: project)
......@@ -34,7 +38,8 @@ describe PagesDomainSslRenewalCronWorker do
end
[domain,
domain_with_obtained_letsencrypt].each do |domain|
domain_with_obtained_letsencrypt,
domain_with_failed_auto_ssl].each do |domain|
expect(PagesDomainSslRenewalWorker).not_to receive(:perform_async).with(domain.id)
end
......
......@@ -26,6 +26,8 @@ describe PagesDomainSslRenewalWorker do
shared_examples 'does nothing' do
it 'does nothing' do
expect(::PagesDomains::ObtainLetsEncryptCertificateService).not_to receive(:new)
worker.perform(domain.id)
end
end
......