Commit 946771d0 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent f1e2fca1
......@@ -71,8 +71,8 @@ export default {
},
computed: {
statusTitle() {
return sprintf(s__('Commits|Commit: %{commitText}'), {
commitText: this.commit.pipeline.detailedStatus.text,
return sprintf(s__('PipelineStatusTooltip|Pipeline: %{ciStatus}'), {
ciStatus: this.commit.pipeline.detailedStatus.text,
});
},
isLoading() {
......
......@@ -112,6 +112,7 @@ export default {
<div class="image">
<image-viewer
:path="imagePath"
:file-size="isNew ? newSize : oldSize"
:inner-css-classes="[
'frame',
{
......
# frozen_string_literal: true
module Mutations
module Admin
module SidekiqQueues
class DeleteJobs < BaseMutation
graphql_name 'AdminSidekiqQueuesDeleteJobs'
ADMIN_MESSAGE = 'You must be an admin to use this mutation'
Labkit::Context::KNOWN_KEYS.each do |key|
argument key,
GraphQL::STRING_TYPE,
required: false,
description: "Delete jobs matching #{key} in the context metadata"
end
argument :queue_name,
GraphQL::STRING_TYPE,
required: true,
description: 'The name of the queue to delete jobs from'
field :result,
Types::Admin::SidekiqQueues::DeleteJobsResponseType,
null: true,
description: 'Information about the status of the deletion request'
def ready?(**args)
unless current_user&.admin?
raise Gitlab::Graphql::Errors::ResourceNotAvailable, ADMIN_MESSAGE
end
super
end
def resolve(args)
{
result: Gitlab::SidekiqQueue.new(args[:queue_name]).drop_jobs!(args, timeout: 30),
errors: []
}
rescue Gitlab::SidekiqQueue::NoMetadataError
{
result: nil,
errors: ['No metadata provided']
}
rescue Gitlab::SidekiqQueue::InvalidQueueError
raise Gitlab::Graphql::Errors::ResourceNotAvailable, "Queue #{args[:queue_name]} not found"
end
end
end
end
end
# frozen_string_literal: true
module Types
module Admin
module SidekiqQueues
# We can't authorize against the value passed to this because it's
# a plain hash.
class DeleteJobsResponseType < BaseObject # rubocop:disable Graphql/AuthorizeTypes
graphql_name 'DeleteJobsResponse'
description 'The response from the AdminSidekiqQueuesDeleteJobs mutation.'
field :completed,
GraphQL::BOOLEAN_TYPE,
null: true,
description: 'Whether or not the entire queue was processed in time; if not, retrying the same request is safe'
field :deleted_jobs,
GraphQL::INT_TYPE,
null: true,
description: 'The number of matching jobs deleted'
field :queue_size,
GraphQL::INT_TYPE,
null: true,
description: 'The queue size after processing'
end
end
end
end
......@@ -6,6 +6,7 @@ module Types
graphql_name 'Mutation'
mount_mutation Mutations::Admin::SidekiqQueues::DeleteJobs
mount_mutation Mutations::AwardEmojis::Add
mount_mutation Mutations::AwardEmojis::Remove
mount_mutation Mutations::AwardEmojis::Toggle
......
......@@ -172,6 +172,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_default,
presence: true,
if: :gitaly_timeout_default_changed?,
numericality: {
only_integer: true,
greater_than_or_equal_to: 0,
......@@ -180,6 +181,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_medium,
presence: true,
if: :gitaly_timeout_medium_changed?,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :gitaly_timeout_medium,
numericality: { less_than_or_equal_to: :gitaly_timeout_default },
......@@ -190,6 +192,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_fast,
presence: true,
if: :gitaly_timeout_fast_changed?,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :gitaly_timeout_fast,
numericality: { less_than_or_equal_to: :gitaly_timeout_default },
......
......@@ -107,7 +107,7 @@ class ProjectWiki
direction_desc: direction == DIRECTION_DESC,
load_content: load_content
).map do |page|
WikiPage.new(self, page, true)
WikiPage.new(self, page)
end
end
......@@ -122,7 +122,7 @@ class ProjectWiki
page_title, page_dir = page_title_and_dir(title)
if page = wiki.page(title: page_title, version: version, dir: page_dir)
WikiPage.new(self, page, true)
WikiPage.new(self, page)
end
end
......
......@@ -70,10 +70,9 @@ class WikiPage
Gitlab::HookData::WikiPageBuilder.new(self).build
end
def initialize(wiki, page = nil, persisted = false)
def initialize(wiki, page = nil)
@wiki = wiki
@page = page
@persisted = persisted
@attributes = {}.with_indifferent_access
set_attributes if persisted?
......@@ -94,11 +93,7 @@ class WikiPage
# The formatted title of this page.
def title
if @attributes[:title]
CGI.unescape_html(self.class.unhyphenize(@attributes[:title]))
else
""
end
@attributes[:title] || ''
end
# Sets the title of this page.
......@@ -176,7 +171,7 @@ class WikiPage
# Returns boolean True or False if this instance
# has been fully created on disk or not.
def persisted?
@persisted == true
@page.present?
end
# Creates a new Wiki Page.
......@@ -196,7 +191,7 @@ class WikiPage
def create(attrs = {})
update_attributes(attrs)
save(page_details: title) do
save do
wiki.create_page(title, content, format, attrs[:message])
end
end
......@@ -222,18 +217,12 @@ class WikiPage
update_attributes(attrs)
if title_changed?
page_details = title
if wiki.find_page(page_details).present?
@attributes[:title] = @page.url_path
if title.present? && title_changed? && wiki.find_page(title).present?
@attributes[:title] = @page.title
raise PageRenameError
end
else
page_details = @page.url_path
end
save(page_details: page_details) do
save do
wiki.update_page(
@page,
content: content,
......@@ -266,7 +255,14 @@ class WikiPage
end
def title_changed?
title.present? && (@page.nil? || self.class.unhyphenize(@page.url_path) != title)
if persisted?
old_title, old_dir = wiki.page_title_and_dir(self.class.unhyphenize(@page.url_path))
new_title, new_dir = wiki.page_title_and_dir(title)
new_title != old_title || (title.include?('/') && new_dir != old_dir)
else
title.present?
end
end
# Updates the current @attributes hash by merging a hash of params
......@@ -313,26 +309,24 @@ class WikiPage
attributes[:format] = @page.format
end
def save(page_details:)
return unless valid?
def save
return false unless valid?
unless yield
errors.add(:base, wiki.error_message)
return false
end
page_title, page_dir = wiki.page_title_and_dir(page_details)
gitlab_git_wiki = wiki.wiki
@page = gitlab_git_wiki.page(title: page_title, dir: page_dir)
@page = wiki.find_page(title).page
set_attributes
@persisted = errors.blank?
true
end
def validate_path_limits
*dirnames, title = @attributes[:title].split('/')
if title.bytesize > MAX_TITLE_BYTES
if title && title.bytesize > MAX_TITLE_BYTES
errors.add(:title, _("exceeds the limit of %{bytes} bytes") % { bytes: MAX_TITLE_BYTES })
end
......
......@@ -10,6 +10,11 @@
= _('Analytics')
%ul.sidebar-sub-level-items{ data: { qa_selector: 'analytics_sidebar_submenu' } }
= nav_link(path: navbar_links.first.path, html_options: { class: "fly-out-top-item" } ) do
= link_to navbar_links.first.link do
%strong.fly-out-top-item-name
= _('Analytics')
%li.divider.fly-out-top-item
- navbar_links.each do |menu_item|
= nav_link(path: menu_item.path) do
= link_to(menu_item.link, menu_item.link_to_options) do
......
......@@ -19,9 +19,3 @@
%p.prepend-top-default
= _("You must have permission to create a project in a namespace before forking.")
.save-project-loader.hide.js-fork-content
%h2.text-center
= icon('spinner spin')
= _("Forking repository")
%p.text-center
= _("Please wait a moment, this page will automatically refresh when ready.")
......@@ -12,7 +12,7 @@
.form-group.row
.col-sm-12= f.label :title, class: 'control-label-full-width'
.col-sm-12
= f.text_field :title, class: 'form-control qa-wiki-title-textbox', value: @page.title, required: true, autofocus: !@page.persisted?, placeholder: _('Wiki|Page title')
= f.text_field :title, class: 'form-control qa-wiki-title-textbox', value: @page.title, required: true, autofocus: !@page.persisted?, placeholder: s_('Wiki|Page title')
%span.d-inline-block.mw-100.prepend-top-5
= icon('lightbulb-o')
- if @page.persisted?
......
---
title: Tweak wiki page title handling
merge_request: 25647
author:
type: changed
---
title: Fix ImportFailure when restore ci_pipelines:external_pull_request relation
merge_request: 26041
author:
type: fixed
---
title: Add title to Analytics sidebar menus
merge_request: 26265
author:
type: added
---
title: Remove .fa-spinner from app/views/projects/forks
merge_request: 25034
author: nuwe1
type: other
---
title: Add admin API endpoint to delete Sidekiq jobs matching metadata
merge_request: 25998
author:
type: added
---
title: Fix saving preferences with unrelated changes when gitaly timeouts became invalid.
merge_request: 26292
author:
type: fixed
---
title: All image diffs (except for renamed files) show the image file size in the
diff
merge_request: 25734
author:
type: added
---
title: Change tooltip text for pipeline on last commit widget
merge_request: 26315
author:
type: other
---
title: Allow creating default branch in snippet repositories
merge_request: 26294
author:
type: fixed
......@@ -858,7 +858,7 @@ you are seeing Gitaly errors. You can control the log level of the
gRPC client with the `GRPC_LOG_LEVEL` environment variable. The
default level is `WARN`.
You can run a GRPC trace with:
You can run a gRPC trace with:
```shell
GRPC_TRACE=all GRPC_VERBOSITY=DEBUG sudo gitlab-rake gitlab:gitaly:check
......
......@@ -30,6 +30,8 @@ The following metrics are available:
| `gitlab_cache_misses_total` | Counter | 10.2 | Cache read miss | controller, action |
| `gitlab_cache_operation_duration_seconds` | Histogram | 10.2 | Cache access time | |
| `gitlab_cache_operations_total` | Counter | 12.2 | Cache operations by controller/action | controller, action, operation |
| `job_waiter_started_total` | Counter | 12.9 | Number of batches of jobs started where a web request is waiting for the jobs to complete | worker |
| `job_waiter_timeouts_total` | Counter | 12.9 | Number of batches of jobs that timed out where a web request is waiting for the jobs to complete | worker |
| `gitlab_database_transaction_seconds` | Histogram | 12.1 | Time spent in database transactions, in seconds | |
| `gitlab_method_call_duration_seconds` | Histogram | 10.2 | Method calls real duration | controller, action, module, method |
| `gitlab_page_out_of_bounds` | Counter | 12.8 | Counter for the PageLimiter pagination limit being hit | controller, action, bot |
......
# Admin Sidekiq queues API
> **Note:** This feature was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25998) in GitLab 12.9.
Delete jobs from a Sidekiq queue that match the given
[metadata](../development/logging.md#logging-context-metadata-through-rails-or-grape-requests).
The response has three fields:
1. `deleted_jobs` - the number of jobs deleted by the request.
1. `queue_size` - the remaining size of the queue after processing the
request.
1. `completed` - whether or not the request was able to process the
entire queue in time. If not, retrying with the same parameters may
delete further jobs (including those added after the first request
was issued).
This API endpoint is only available to admin users.
```
DELETE /admin/sidekiq/queues/:queue_name
```
| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `queue_name` | string | yes | The name of the queue to delete jobs from |
| `user` | string | no | The username of the user who scheduled the jobs |
| `project` | string | no | The full path of the project where the jobs were scheduled from |
| `root_namespace` | string | no | The root namespace of the project |
| `subscription_plan` | string | no | The subscription plan of the root namespace (GitLab.com only) |
| `caller_id` | string | no | The endpoint or background job that scheduled the job (for example: `ProjectsController#create`, `/api/:version/projects/:id`, `PostReceive`) |
At least one attribute, other than `queue_name`, is required.
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/admin/sidekiq/queues/authorized_projects?user=root
```
Example response:
```json
{
"completed": true,
"deleted_jobs": 7,
"queue_size": 14
}
```
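The REST endpoint is backed by a `Gitlab::SidekiqQueue` class added in the same change. As a rough illustration, the equivalent call from a Rails console (a sketch, not a supported interface) would be:

```ruby
# Sketch only: mirrors what the endpoint does internally.
result = Gitlab::SidekiqQueue
  .new('authorized_projects')
  .drop_jobs!({ user: 'root' }, timeout: 30)

result # => { completed: true, deleted_jobs: 7, queue_size: 14 }
```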
......@@ -106,7 +106,8 @@ The following API resources are available in the group context:
The following API resources are available outside of project and group contexts (including `/users`):
| Resource | Available endpoints |
|:--------------------------------------------------|:------------------------------------------------------------------------|
|:---------------------------------------------------|:------------------------------------------------------------------------|
| [Admin Sidekiq queues](admin_sidekiq_queues.md) | `/admin/sidekiq/queues/:queue_name` |
| [Appearance](appearance.md) **(CORE ONLY)** | `/application/appearance` |
| [Applications](applications.md) | `/applications` |
| [Audit Events](audit_events.md) **(PREMIUM ONLY)** | `/audit_events` |
......
......@@ -38,6 +38,66 @@ type AddAwardEmojiPayload {
errors: [String!]!
}
"""
Autogenerated input type of AdminSidekiqQueuesDeleteJobs
"""
input AdminSidekiqQueuesDeleteJobsInput {
"""
Delete jobs matching caller_id in the context metadata
"""
callerId: String
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Delete jobs matching project in the context metadata
"""
project: String
"""
The name of the queue to delete jobs from
"""
queueName: String!
"""
Delete jobs matching root_namespace in the context metadata
"""
rootNamespace: String
"""
Delete jobs matching subscription_plan in the context metadata
"""
subscriptionPlan: String
"""
Delete jobs matching user in the context metadata
"""
user: String
}
"""
Autogenerated return type of AdminSidekiqQueuesDeleteJobs
"""
type AdminSidekiqQueuesDeleteJobsPayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Reasons why the mutation failed.
"""
errors: [String!]!
"""
Information about the status of the deletion request
"""
result: DeleteJobsResponse
}
"""
An emoji awarded by a user.
"""
......@@ -601,6 +661,26 @@ type CreateSnippetPayload {
snippet: Snippet
}
"""
The response from the AdminSidekiqQueuesDeleteJobs mutation.
"""
type DeleteJobsResponse {
"""
Whether or not the entire queue was processed in time; if not, retrying the same request is safe
"""
completed: Boolean
"""
The number of matching jobs deleted
"""
deletedJobs: Int
"""
The queue size after processing
"""
queueSize: Int
}
"""
A single design
"""
......@@ -4767,6 +4847,7 @@ enum MoveType {
type Mutation {
addAwardEmoji(input: AddAwardEmojiInput!): AddAwardEmojiPayload
adminSidekiqQueuesDeleteJobs(input: AdminSidekiqQueuesDeleteJobsInput!): AdminSidekiqQueuesDeleteJobsPayload
createDiffNote(input: CreateDiffNoteInput!): CreateDiffNotePayload
createEpic(input: CreateEpicInput!): CreateEpicPayload
createImageDiffNote(input: CreateImageDiffNoteInput!): CreateImageDiffNotePayload
......
......@@ -19106,6 +19106,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "adminSidekiqQueuesDeleteJobs",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsPayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "createDiffNote",
"description": null,
......@@ -19978,6 +20005,213 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsPayload",
"description": "Autogenerated return type of AdminSidekiqQueuesDeleteJobs",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Reasons why the mutation failed.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "result",
"description": "Information about the status of the deletion request",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "DeleteJobsResponse",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "DeleteJobsResponse",
"description": "The response from the AdminSidekiqQueuesDeleteJobs mutation.",
"fields": [
{
"name": "completed",
"description": "Whether or not the entire queue was processed in time; if not, retrying the same request is safe",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "deletedJobs",
"description": "The number of matching jobs deleted",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "queueSize",
"description": "The queue size after processing",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsInput",
"description": "Autogenerated input type of AdminSidekiqQueuesDeleteJobs",
"fields": null,
"inputFields": [
{
"name": "user",
"description": "Delete jobs matching user in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "project",
"description": "Delete jobs matching project in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "rootNamespace",
"description": "Delete jobs matching root_namespace in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "subscriptionPlan",
"description": "Delete jobs matching subscription_plan in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "callerId",
"description": "Delete jobs matching caller_id in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "queueName",
"description": "The name of the queue to delete jobs from",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "AddAwardEmojiPayload",
......
......@@ -26,6 +26,16 @@ Autogenerated return type of AddAwardEmoji
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
## AdminSidekiqQueuesDeleteJobsPayload
Autogenerated return type of AdminSidekiqQueuesDeleteJobs
| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
| `result` | DeleteJobsResponse | Information about the status of the deletion request |
## AwardEmoji
An emoji awarded by a user.
......@@ -129,6 +139,16 @@ Autogenerated return type of CreateSnippet
| `errors` | String! => Array | Reasons why the mutation failed. |
| `snippet` | Snippet | The snippet after mutation |
## DeleteJobsResponse
The response from the AdminSidekiqQueuesDeleteJobs mutation.
| Name | Type | Description |
| --- | ---- | ---------- |
| `completed` | Boolean | Whether or not the entire queue was processed in time; if not, retrying the same request is safe |
| `deletedJobs` | Int | The number of matching jobs deleted |
| `queueSize` | Int | The queue size after processing |
## Design
A single design
......
......@@ -118,3 +118,11 @@ different components are making use of.
[Entity]: https://gitlab.com/gitlab-org/gitlab/blob/master/lib/api/entities.rb
[validation, and coercion of the parameters]: https://github.com/ruby-grape/grape#parameter-validation-and-coercion
[installing GitLab under a relative URL]: https://docs.gitlab.com/ee/install/relative_url.html
## Testing
When writing tests for new API endpoints, consider using a schema [fixture](./testing_guide/best_practices.md#fixtures) located in `/spec/fixtures/api/schemas`. You can `expect` a response to match a given schema:
```ruby
expect(response).to match_response_schema('merge_requests')
```
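For instance, a hypothetical request spec could combine the schema matcher with a status check (`project` and `user` are assumed spec fixtures):

```ruby
it 'returns merge requests matching the schema' do
  get api("/projects/#{project.id}/merge_requests", user)

  expect(response).to have_gitlab_http_status(:ok)
  expect(response).to match_response_schema('merge_requests')
end
```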
......@@ -202,13 +202,10 @@ create the actual RDS instance.
### RDS Subnet Group
1. Navigate to the RDS dashboard and select **Subnet Groups** from the left menu.
1. Give it a name (`gitlab-rds-group`), a description, and choose the VPC from
the VPC dropdown.
1. Click "Add all the subnets related to this VPC" and
remove the public ones, we only want the **private subnets**.
In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as
we defined them in the [subnets section](#subnets)).
Click **Create** when ready.
1. Click on **Create DB Subnet Group**.
1. Under **Subnet group details**, enter a name (we'll use `gitlab-rds-group`), a description, and choose the `gitlab-vpc` from the VPC dropdown.
1. Under **Add subnets**, click **Add all the subnets related to this VPC** and remove the public ones; we only want the **private subnets**. In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as we defined them in the [subnets section](#subnets)).
1. Click **Create** when ready.
![RDS Subnet Group](img/rds_subnet_group.png)
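As an aside, the same subnet group could be created with the AWS SDK for Ruby; a sketch, where the region and subnet IDs are placeholders:

```ruby
require 'aws-sdk-rds' # assumed gem; not part of GitLab

rds = Aws::RDS::Client.new(region: 'us-east-1') # placeholder region

rds.create_db_subnet_group(
  db_subnet_group_name: 'gitlab-rds-group',
  db_subnet_group_description: 'Private subnets for GitLab RDS',
  # Placeholder IDs for the 10.0.1.0/24 and 10.0.3.0/24 private subnets
  subnet_ids: %w[subnet-aaaa1111 subnet-bbbb2222]
)
```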
......@@ -217,33 +214,31 @@ create the actual RDS instance.
Now, it's time to create the database:
1. Select **Databases** from the left menu and click **Create database**.
1. Select PostgreSQL and click **Next**.
1. Since this is a production server, let's choose "Production". Click **Next**.
1. Let's see the instance specifications:
1. Leave the license model as is (`postgresql-license`).
1. For the version, select the latest of the 9.6 series (check the
[database requirements](../../install/requirements.md#postgresql-requirements))
if there are any updates on this).
1. For the size, let's select a `t2.medium` instance.
1. Multi-AZ-deployment is recommended as redundancy, so choose "Create
replica in different zone". Read more at
[High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
1. A Provisioned IOPS (SSD) storage type is best suited for HA (though you can
choose a General Purpose (SSD) to reduce the costs). Read more about it at
[Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
1. The rest of the settings on this page request a DB instance identifier, username
and a master password. We've chosen to use `gitlab-db-ha`, `gitlab` and a
very secure password respectively. Keep these in hand for later.
1. Click **Next** to proceed to the advanced settings.
1. Make sure to choose our GitLab VPC, our subnet group, set public accessibility to
**No**, and to leave it to create a new security group. The only additional
change which will be helpful is the database name for which we can use
`gitlabhq_production`. At the very bottom, there's an option to enable
auto updates to minor versions. You may want to turn it off.
1. When done, click **Create database**.
Now that the database is created, let's move on to setting up Redis with ElasticCache.
1. Select **Standard Create** for the database creation method.
1. Select **PostgreSQL** as the database engine and select **PostgreSQL 10.9-R1** from the version dropdown menu (check the [database requirements](../../install/requirements.md#postgresql-requirements) to see if there are any updates on this for your chosen version of GitLab).
1. Since this is a production server, let's choose **Production** from the **Templates** section.
1. Under **Settings**, set a DB instance identifier, a master username, and a master password. We'll use `gitlab-db-ha`, `gitlab`, and a very secure password respectively. Make a note of these as we'll need them later.
1. For the DB instance size, select **Standard classes** and select an instance size that meets your requirements from the dropdown menu. We'll use a `db.m4.large` instance.
1. Under **Storage**, configure the following:
1. Select **Provisioned IOPS (SSD)** from the storage type dropdown menu. Provisioned IOPS (SSD) storage is best suited for HA (though you can choose General Purpose (SSD) to reduce the costs). Read more about it at [Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
1. Allocate storage and set provisioned IOPS. We'll use the minimum values, `100` and `1000`, respectively.
1. Enable storage autoscaling (optional) and set a maximum storage threshold.
1. Under **Availability & durability**, select **Create a standby instance** to have a standby RDS instance provisioned in a different Availability Zone. Read more at [High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
1. Under **Connectivity**, configure the following:
1. Select the VPC we created earlier (`gitlab-vpc`) from the **Virtual Private Cloud (VPC)** dropdown menu.
1. Expand the **Additional connectivity configuration** section and select the subnet group (`gitlab-rds-group`) we created earlier.
1. Set public accessibility to **No**.
1. Under **VPC security group**, select **Create new** and enter a name. We'll use `gitlab-rds-sec-group`.
1. Leave the database port as the default `5432`.
1. For **Database authentication**, select **Password authentication**.
1. Expand the **Additional configuration** section and complete the following:
1. The initial database name. We'll use `gitlabhq_production`.
1. Configure your preferred backup settings.
1. The only other change we'll make here is to disable auto minor version updates under **Maintenance**.
1. Leave all the other settings as is or tweak according to your needs.
1. Once you're happy, click **Create database**.
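For reference, the same instance could be provisioned with the AWS SDK for Ruby; a sketch mirroring the choices above (the region and gem usage are assumptions, not part of this guide):

```ruby
require 'aws-sdk-rds' # assumed gem; not part of GitLab

rds = Aws::RDS::Client.new(region: 'us-east-1') # placeholder region

rds.create_db_instance(
  db_instance_identifier: 'gitlab-db-ha',
  engine: 'postgres',
  engine_version: '10.9',
  db_instance_class: 'db.m4.large',
  storage_type: 'io1',        # Provisioned IOPS (SSD)
  allocated_storage: 100,     # GiB, the minimum used above
  iops: 1000,                 # the minimum used above
  multi_az: true,             # standby instance in another AZ
  db_subnet_group_name: 'gitlab-rds-group',
  publicly_accessible: false,
  db_name: 'gitlabhq_production',
  master_username: 'gitlab',
  master_user_password: ENV.fetch('GITLAB_DB_PASSWORD') # keep out of code
)
```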
Now that the database is created, let's move on to setting up Redis with ElastiCache.
## Redis with ElastiCache
......@@ -311,7 +306,7 @@ On the EC2 dashboard, look for Load Balancer in the left navigation bar:
1. For **Ping Path**, enter `/explore`. (We use `/explore` as it's a public endpoint that does
not require authorization.)
1. Keep the default **Advanced Details** or adjust them according to your needs.
1. For now, don't click **Add EC2 Instances**, as we don't have any instances to add yet. Come back
1. Click **Add EC2 Instances** but, as we don't have any instances to add yet, come back
to your load balancer after creating your GitLab instances and add them.
1. Click **Add Tags** and add any tags you need.
1. Click **Review and Create**, review all your settings, and click **Create** if you're happy.
......
---
type: reference
---
# Gitaly timeouts
![gitaly timeouts](img/gitaly_timeouts.png)
Three timeout types can be configured to make sure that long-running
Gitaly calls don't needlessly take up resources.
- Default timeout
This timeout is the default for most Gitaly calls.
It should be shorter than the worker timeout that can be configured
for
[Puma](https://docs.gitlab.com/omnibus/settings/puma.html#puma-settings)
or [Unicorn](https://docs.gitlab.com/omnibus/settings/unicorn.html).
This makes sure that Gitaly calls made within a web request cannot
exceed the entire request timeout.
The default for this timeout is 55 seconds.
- Fast timeout
This is the timeout for very short Gitaly calls.
The default for this timeout is 10 seconds.
- Medium timeout
This timeout should be between the default and the fast timeout.
The default for this timeout is 30 seconds.
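These three values are stored as `ApplicationSetting` columns (their validations are adjusted elsewhere in this change). A minimal Rails console sketch, assuming admin access to the instance:

```ruby
# Sketch only: the medium and fast timeouts must not exceed the default.
setting = ApplicationSetting.current
setting.update!(
  gitaly_timeout_default: 55, # seconds
  gitaly_timeout_medium: 30,
  gitaly_timeout_fast: 10
)
```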
......@@ -24,6 +24,7 @@ include:
- [Protected paths](protected_paths.md) **(CORE ONLY)**
- [Help messages for the `/help` page and the login page](help_page.md)
- [Push event activities limit and bulk push events](push_event_activities_limit.md)
- [Gitaly timeouts](gitaly_timeouts.md)
NOTE: **Note:**
You can change the [first day of the week](../../profile/preferences.md) for the entire GitLab instance
......
......@@ -65,7 +65,7 @@ To enable Container Scanning in your pipeline, you need:
services:
- docker:19.03.1-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_REF_SHA
IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build -t $IMAGE_TAG .
......
......@@ -116,6 +116,7 @@ The following table depicts the various user permission levels in a project.
| Turn on/off protected branch push for devs | | | | ✓ | ✓ |
| Enable/disable tag protections | | | | ✓ | ✓ |
| Edit project | | | | ✓ | ✓ |
| Edit project badges | | | | ✓ | ✓ |
| Add deploy keys to project | | | | ✓ | ✓ |
| Configure project hooks | | | | ✓ | ✓ |
| Manage Runners | | | | ✓ | ✓ |
......
......@@ -12,7 +12,7 @@ or ways to contact the project maintainers.
## Project badges
Badges can be added to a project and will then be visible on the project's overview page.
Badges can be added to a project by Maintainers or Owners, and will then be visible on the project's overview page.
If you find that you have to add the same badges to several projects, you may want to add them at the [group level](#group-badges).
To add a new badge to a project:
......
......@@ -60,6 +60,14 @@ if you clone the wiki repository locally. All uploaded files prior to GitLab
11.3 are stored in GitLab itself. If you want them to be part of the wiki's Git
repository, you will have to upload them again.
### Special characters in page titles
Wiki pages are stored as files in a Git repository, so certain characters have a special meaning:
- Spaces are converted into hyphens when storing a page.
- Hyphens (`-`) are converted back into spaces when displaying a page.
- Slashes (`/`) can't be used, because they're used as path separators.
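As an illustration of the hyphen round trip, a simplified sketch of the display-time conversion (the real logic lives in `WikiPage.unhyphenize` and may differ in detail):

```ruby
# Hypothetical simplification of WikiPage.unhyphenize.
def unhyphenize(name)
  name.gsub(/-+/, ' ')
end

unhyphenize('Import-existing-repositories-into-GitLab')
# => "Import existing repositories into GitLab"
```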
### Length restrictions for file and directory names
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24364) in GitLab 12.8.
......
# frozen_string_literal: true
module API
module Admin
class Sidekiq < Grape::API
before { authenticated_as_admin! }
namespace 'admin' do
namespace 'sidekiq' do
namespace 'queues' do
desc 'Drop jobs matching the given metadata from the Sidekiq queue'
params do
Labkit::Context::KNOWN_KEYS.each do |key|
optional key, type: String, allow_blank: false
end
at_least_one_of(*Labkit::Context::KNOWN_KEYS)
end
delete ':queue_name' do
result =
Gitlab::SidekiqQueue
.new(params[:queue_name])
.drop_jobs!(declared_params, timeout: 30)
present result
rescue Gitlab::SidekiqQueue::NoMetadataError
render_api_error!("Invalid metadata: #{declared_params}", 400)
rescue Gitlab::SidekiqQueue::InvalidQueueError
not_found!(params[:queue_name])
end
end
end
end
end
end
end
......@@ -110,6 +110,7 @@ module API
# Keep in alphabetical order
mount ::API::AccessRequests
mount ::API::Admin::Sidekiq
mount ::API::Appearance
mount ::API::Applications
mount ::API::Avatar
......
......@@ -17,7 +17,7 @@ module Gitlab
(start_id..stop_id).each_slice(QUERY_ITEM_SIZE).each do |range|
model
.where(lock_version: nil)
.where(id: range)
.where("ID BETWEEN ? AND ?", range.first, range.last)
.update_all(lock_version: 0)
end
end
......
......@@ -3,6 +3,7 @@
module Gitlab
module Checks
class SnippetCheck < BaseChecker
DEFAULT_BRANCH = 'master'.freeze
ERROR_MESSAGES = {
create_delete_branch: 'You can not create or delete branches.'
}.freeze
......@@ -29,6 +30,12 @@ module Gitlab
true
end
private
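# Creating the snippet default branch ('master') is allowed; creating
# or deleting any other branch is still rejected by this check.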
def creation?
@branch_name != DEFAULT_BRANCH && super
end
end
end
end
......@@ -65,6 +65,7 @@ tree:
- resource_label_events:
- label:
- :priorities
- :external_pull_requests
- ci_pipelines:
- notes:
- :author
......@@ -74,7 +75,6 @@ tree:
- :statuses
- :external_pull_request
- :merge_request
- :external_pull_requests
- :auto_devops
- :triggers
- :pipeline_schedules
......
......@@ -51,6 +51,8 @@ module Gitlab
epic
ProjectCiCdSetting
container_expiration_policy
external_pull_request
external_pull_requests
].freeze
def create
......
# frozen_string_literal: true
module Gitlab
class SidekiqQueue
include Gitlab::Utils::StrongMemoize
NoMetadataError = Class.new(StandardError)
InvalidQueueError = Class.new(StandardError)
attr_reader :queue_name
def initialize(queue_name)
@queue_name = queue_name
end
def drop_jobs!(search_metadata, timeout:)
completed = false
deleted_jobs = 0
job_search_metadata =
search_metadata
.stringify_keys
.slice(*Labkit::Context::KNOWN_KEYS)
.transform_keys { |key| "meta.#{key}" }
.compact
raise NoMetadataError if job_search_metadata.empty?
raise InvalidQueueError unless queue
begin
Timeout.timeout(timeout) do
queue.each do |job|
next unless job_matches?(job, job_search_metadata)
job.delete
deleted_jobs += 1
end
completed = true
end
rescue Timeout::Error
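# Intentional no-op: hitting the timeout just means the queue was not
# fully processed, so `completed` stays false in the result.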
end
{
completed: completed,
deleted_jobs: deleted_jobs,
queue_size: queue.size
}
end
def queue
strong_memoize(:queue) do
# Sidekiq::Queue.new always returns a queue, even if it doesn't
# exist.
Sidekiq::Queue.all.find { |queue| queue.name == queue_name }
end
end
def job_matches?(job, job_search_metadata)
job_search_metadata.all? { |key, value| job[key] == value }
end
end
end
......@@ -4993,9 +4993,6 @@ msgstr ""
msgid "Commits|An error occurred while fetching merge requests data."
msgstr ""
msgid "Commits|Commit: %{commitText}"
msgstr ""
msgid "Commits|History"
msgstr ""
......@@ -8774,9 +8771,6 @@ msgstr ""
msgid "Forking in progress"
msgstr ""
msgid "Forking repository"
msgstr ""
msgid "Forks"
msgstr ""
......
......@@ -16,7 +16,7 @@ FactoryBot.define do
page { OpenStruct.new(url_path: 'some-name') }
association :wiki, factory: :project_wiki, strategy: :build
initialize_with { new(wiki, page, true) }
initialize_with { new(wiki, page) }
before(:create) do |page, evaluator|
page.attributes = evaluator.attrs
......
......@@ -139,11 +139,6 @@ describe 'Project active tab' do
it_behaves_like 'page has active sub tab', _('Repository Analytics')
end
context 'on project Analytics/Repository Analytics' do
it_behaves_like 'page has active tab', _('Analytics')
it_behaves_like 'page has active sub tab', _('Repository Analytics')
end
context 'on project Analytics/Cycle Analytics' do
before do
click_tab(_('CI / CD Analytics'))
......
......@@ -14,7 +14,7 @@ describe 'Projects > Show > User sees last commit CI status' do
page.within '.commit-detail' do
expect(page).to have_content(project.commit.sha[0..6])
expect(page).to have_selector('[aria-label="Commit: skipped"]')
expect(page).to have_selector('[aria-label="Pipeline: skipped"]')
end
end
end
......@@ -33,6 +33,8 @@ describe 'User views a wiki page' do
fill_in(:wiki_content, with: 'wiki content')
click_on('Create page')
end
expect(page).to have_content('Wiki was successfully updated.')
end
it 'shows the history of a page that has a path' do
......@@ -62,8 +64,10 @@ describe 'User views a wiki page' do
expect(page).to have_content('Edit Page')
fill_in('Content', with: 'Updated Wiki Content')
click_on('Save changes')
expect(page).to have_content('Wiki was successfully updated.')
click_on('Page history')
page.within(:css, '.nav-text') do
......@@ -132,6 +136,36 @@ describe 'User views a wiki page' do
end
end
context 'when a page has special characters in its title' do
let(:title) { '<foo> !@#$%^&*()[]{}=_+\'"\\|<>? <bar>' }
before do
wiki_page.update(title: title)
end
it 'preserves the special characters' do
visit(project_wiki_path(project, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_css('.wiki-pages li', text: title)
end
end
context 'when a page has XSS in its title or content' do
let(:title) { '<script>alert("title")<script>' }
before do
wiki_page.update(title: title, content: 'foo <script>alert("content")</script> bar')
end
it 'safely displays the page' do
visit(project_wiki_path(project, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_content('foo bar')
end
end
context 'when a page has XSS in its message' do
before do
wiki_page.update(message: '<script>alert(true)<script>', content: 'XSS update')
......
{
"approvals_before_merge": 0,
"archived": false,
"auto_cancel_pending_pipelines": "enabled",
"autoclose_referenced_issues": true,
"boards": [],
"build_allow_git_fetch": true,
"build_coverage_regex": null,
"build_timeout": 3600,
"ci_cd_settings": {
"group_runners_enabled": true
},
"ci_config_path": null,
"ci_pipelines": [
{
"before_sha": "0000000000000000000000000000000000000000",
"committed_at": null,
"config_source": "repository_source",
"created_at": "2020-02-25T12:08:40.615Z",
"duration": 61,
"external_pull_request": {
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
},
"failure_reason": null,
"finished_at": "2020-02-25T12:09:44.464Z",
"id": 120842687,
"iid": 8,
"lock_version": 3,
"notes": [],
"project_id": 17121868,
"protected": false,
"ref": "new-branch",
"sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"source": "external_pull_request_event",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"stages": [],
"started_at": "2020-02-25T12:08:42.511Z",
"status": "success",
"tag": false,
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:09:44.473Z",
"user_id": 4087087,
"yaml_errors": null
},
{
"before_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"committed_at": null,
"config_source": "repository_source",
"created_at": "2020-02-25T12:08:37.434Z",
"duration": 57,
"external_pull_request": {
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
},
"failure_reason": null,
"finished_at": "2020-02-25T12:09:36.557Z",
"id": 120842675,
"iid": 7,
"lock_version": 3,
"notes": [],
"project_id": 17121868,
"protected": false,
"ref": "new-branch",
"sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"source": "external_pull_request_event",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"stages": [],
"started_at": "2020-02-25T12:08:38.682Z",
"status": "success",
"tag": false,
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:09:36.565Z",
"user_id": 4087087,
"yaml_errors": null
}
],
"custom_attributes": [],
"delete_error": null,
"description": "Vim, Tmux and others",
"disable_overriding_approvers_per_merge_request": null,
"external_authorization_classification_label": "",
"external_pull_requests": [
{
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
}
],
"external_webhook_token": "D3mVYFzZkgZ5kMfcW_wx",
"issues": [],
"labels": [],
"milestones": [],
"pipeline_schedules": [],
"project_feature": {
"builds_access_level": 20,
"created_at": "2020-02-25T11:20:09.925Z",
"forking_access_level": 20,
"id": 17494715,
"issues_access_level": 0,
"merge_requests_access_level": 0,
"pages_access_level": 20,
"project_id": 17121868,
"repository_access_level": 20,
"snippets_access_level": 0,
"updated_at": "2020-02-25T11:20:10.376Z",
"wiki_access_level": 0
},
"public_builds": true,
"releases": [],
"shared_runners_enabled": true,
"snippets": [],
"triggers": [],
"visibility_level": 20
}
......@@ -68,10 +68,10 @@ exports[`Repository last commit component renders commit widget 1`] = `
<gl-link-stub
class="js-commit-pipeline"
href="https://test.com/pipeline"
title="Commit: failed"
title="Pipeline: failed"
>
<ci-icon-stub
aria-label="Commit: failed"
aria-label="Pipeline: failed"
cssclasses=""
size="24"
status="[object Object]"
......@@ -174,10 +174,10 @@ exports[`Repository last commit component renders the signature HTML as returned
<gl-link-stub
class="js-commit-pipeline"
href="https://test.com/pipeline"
title="Commit: failed"
title="Pipeline: failed"
>
<ci-icon-stub
aria-label="Commit: failed"
aria-label="Pipeline: failed"
cssclasses=""
size="24"
status="[object Object]"
......
......@@ -2,6 +2,8 @@ import $ from 'jquery';
import Api from '~/api';
import Search from '~/pages/search/show/search';
jest.mock('~/api');
describe('Search', () => {
const fixturePath = 'search/show.html';
const searchTerm = 'some search';
......@@ -19,20 +21,19 @@ describe('Search', () => {
new Search(); // eslint-disable-line no-new
});
it('requests groups from backend when filtering', done => {
spyOn(Api, 'groups').and.callFake(term => {
it('requests groups from backend when filtering', () => {
jest.spyOn(Api, 'groups').mockImplementation(term => {
expect(term).toBe(searchTerm);
done();
});
const inputElement = fillDropdownInput('.js-search-group-dropdown');
$(inputElement).trigger('input');
});
it('requests projects from backend when filtering', done => {
spyOn(Api, 'projects').and.callFake(term => {
it('requests projects from backend when filtering', () => {
jest.spyOn(Api, 'projects').mockImplementation(term => {
expect(term).toBe(searchTerm);
done();
});
const inputElement = fillDropdownInput('.js-search-project-dropdown');
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { compileToFunctions } from 'vue-template-compiler';
import { mount } from '@vue/test-utils';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
......@@ -9,50 +11,52 @@ describe('ImageDiffViewer', () => {
newPath: GREEN_BOX_IMAGE_URL,
oldPath: RED_BOX_IMAGE_URL,
};
const allProps = {
...requiredProps,
oldSize: 2048,
newSize: 1024,
};
let wrapper;
let vm;
function createComponent(props) {
const ImageDiffViewer = Vue.extend(imageDiffViewer);
vm = mountComponent(ImageDiffViewer, props);
wrapper = mount(ImageDiffViewer, { propsData: props });
vm = wrapper.vm;
}
const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
const event = document.createEvent('MouseEvents');
event.initMouseEvent(
eventName,
true,
true,
window,
1,
const event = new MouseEvent(eventName, {
bubbles: true,
cancelable: true,
view: window,
detail: 1,
screenX: clientX,
clientX,
0,
clientX,
0,
false,
false,
false,
false,
0,
null,
);
});
// JSDOM does not implement experimental APIs
event.pageX = clientX;
el.dispatchEvent(event);
};
const dragSlider = (sliderElement, dragPixel = 20) => {
const dragSlider = (sliderElement, doc, dragPixel) => {
triggerEvent('mousedown', sliderElement);
triggerEvent('mousemove', document.body, dragPixel);
triggerEvent('mouseup', document.body);
triggerEvent('mousemove', doc.body, dragPixel);
triggerEvent('mouseup', doc.body);
};
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
it('renders image diff for replaced', done => {
createComponent(requiredProps);
createComponent({ ...allProps });
vm.$nextTick(() => {
const metaInfoElements = vm.$el.querySelectorAll('.image-info');
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
......@@ -66,35 +70,35 @@ describe('ImageDiffViewer', () => {
'Onion skin',
);
expect(metaInfoElements.length).toBe(2);
expect(metaInfoElements[0]).toHaveText('2.00 KiB');
expect(metaInfoElements[1]).toHaveText('1.00 KiB');
done();
});
});
it('renders image diff for new', done => {
createComponent(
Object.assign({}, requiredProps, {
diffMode: 'new',
oldPath: '',
}),
);
createComponent({ ...allProps, diffMode: 'new', oldPath: '' });
setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(metaInfoElement).toHaveText('1.00 KiB');
done();
});
});
it('renders image diff for deleted', done => {
createComponent(
Object.assign({}, requiredProps, {
diffMode: 'deleted',
newPath: '',
}),
);
createComponent({ ...allProps, diffMode: 'deleted', newPath: '' });
setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
setTimeout(() => {
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
expect(metaInfoElement).toHaveText('2.00 KiB');
done();
});
......@@ -105,26 +109,40 @@ describe('ImageDiffViewer', () => {
components: {
imageDiffViewer,
},
template: `
<image-diff-viewer diff-mode="renamed" new-path="${GREEN_BOX_IMAGE_URL}" old-path="">
data: {
...allProps,
diffMode: 'renamed',
},
...compileToFunctions(`
<image-diff-viewer
:diff-mode="diffMode"
:new-path="newPath"
:old-path="oldPath"
:new-size="newSize"
:old-size="oldSize"
>
<span slot="image-overlay" class="overlay">test</span>
</image-diff-viewer>
`,
`),
}).$mount();
setTimeout(() => {
setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(vm.$el.querySelector('.overlay')).not.toBe(null);
expect(metaInfoElement).toHaveText('2.00 KiB');
done();
});
});
describe('swipeMode', () => {
beforeEach(done => {
createComponent(requiredProps);
createComponent({ ...requiredProps });
setTimeout(() => {
setImmediate(() => {
done();
});
});
......@@ -141,9 +159,9 @@ describe('ImageDiffViewer', () => {
describe('onionSkin', () => {
beforeEach(done => {
createComponent(requiredProps);
createComponent({ ...requiredProps });
setTimeout(() => {
setImmediate(() => {
done();
});
});
......@@ -163,7 +181,7 @@ describe('ImageDiffViewer', () => {
vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
vm.$nextTick(() => {
dragSlider(vm.$el.querySelector('.dragger'));
dragSlider(vm.$el.querySelector('.dragger'), document, 20);
vm.$nextTick(() => {
expect(vm.$el.querySelector('.dragger').style.left).toBe('20px');
......
......@@ -25,10 +25,19 @@ describe Gitlab::Checks::SnippetCheck do
context 'trying to create the branch' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:ref) { 'refs/heads/feature' }
it 'raises an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.')
end
context "when branch is 'master'" do
let(:ref) { 'refs/heads/master' }
it "allows the operation" do
expect { subject.exec }.not_to raise_error
end
end
end
end
end
......@@ -426,6 +426,10 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
expect(pipeline_with_external_pr.external_pull_request).to be_persisted
end
it 'has no import failures' do
expect(@project.import_failures.size).to eq 0
end
end
end
end
......@@ -499,6 +503,30 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
context 'multiple pipelines reference the same external pull request' do
before do
setup_import_export_config('multi_pipeline_ref_one_external_pr')
expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project successfully',
issues: 0,
labels: 0,
milestones: 0,
ci_pipelines: 2,
external_pull_requests: 1,
import_failures: 0
it 'restores external pull request for the restored pipelines' do
external_pr = project.external_pull_requests.first
project.ci_pipelines.each do |pipeline_with_external_pr|
expect(pipeline_with_external_pr.external_pull_request).to be_persisted
expect(pipeline_with_external_pr.external_pull_request).to eq(external_pr)
end
end
end
context 'when post import action throw non-retriable exception' do
let(:exception) { StandardError.new('post_import_error') }
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::SidekiqQueue do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
describe '#drop_jobs!' do
shared_examples 'queue processing' do
let(:sidekiq_queue) { described_class.new('authorized_projects') }
let_it_be(:sidekiq_queue_user) { create(:user) }
before do
add_job(create(:user))
add_job(sidekiq_queue_user)
add_job(sidekiq_queue_user)
end
context 'when the queue is not processed in time' do
before do
calls = 0
allow(sidekiq_queue).to receive(:job_matches?).and_wrap_original do |m, *args|
raise Timeout::Error if calls > 0
calls += 1
m.call(*args)
end
end
it 'returns a non-completion flag, the number of jobs deleted, and the remaining queue size' do
expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
.to eq(completed: false,
deleted_jobs: timeout_deleted,
queue_size: 3 - timeout_deleted)
end
end
context 'when the queue is processed in time' do
it 'returns a completion flag, the number of jobs deleted, and the remaining queue size' do
expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
.to eq(completed: true,
deleted_jobs: no_timeout_deleted,
queue_size: 3 - no_timeout_deleted)
end
end
end
context 'when there are no matching jobs' do
include_examples 'queue processing' do
let(:search_metadata) { { project: 1 } }
let(:timeout_deleted) { 0 }
let(:no_timeout_deleted) { 0 }
end
end
context 'when there are matching jobs' do
include_examples 'queue processing' do
let(:search_metadata) { { user: sidekiq_queue_user.username } }
let(:timeout_deleted) { 1 }
let(:no_timeout_deleted) { 2 }
end
end
context 'when there are no valid metadata keys passed' do
it 'raises NoMetadataError' do
add_job(create(:user))
expect { described_class.new('authorized_projects').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
end
end
context 'when the queue does not exist' do
it 'raises InvalidQueueError' do
expect { described_class.new('foo').drop_jobs!({ user: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::InvalidQueueError)
end
end
end
end
......@@ -380,6 +380,12 @@ describe ApplicationSetting do
expect(subject).to be_invalid
end
it 'does not prevent from saving when gitaly timeouts were previously invalid' do
subject.update_column(:gitaly_timeout_default, Settings.gitlab.max_request_duration_seconds + 1)
expect(subject.reload).to be_valid
end
end
describe 'enforcing terms' do
......
......@@ -475,43 +475,59 @@ describe WikiPage do
end
end
describe "#title" do
it "replaces a hyphen to a space" do
subject.title = "Import-existing-repositories-into-GitLab"
describe '#title_changed?' do
using RSpec::Parameterized::TableSyntax
expect(subject.title).to eq("Import existing repositories into GitLab")
let(:untitled_page) { described_class.new(wiki) }
let(:directory_page) do
create_page('parent/child', 'test content')
wiki.find_page('parent/child')
end
it 'unescapes html' do
subject.title = 'foo &amp; bar'
where(:page, :title, :changed) do
:untitled_page | nil | false
:untitled_page | 'new title' | true
expect(subject.title).to eq('foo & bar')
:new_page | nil | true
:new_page | 'test page' | true
:new_page | 'new title' | true
:existing_page | nil | false
:existing_page | 'test page' | false
:existing_page | '/test page' | false
:existing_page | 'new title' | true
:directory_page | nil | false
:directory_page | 'parent/child' | false
:directory_page | 'child' | false
:directory_page | '/child' | true
:directory_page | 'parent/other' | true
:directory_page | 'other/child' | true
end
with_them do
it 'returns the expected value' do
subject = public_send(page)
subject.title = title if title
expect(subject.title_changed?).to be(changed)
end
end
end
describe '#path' do
let(:path) { 'mypath.md' }
let(:git_page) { instance_double('Gitlab::Git::WikiPage', path: path).as_null_object }
it 'returns the path when persisted' do
page = described_class.new(wiki, git_page, true)
expect(page.path).to eq(path)
expect(existing_page.path).to eq('test-page.md')
end
it 'returns nil when not persisted' do
page = described_class.new(wiki, git_page, false)
expect(page.path).to be_nil
expect(new_page.path).to be_nil
end
end
describe '#directory' do
context 'when the page is at the root directory' do
subject do
create_page('file', 'content')
wiki.find_page('file')
end
subject { existing_page }
it 'returns an empty string' do
expect(subject.directory).to eq('')
......
# frozen_string_literal: true
require 'spec_helper'
describe API::Admin::Sidekiq do
let_it_be(:admin) { create(:admin) }
describe 'DELETE /admin/sidekiq/queues/:queue_name' do
context 'when the user is not an admin' do
it 'returns a 403' do
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", create(:user))
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when the user is an admin' do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
context 'valid request' do
it 'returns info about the deleted jobs' do
add_job(admin)
add_job(admin)
add_job(create(:user))
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq('completed' => true,
'deleted_jobs' => 2,
'queue_size' => 1)
end
end
context 'when no required params are provided' do
it 'returns a 400' do
delete api("/admin/sidekiq/queues/authorized_projects?user_2=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when the queue does not exist' do
it 'returns a 404' do
delete api("/admin/sidekiq/queues/authorized_projects_2?user=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe 'Deleting Sidekiq jobs' do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
let(:variables) { { user: admin.username, queue_name: 'authorized_projects' } }
let(:mutation) { graphql_mutation(:admin_sidekiq_queues_delete_jobs, variables) }
def mutation_response
graphql_mutation_response(:admin_sidekiq_queues_delete_jobs)
end
context 'when the user is not an admin' do
let(:current_user) { create(:user) }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['You must be an admin to use this mutation']
end
context 'when the user is an admin' do
let(:current_user) { admin }
context 'valid request' do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
it 'returns info about the deleted jobs' do
add_job(admin)
add_job(admin)
add_job(create(:user))
post_graphql_mutation(mutation, current_user: admin)
expect(mutation_response['errors']).to be_empty
expect(mutation_response['result']).to eq('completed' => true,
'deletedJobs' => 2,
'queueSize' => 1)
end
end
context 'when no required params are provided' do
let(:variables) { { queue_name: 'authorized_projects' } }
it_behaves_like 'a mutation that returns errors in the response',
errors: ['No metadata provided']
end
context 'when the queue does not exist' do
let(:variables) { { user: admin.username, queue_name: 'authorized_projects_2' } }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['Queue authorized_projects_2 not found']
end
end
end
......@@ -30,6 +30,14 @@ RSpec.shared_examples 'restores project successfully' do |**results|
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
it 'has ci pipelines' do
expect(project.ci_pipelines.size).to eq(results.fetch(:ci_pipelines, 0))
end
it 'has external pull requests' do
expect(project.external_pull_requests.size).to eq(results.fetch(:external_pull_requests, 0))
end
# This test is quarantined because the use of magic number 999 causes failure on CI
it 'does not set params that are excluded from import_export settings', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207932#note_293724442' do
expect(project.import_type).to be_nil
......