Commit 76623c12 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent b042382b
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-canonical-gitlab-tag: &if-canonical-gitlab-tag
.if-canonical-dot-com-gitlab-org-groups-tag: &if-canonical-dot-com-gitlab-org-groups-tag
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/)/ && $CI_COMMIT_TAG'
cloud-native-image:
......@@ -13,5 +13,5 @@ cloud-native-image:
- install_gitlab_gem
- CNG_PROJECT_PATH="gitlab-org/build/CNG" BUILD_TRIGGER_TOKEN=$CI_JOB_TOKEN ./scripts/trigger-build cng
rules:
- <<: *if-canonical-gitlab-tag
- <<: *if-canonical-dot-com-gitlab-org-groups-tag
when: manual
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-canonical-dot-com-gitlab-org-groups-merge-request: &if-canonical-dot-com-gitlab-org-groups-merge-request
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/)/ && $CI_MERGE_REQUEST_IID'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-not-ee: &if-not-ee
if: '$CI_PROJECT_NAME !~ /^gitlab(-ee)?$/'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-default-refs: &if-default-refs
if: '$CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_REF_NAME =~ /^[\d-]+-stable(-ee)?$/ || $CI_COMMIT_REF_NAME =~ /^\d+-\d+-auto-deploy-\d+$/ || $CI_COMMIT_REF_NAME =~ /^security\// || $CI_MERGE_REQUEST_IID || $CI_COMMIT_TAG'
# Make sure to update all the similar patterns in other CI config files if you modify these patterns
.code-docs-patterns: &code-docs-patterns
- ".gitlab/route-map.yml"
- "doc/**/*"
- ".markdownlint.json"
# Make sure to update all the similar patterns in other CI config files if you modify these patterns
.code-backstage-qa-patterns: &code-backstage-qa-patterns
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,scss-lint}.yml"
- ".csscomb.json"
- "Dockerfile.assets"
- "*_VERSION"
- "Gemfile{,.lock}"
- "Rakefile"
- "{babel.config,jest.config}.js"
- "config.ru"
- "{package.json,yarn.lock}"
- "{,ee/}{app,bin,config,db,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
# Backstage changes
- "Dangerfile"
- "danger/**/*"
- "{,ee/}fixtures/**/*"
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
# QA changes
- ".dockerignore"
- "qa/**/*"
.review-docs:
extends:
- .default-tags
- .default-retry
- .only:variables-canonical-dot-com
- .only:changes-docs
only:
refs:
- merge_requests
rules:
- <<: *if-canonical-dot-com-gitlab-org-groups-merge-request
changes: *code-docs-patterns
when: manual
image: ruby:2.6-alpine
stage: review
dependencies: []
......@@ -32,7 +75,6 @@ review-docs-deploy:
extends: .review-docs
script:
- ./scripts/trigger-build-docs deploy
when: manual
# Cleanup remote environment of gitlab-docs
review-docs-cleanup:
......@@ -42,14 +84,15 @@ review-docs-cleanup:
action: stop
script:
- ./scripts/trigger-build-docs cleanup
when: manual
docs lint:
extends:
- .default-tags
- .default-retry
- .default-only
- .only:changes-docs
rules:
- <<: *if-default-refs
changes: *code-docs-patterns
when: on_success
image: "registry.gitlab.com/gitlab-org/gitlab-docs:docs-lint"
stage: test
dependencies: []
......@@ -69,14 +112,17 @@ docs lint:
graphql-reference-verify:
extends:
- .only-ee
- .default-tags
- .default-retry
- .default-cache
- .default-only
- .default-before_script
- .only:changes-code-backstage-qa
- .use-pg9
rules:
- <<: *if-not-ee
when: never
- <<: *if-default-refs
changes: *code-backstage-qa-patterns
when: on_success
stage: test
needs: ["setup-test-env"]
script:
......
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-default-refs: &if-default-refs
if: '$CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_REF_NAME =~ /^[\d-]+-stable(-ee)?$/ || $CI_COMMIT_REF_NAME =~ /^\d+-\d+-auto-deploy-\d+$/ || $CI_COMMIT_REF_NAME =~ /^security\// || $CI_MERGE_REQUEST_IID || $CI_COMMIT_TAG'
# Make sure to update all the similar patterns in other CI config files if you modify these patterns
.code-patterns: &code-patterns
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,scss-lint}.yml"
- ".csscomb.json"
- "Dockerfile.assets"
- "*_VERSION"
- "Gemfile{,.lock}"
- "Rakefile"
- "{babel.config,jest.config}.js"
- "config.ru"
- "{package.json,yarn.lock}"
- "{,ee/}{app,bin,config,db,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
.only-code-memory-job-base:
extends:
- .default-tags
- .default-retry
- .default-cache
- .default-only
- .default-before_script
- .only:changes-code
rules:
- <<: *if-default-refs
changes: *code-patterns
when: on_success
memory-static:
extends: .only-code-memory-job-base
......
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-canonical-dot-com-gitlab-org-group-master-refs: &if-canonical-dot-com-gitlab-org-group-master-refs
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_COMMIT_REF_NAME == "master"'
pages:
extends:
- .default-tags
- .default-retry
- .default-cache
- .default-only
- .only:variables-canonical-dot-com
- .only:changes-code-backstage-qa
only:
refs:
- master
rules:
- <<: *if-canonical-dot-com-gitlab-org-group-master-refs
when: on_success
stage: pages
dependencies: ["coverage", "karma", "gitlab:assets:compile pull-cache"]
script:
......
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-canonical-gitlab-schedule: &if-canonical-gitlab-schedule
.if-canonical-dot-com-gitlab-org-group-schedule: &if-canonical-dot-com-gitlab-org-group-schedule
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_PIPELINE_SOURCE == "schedule"'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
......@@ -86,7 +86,7 @@ package-and-qa:
- <<: *if-canonical-gitlab-merge-request
changes: *code-patterns
when: manual
- <<: *if-canonical-gitlab-schedule
- <<: *if-canonical-dot-com-gitlab-org-group-schedule
when: on_success
needs: ["build-qa-image", "gitlab:assets:compile pull-cache"]
allow_failure: true
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-canonical-gitlab-schedule: &if-canonical-gitlab-schedule
.if-canonical-dot-com-gitlab-org-group-schedule: &if-canonical-dot-com-gitlab-org-group-schedule
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_PIPELINE_SOURCE == "schedule"'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
......@@ -47,7 +47,7 @@ build-qa-image:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
- <<: *if-canonical-gitlab-schedule
- <<: *if-canonical-dot-com-gitlab-org-group-schedule
when: on_success
script:
- '[[ ! -d "ee/" ]] || export GITLAB_EDITION="ee"'
......
// ECMAScript polyfills
import 'core-js/es/array/fill';
import 'core-js/es/array/find';
import 'core-js/es/array/find-index';
import 'core-js/es/array/from';
import 'core-js/es/array/includes';
import 'core-js/es/number/is-integer';
import 'core-js/es/object/assign';
import 'core-js/es/object/values';
import 'core-js/es/object/entries';
import 'core-js/es/promise';
import 'core-js/es/promise/finally';
import 'core-js/es/string/code-point-at';
import 'core-js/es/string/from-code-point';
import 'core-js/es/string/includes';
import 'core-js/es/string/starts-with';
import 'core-js/es/string/ends-with';
import 'core-js/es/symbol';
import 'core-js/es/map';
import 'core-js/es/weak-map';
import 'core-js/modules/web.url';
import 'core-js/stable';
// Browser polyfills
import 'formdata-polyfill';
......
import $ from 'jquery';
import 'at.js';
import _ from 'underscore';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import glRegexp from './lib/utils/regexp';
import AjaxCache from './lib/utils/ajax_cache';
import { spriteIcon } from './lib/utils/common_utils';
......@@ -53,8 +54,8 @@ export const defaultAutocompleteConfig = {
};
class GfmAutoComplete {
constructor(dataSources) {
this.dataSources = dataSources || {};
constructor(dataSources = {}) {
this.dataSources = dataSources;
this.cachedData = {};
this.isLoadingData = {};
}
......@@ -199,6 +200,16 @@ class GfmAutoComplete {
}
setupMembers($input) {
const fetchData = this.fetchData.bind(this);
const MEMBER_COMMAND = {
ASSIGN: '/assign',
UNASSIGN: '/unassign',
REASSIGN: '/reassign',
CC: '/cc',
};
let assignees = [];
let command = '';
// Team Members
$input.atwho({
at: '@',
......@@ -225,6 +236,48 @@ class GfmAutoComplete {
callbacks: {
...this.getDefaultCallbacks(),
beforeSave: membersBeforeSave,
matcher(flag, subtext) {
const subtextNodes = subtext
.split(/\n+/g)
.pop()
.split(GfmAutoComplete.regexSubtext);
// Check if @ is followed by '/assign', '/reassign', '/unassign' or '/cc' commands.
command = subtextNodes.find(node => {
if (Object.values(MEMBER_COMMAND).includes(node)) {
return node;
}
return null;
});
// Cache assignees list for easier filtering later
assignees = SidebarMediator.singleton?.store?.assignees?.map(
assignee => `${assignee.username} ${assignee.name}`,
);
const match = GfmAutoComplete.defaultMatcher(flag, subtext, this.app.controllers);
return match && match.length ? match[1] : null;
},
filter(query, data, searchKey) {
if (GfmAutoComplete.isLoading(data)) {
fetchData(this.$inputor, this.at);
return data;
}
if (data === GfmAutoComplete.defaultLoadingData) {
return $.fn.atwho.default.callbacks.filter(query, data, searchKey);
}
if (command === MEMBER_COMMAND.ASSIGN) {
// Only include members who are not currently assigned to the Issuable
return data.filter(member => !assignees.includes(member.search));
} else if (command === MEMBER_COMMAND.UNASSIGN) {
// Only include members who are currently assigned to the Issuable
return data.filter(member => assignees.includes(member.search));
}
return data;
},
},
});
}
......
......@@ -44,7 +44,7 @@ export const setLastCommitMessage = ({ commit, rootGetters }, data) => {
const commitMsg = sprintf(
__('Your changes have been committed. Commit %{commitId} %{commitStats}'),
{
commitId: `<a href="${currentProject.web_url}/commit/${data.short_id}" class="commit-sha">${data.short_id}</a>`,
commitId: `<a href="${currentProject.web_url}/-/commit/${data.short_id}" class="commit-sha">${data.short_id}</a>`,
commitStats,
},
false,
......@@ -56,7 +56,7 @@ export const setLastCommitMessage = ({ commit, rootGetters }, data) => {
export const updateFilesAfterCommit = ({ commit, dispatch, rootState, rootGetters }, { data }) => {
const selectedProject = rootGetters.currentProject;
const lastCommit = {
commit_path: `${selectedProject.web_url}/commit/${data.id}`,
commit_path: `${selectedProject.web_url}/-/commit/${data.id}`,
commit: {
id: data.id,
message: data.message,
......
......@@ -209,7 +209,7 @@ export default {
id,
createdAt: created_at,
sha,
commitUrl: `${this.projectPath}/commit/${sha}`,
commitUrl: `${this.projectPath}/-/commit/${sha}`,
tag,
tagUrl: tag ? `${this.tagsPath}/${ref.name}` : null,
ref: ref.name,
......
import 'core-js/es/map';
import 'core-js/es/set';
import { Sortable } from 'sortablejs';
import simulateDrag from './simulate_drag';
import simulateInput from './simulate_input';
......
......@@ -36,7 +36,7 @@ class FlowdockService < Service
token: token,
repo: project.repository,
repo_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}",
commit_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/%s",
commit_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/%s",
diff_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/compare/%s...%s"
)
end
......
......@@ -451,10 +451,6 @@ class Repository
return unless repo_type.project?
# This call is stubbed in tests due to being an expensive operation
# It can be reenabled for specific tests via:
#
# allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_call_original
DetectRepositoryLanguagesWorker.perform_async(project.id)
end
......
......@@ -66,7 +66,7 @@
- if Feature.enabled?(:create_cloud_run_clusters, clusterable, default_enabled: true)
.form-group
= provider_gcp_field.check_box :cloud_run, { label: s_('ClusterIntegration|Enable Cloud Run on GKE (beta)'),
= provider_gcp_field.check_box :cloud_run, { label: s_('ClusterIntegration|Enable Cloud Run for Anthos'),
label_class: 'label-bold' }
.form-text.text-muted
= s_('ClusterIntegration|Uses the Cloud Run, Istio, and HTTP Load Balancing addons for this cluster.')
......
......@@ -22,7 +22,8 @@ module WorkerAttributes
# EE-specific
epics: 2,
incident_management: 2
incident_management: 2,
security_scans: 2
}.stringify_keys.freeze
class_methods do
......
......@@ -6,6 +6,8 @@ const presets = [
[
'@babel/preset-env',
{
useBuiltIns: 'usage',
corejs: { version: 3, proposals: true },
modules: false,
targets: {
ie: '11',
......@@ -51,4 +53,4 @@ if (isJest) {
plugins.push('babel-plugin-dynamic-import-node');
}
module.exports = { presets, plugins };
module.exports = { presets, plugins, sourceType: 'unambiguous' };
---
title: Make smarter user suggestions for assign slash commands
merge_request: 24294
author:
type: added
---
title: Rename Cloud Run on GKE to Cloud Run for Anthos
merge_request: 23694
author:
type: other
---
title: Move commit routes under - scope
merge_request: 24279
author:
type: changed
---
title: Store security scans run in CI jobs
merge_request: 23669
author:
type: other
......@@ -301,17 +301,6 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
defaults: { format: 'json' },
constraints: { template_type: %r{issue|merge_request}, format: 'json' }
resources :commit, only: [:show], constraints: { id: /\h{7,40}/ } do
member do
get :branches
get :pipelines
post :revert
post :cherry_pick
get :diff_for_path
get :merge_requests
end
end
resource :pages, only: [:show, :update, :destroy] do
resources :domains, except: :index, controller: 'pages_domains', constraints: { id: %r{[^/]+} } do
member do
......
......@@ -10,6 +10,17 @@ resource :repository, only: [:create] do
end
end
resources :commit, only: [:show], constraints: { id: /\h{7,40}/ } do
member do
get :branches
get :pipelines
post :revert
post :cherry_pick
get :diff_for_path
get :merge_requests
end
end
# Don't use format parameter as file extension (old 3.0.x behavior)
# See http://guides.rubyonrails.org/routing.html#route-globbing-and-wildcard-segments
scope format: false do
......
......@@ -220,6 +220,8 @@
- 1
- - repository_update_remote_mirror
- 1
- - security_scans
- 2
- - self_monitoring_project_create
- 2
- - self_monitoring_project_delete
......
# frozen_string_literal: true
class CreateSecurityScan < ActiveRecord::Migration[5.2]
DOWNTIME = false
def change
create_table :security_scans, id: :bigserial do |t|
t.timestamps_with_timezone null: false
t.references :build,
null: false,
index: false,
foreign_key: { to_table: :ci_builds, on_delete: :cascade },
type: :bigint
t.integer :scan_type,
null: false,
index: { name: "idx_security_scans_on_scan_type" },
limit: 2
t.index [:build_id, :scan_type], name: "idx_security_scans_on_build_and_scan_type", unique: true
end
end
end
......@@ -3737,6 +3737,15 @@ ActiveRecord::Schema.define(version: 2020_02_04_131054) do
t.index ["group_id", "token_encrypted"], name: "index_scim_oauth_access_tokens_on_group_id_and_token_encrypted", unique: true
end
create_table "security_scans", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.bigint "build_id", null: false
t.integer "scan_type", limit: 2, null: false
t.index ["build_id", "scan_type"], name: "idx_security_scans_on_build_and_scan_type", unique: true
t.index ["scan_type"], name: "idx_security_scans_on_scan_type"
end
create_table "self_managed_prometheus_alert_events", force: :cascade do |t|
t.bigint "project_id", null: false
t.bigint "environment_id"
......@@ -4863,6 +4872,7 @@ ActiveRecord::Schema.define(version: 2020_02_04_131054) do
add_foreign_key "reviews", "users", column: "author_id", on_delete: :nullify
add_foreign_key "saml_providers", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "scim_oauth_access_tokens", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "security_scans", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "self_managed_prometheus_alert_events", "environments", on_delete: :cascade
add_foreign_key "self_managed_prometheus_alert_events", "projects", on_delete: :cascade
add_foreign_key "sentry_issues", "issues", on_delete: :cascade
......
......@@ -38,13 +38,13 @@ To run rspec tests:
```shell
# run all tests
bundle exec rspec
bin/rspec
# run test for path
bundle exec rspec spec/[path]/[to]/[spec].rb
bin/rspec spec/[path]/[to]/[spec].rb
```
Use [guard](https://github.com/guard/guard) to continuously monitor for changes and only run matching tests:
Use [Guard](https://github.com/guard/guard) to continuously monitor for changes and only run matching tests:
```shell
bundle exec guard
......@@ -130,7 +130,7 @@ Note: `live_debug` only works on JavaScript enabled specs.
Run the spec with `CHROME_HEADLESS=0`, e.g.:
```
CHROME_HEADLESS=0 bundle exec rspec some_spec.rb
CHROME_HEADLESS=0 bin/rspec some_spec.rb
```
The test will go by quickly, but this will give you an idea of what's happening.
......@@ -382,8 +382,8 @@ this trait should be either fixed to not rely on Sidekiq processing jobs, or the
the processing of background jobs is needed/expected.
NOTE: **Note:**
The usage of `perform_enqueued_jobs` is currently useless since our
workers aren't inheriting from `ApplicationJob` / `ActiveJob::Base`.
The usage of `perform_enqueued_jobs` is only useful for testing delayed mail
deliveries since our Sidekiq workers aren't inheriting from `ApplicationJob` / `ActiveJob::Base`.
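A minimal sketch of that delayed-mail case (the mailer name, spec objects, and inclusion of `ActiveJob::TestHelper` are assumptions for illustration, not part of this change):

```ruby
# Hypothetical spec: perform_enqueued_jobs helps here because deliver_later
# enqueues through ActiveJob, unlike our Sidekiq workers.
it 'delivers the notification email' do
  perform_enqueued_jobs do
    NotificationMailer.new_note_email(user.id, note.id).deliver_later
  end

  expect(ActionMailer::Base.deliveries.last.to).to include(user.email)
end
```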
#### DNS
......
......@@ -50,6 +50,9 @@ is detected in any other branch (`flaky-examples-check` job). In the future, the
This was originally implemented in: <https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/13021>.
If you want to enable retries locally, you can use the `RETRIES` env variable.
For instance `RETRIES=1 bin/rspec ...` would retry the failing examples once.
[rspec-retry]: https://github.com/NoRedInk/rspec-retry
[`spec/spec_helper.rb`]: https://gitlab.com/gitlab-org/gitlab/blob/master/spec/spec_helper.rb
......
......@@ -232,8 +232,8 @@ To create and add a new Kubernetes cluster to your project, group, or instance:
- **Number of nodes** - Enter the number of nodes you wish the cluster to have.
- **Machine type** - The [machine type](https://cloud.google.com/compute/docs/machine-types)
of the Virtual Machine instance that the cluster will be based on.
- **Enable Cloud Run on GKE (beta)** - Check this if you want to use Cloud Run on GKE for this cluster.
See the [Cloud Run on GKE section](#cloud-run-on-gke) for more information.
- **Enable Cloud Run for Anthos** - Check this if you want to use Cloud Run for Anthos for this cluster.
See the [Cloud Run for Anthos section](#cloud-run-for-anthos) for more information.
- **GitLab-managed cluster** - Leave this checked if you want GitLab to manage namespaces and service accounts for this cluster.
See the [Managed clusters section](index.md#gitlab-managed-clusters) for more information.
1. Finally, click the **Create Kubernetes cluster** button.
......@@ -241,11 +241,11 @@ To create and add a new Kubernetes cluster to your project, group, or instance:
After a couple of minutes, your cluster will be ready to go. You can now proceed
to install some [pre-defined applications](index.md#installing-applications).
#### Cloud Run on GKE
#### Cloud Run for Anthos
> [Introduced](https://gitlab.com/gitlab-org/gitlab/merge_requests/16566) in GitLab 12.4.
You can choose to use Cloud Run on GKE in place of installing Knative and Istio
You can choose to use Cloud Run for Anthos in place of installing Knative and Istio
separately after the cluster has been created. This means that Cloud Run
(Knative), Istio, and HTTP Load Balancing will be enabled on the cluster at
create time and cannot be [installed or uninstalled](../../clusters/applications.md) separately.
......
......@@ -45,7 +45,7 @@ module API
end
def authenticate_job!
job = Ci::Build.find_by_id(params[:id])
job = current_job
validate_job!(job) do
forbidden! unless job_token_valid?(job)
......@@ -54,6 +54,10 @@ module API
job
end
def current_job
@current_job ||= Ci::Build.find_by_id(params[:id])
end
def job_token_valid?(job)
token = (params[JOB_TOKEN_PARAM] || env[JOB_TOKEN_HEADER]).to_s
token && job.valid_token?(token)
......
......@@ -75,6 +75,13 @@ module API
end
resource :jobs do
before do
Gitlab::ApplicationContext.push(
user: -> { current_job&.user },
project: -> { current_job&.project }
)
end
desc 'Request a job' do
success Entities::JobRequest::Response
http_codes [[201, 'Job was scheduled'],
......
......@@ -37,7 +37,7 @@ module Gitlab
id: "c5feabde2d8cd023215af4d2ceeb7a64839fc428",
message: "Add simple search to projects in public area",
timestamp: "2013-05-13T18:18:08+00:00",
url: "https://test.example.com/gitlab/gitlab/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
url: "https://test.example.com/gitlab/gitlab/-/commit/c5feabde2d8cd023215af4d2ceeb7a64839fc428",
author: {
name: "Test User",
email: "test@example.com"
......
......@@ -10,7 +10,7 @@ module RspecFlaky
# This class is responsible for loading/saving JSON reports, and pruning
# outdated examples.
class Report < SimpleDelegator
OUTDATED_DAYS_THRESHOLD = 30
OUTDATED_DAYS_THRESHOLD = 7
attr_reader :flaky_examples
......
......@@ -76,17 +76,26 @@ class GitlabProjectImport
# synchronously as part of that process.
# This ensures that all expensive operations do not escape
# to general Sidekiq clusters/nodes.
def run_isolated_sidekiq_job
def with_isolated_sidekiq_job
Sidekiq::Testing.fake! do
with_request_store do
@project = create_project
execute_sidekiq_job
::Gitlab::GitalyClient.allow_n_plus_1_calls do
yield
end
end
true
end
end
def run_isolated_sidekiq_job
with_isolated_sidekiq_job do
@project = create_project
execute_sidekiq_job
end
end
def create_project
# We are disabling ObjectStorage for `import`
# as it is too slow to handle big archives:
......
......@@ -4070,7 +4070,7 @@ msgstr ""
msgid "ClusterIntegration|Elastic Stack"
msgstr ""
msgid "ClusterIntegration|Enable Cloud Run on GKE (beta)"
msgid "ClusterIntegration|Enable Cloud Run for Anthos"
msgstr ""
msgid "ClusterIntegration|Enable Web Application Firewall"
......@@ -8976,9 +8976,6 @@ msgstr ""
msgid "Geo|Status"
msgstr ""
msgid "Geo|Sync"
msgstr ""
msgid "Geo|Synced"
msgstr ""
......
# frozen_string_literal: true
module QA
context 'Manage', :orchestrated, :mattermost do
context 'Manage', :orchestrated, :mattermost, quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/202069' do
describe 'Mattermost login' do
it 'user logs into Mattermost using GitLab OAuth' do
Flow::Login.sign_in
......
......@@ -37,7 +37,7 @@ describe 'Admin mode for workers', :do_not_mock_admin_mode, :request_store, :cle
gitlab_enable_admin_mode_sign_in(user)
end
it 'can delete user', :sidekiq, :js do
it 'can delete user', :js do
visit admin_user_path(user_to_delete)
click_button 'Delete user'
......
......@@ -282,6 +282,32 @@ describe 'GFM autocomplete', :js do
end
end
context 'assignees' do
let(:issue_assignee) { create(:issue, project: project) }
before do
issue_assignee.update(assignees: [user])
visit project_issue_path(project, issue_assignee)
wait_for_requests
end
it 'lists users who are currently not assigned to the issue when using /assign' do
note = find('#note-body')
page.within '.timeline-content-form' do
note.native.send_keys('/as')
end
find('.atwho-view li', text: '/assign')
note.native.send_keys(:tab)
wait_for_requests
expect(find('#at-view-users .atwho-view-ul')).not_to have_content(user.username)
end
end
context 'labels' do
it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
create(:label, project: project, title: label_xss_title)
......
......@@ -385,7 +385,7 @@ describe('Time series component', () => {
describe('when tooltip is showing deployment data', () => {
const mockSha = 'mockSha';
const commitUrl = `${mockProjectDir}/commit/${mockSha}`;
const commitUrl = `${mockProjectDir}/-/commit/${mockSha}`;
beforeEach(done => {
timeSeriesAreaChart.vm.tooltip.isDeployment = true;
......
......@@ -169,7 +169,7 @@ export const deploymentData = [
iid: 3,
sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
commitUrl:
'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
'http://test.host/frontend-fixtures/environments-project/-/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
name: 'master',
},
......@@ -183,7 +183,7 @@ export const deploymentData = [
iid: 2,
sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
commitUrl:
'http://test.host/frontend-fixtures/environments-project/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
'http://test.host/frontend-fixtures/environments-project/-/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
name: 'master',
},
......@@ -197,7 +197,7 @@ export const deploymentData = [
iid: 1,
sha: '6511e58faafaa7ad2228990ec57f19d66f7db7c2',
commitUrl:
'http://test.host/frontend-fixtures/environments-project/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2',
'http://test.host/frontend-fixtures/environments-project/-/commit/6511e58faafaa7ad2228990ec57f19d66f7db7c2',
ref: {
name: 'update2-readme',
},
......
......@@ -98,7 +98,7 @@ describe EventsHelper do
it 'returns a commit note url' do
event.target = create(:note_on_commit, note: '+1 from me')
expect(subject).to eq("#{project_base_url}/commit/#{event.target.commit_id}#note_#{event.target.id}")
expect(subject).to eq("#{project_base_url}/-/commit/#{event.target.commit_id}#note_#{event.target.id}")
end
it 'returns a project snippet note url' do
......
......@@ -131,7 +131,7 @@ describe('IDE commit module actions', () => {
.dispatch('commit/setLastCommitMessage', { short_id: '123' })
.then(() => {
expect(store.state.lastCommitMsg).toContain(
'Your changes have been committed. Commit <a href="http://testing/commit/123" class="commit-sha">123</a>',
'Your changes have been committed. Commit <a href="http://testing/-/commit/123" class="commit-sha">123</a>',
);
})
.then(done)
......@@ -149,7 +149,7 @@ describe('IDE commit module actions', () => {
})
.then(() => {
expect(store.state.lastCommitMsg).toBe(
'Your changes have been committed. Commit <a href="http://testing/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
'Your changes have been committed. Commit <a href="http://testing/-/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
);
})
.then(done)
......@@ -407,7 +407,7 @@ describe('IDE commit module actions', () => {
.dispatch('commit/commitChanges')
.then(() => {
expect(store.state.lastCommitMsg).toBe(
'Your changes have been committed. Commit <a href="webUrl/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
'Your changes have been committed. Commit <a href="webUrl/-/commit/123" class="commit-sha">123</a> with 1 additions, 2 deletions.',
);
done();
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :sidekiq, :migration, schema: 20180208183958 do
describe Gitlab::BackgroundMigration::PopulateUntrackedUploads, :migration, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
subject { described_class.new }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq, :migration, schema: 20180208183958 do
describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :migration, schema: 20180208183958 do
include MigrationsHelpers::TrackUntrackedUploadsHelpers
let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190527194900_schedule_calculate_wiki_sizes.rb')
describe ScheduleCalculateWikiSizes, :migration, :sidekiq do
describe ScheduleCalculateWikiSizes, :migration do
let(:migration_class) { Gitlab::BackgroundMigration::CalculateWikiSizes }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -76,7 +76,7 @@ describe Gitlab::BackgroundMigration do
end
end
context 'when there are scheduled jobs present', :sidekiq, :redis do
context 'when there are scheduled jobs present', :redis do
it 'steals all jobs from the scheduled sets' do
Sidekiq::Testing.disable! do
BackgroundMigrationWorker.perform_in(10.minutes, 'Object')
......@@ -91,7 +91,7 @@ describe Gitlab::BackgroundMigration do
end
end
context 'when there are enqueued and scheduled jobs present', :sidekiq, :redis do
context 'when there are enqueued and scheduled jobs present', :redis do
it 'steals from the scheduled sets queue first' do
Sidekiq::Testing.disable! do
expect(described_class).to receive(:perform)
......@@ -107,7 +107,7 @@ describe Gitlab::BackgroundMigration do
end
end
context 'when retry_dead_jobs is true', :sidekiq, :redis do
context 'when retry_dead_jobs is true', :redis do
let(:retry_queue) do
[double(args: ['Object', [3]], queue: described_class.queue, delete: true)]
end
......@@ -186,7 +186,7 @@ describe Gitlab::BackgroundMigration do
end
end
context 'when there are scheduled jobs present', :sidekiq, :redis do
context 'when there are scheduled jobs present', :redis do
before do
Sidekiq::Testing.disable! do
BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
......
......@@ -1158,7 +1158,7 @@ describe Gitlab::Database::MigrationHelpers do
end
end
describe 'sidekiq migration helpers', :sidekiq, :redis do
describe 'sidekiq migration helpers', :redis do
let(:worker) do
Class.new do
include Sidekiq::Worker
......@@ -1221,7 +1221,7 @@ describe Gitlab::Database::MigrationHelpers do
end
end
describe '#bulk_queue_background_migration_jobs_by_range', :sidekiq do
describe '#bulk_queue_background_migration_jobs_by_range' do
context 'when the model has an ID column' do
let!(:id1) { create(:user).id }
let!(:id2) { create(:user).id }
......@@ -1293,7 +1293,7 @@ describe Gitlab::Database::MigrationHelpers do
end
end
describe '#queue_background_migration_jobs_by_range_at_intervals', :sidekiq do
describe '#queue_background_migration_jobs_by_range_at_intervals' do
context 'when the model has an ID column' do
let!(:id1) { create(:user).id }
let!(:id2) { create(:user).id }
......
......@@ -59,7 +59,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches commit pipelines endpoint' do
result = described_class.match(
'/my-group/my-project/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json'
'/my-group/my-project/-/commit/aa8260d253a53f73f6c26c734c72fdd600f6e6d4/pipelines.json'
)
expect(result).to be_present
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
describe Gitlab::HashedStorage::Migrator, :redis do
describe '#bulk_schedule_migration' do
it 'schedules job to HashedStorage::MigratorWorker' do
Sidekiq::Testing.fake! do
......
......@@ -215,6 +215,7 @@ ci_pipelines:
- vulnerabilities_occurrence_pipelines
- vulnerability_findings
- pipeline_config
- security_scans
pipeline_variables:
- pipeline
stages:
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::SidekiqVersioning, :sidekiq, :redis do
describe Gitlab::SidekiqVersioning, :redis do
let(:foo_worker) do
Class.new do
def self.name
......
......@@ -10,7 +10,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(commit)
expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.full_path}/commit/#{commit.id}"
expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.full_path}/-/commit/#{commit.id}"
end
end
......@@ -86,7 +86,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/commit/#{note.commit_id}#note_#{note.id}"
expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}"
end
end
......@@ -96,7 +96,7 @@ describe Gitlab::UrlBuilder do
url = described_class.build(note)
expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/commit/#{note.commit_id}#note_#{note.id}"
expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}"
end
end
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190924152703_migrate_issue_trackers_data.rb')
describe MigrateIssueTrackersData, :migration, :sidekiq do
describe MigrateIssueTrackersData, :migration do
let(:services) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180122154930_schedule_set_confidential_note_events_on_services.rb')
describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do
describe ScheduleSetConfidentialNoteEventsOnServices, :migration do
let(:services_table) { table(:services) }
let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180425131009_assure_commits_count_for_merge_request_diff.rb')
describe AssureCommitsCountForMergeRequestDiff, :migration, :sidekiq, :redis do
describe AssureCommitsCountForMergeRequestDiff, :migration, :redis do
let(:migration) { spy('migration') }
before do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180420010616_cleanup_build_stage_migration.rb')
describe CleanupBuildStageMigration, :migration, :sidekiq, :redis do
describe CleanupBuildStageMigration, :migration, :redis do
let(:migration) { spy('migration') }
before do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20190104182041_cleanup_legacy_artifact_migration.rb')
describe CleanupLegacyArtifactMigration, :migration, :sidekiq, :redis do
describe CleanupLegacyArtifactMigration, :migration, :redis do
let(:migration) { spy('migration') }
context 'when still legacy artifacts exist' do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180604123514_cleanup_stages_position_migration.rb')
describe CleanupStagesPositionMigration, :migration, :sidekiq, :redis do
describe CleanupStagesPositionMigration, :migration, :redis do
let(:migration) { spy('migration') }
before do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190620112608_enqueue_reset_merge_status_second_run.rb')
describe EnqueueResetMergeStatusSecondRun, :migration, :sidekiq do
describe EnqueueResetMergeStatusSecondRun, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190528180441_enqueue_reset_merge_status.rb')
describe EnqueueResetMergeStatus, :migration, :sidekiq do
describe EnqueueResetMergeStatus, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180216121030_enqueue_verify_pages_domain_workers')
describe EnqueueVerifyPagesDomainWorkers, :sidekiq, :migration do
describe EnqueueVerifyPagesDomainWorkers, :migration do
around do |example|
Sidekiq::Testing.fake! do
example.run
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181219145520_migrate_cluster_configure_worker_sidekiq_queue.rb')
describe MigrateClusterConfigureWorkerSidekiqQueue, :sidekiq, :redis do
describe MigrateClusterConfigureWorkerSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180306074045_migrate_create_trace_artifact_sidekiq_queue.rb')
describe MigrateCreateTraceArtifactSidekiqQueue, :sidekiq, :redis do
describe MigrateCreateTraceArtifactSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190715193142_migrate_discussion_id_on_promoted_epics.rb')
describe MigrateDiscussionIdOnPromotedEpics, :migration, :sidekiq do
describe MigrateDiscussionIdOnPromotedEpics, :migration do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180816161409_migrate_legacy_artifacts_to_job_artifacts.rb')
describe MigrateLegacyArtifactsToJobArtifacts, :migration, :sidekiq do
describe MigrateLegacyArtifactsToJobArtifacts, :migration do
let(:migration_class) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180603190921_migrate_object_storage_upload_sidekiq_queue.rb')
describe MigrateObjectStorageUploadSidekiqQueue, :sidekiq, :redis do
describe MigrateObjectStorageUploadSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190124200344_migrate_storage_migrator_sidekiq_queue.rb')
describe MigrateStorageMigratorSidekiqQueue, :sidekiq, :redis do
describe MigrateStorageMigratorSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180307012445_migrate_update_head_pipeline_for_merge_request_sidekiq_queue.rb')
describe MigrateUpdateHeadPipelineForMergeRequestSidekiqQueue, :sidekiq, :redis do
describe MigrateUpdateHeadPipelineForMergeRequestSidekiqQueue, :redis do
include Gitlab::Database::MigrationHelpers
include StubWorker
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200114113341_patch_prometheus_services_for_shared_cluster_applications.rb')
describe PatchPrometheusServicesForSharedClusterApplications, :migration, :sidekiq do
describe PatchPrometheusServicesForSharedClusterApplications, :migration do
include MigrationHelpers::PrometheusServiceHelpers
let(:namespaces) { table(:namespaces) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180405101928_reschedule_builds_stages_migration')
describe RescheduleBuildsStagesMigration, :sidekiq, :migration do
describe RescheduleBuildsStagesMigration, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180309121820_reschedule_commits_count_for_merge_request_diff')
describe RescheduleCommitsCountForMergeRequestDiff, :migration, :sidekiq do
describe RescheduleCommitsCountForMergeRequestDiff, :migration do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180913142237_schedule_digest_personal_access_tokens.rb')
describe ScheduleDigestPersonalAccessTokens, :migration, :sidekiq do
describe ScheduleDigestPersonalAccessTokens, :migration do
let(:personal_access_tokens) { table(:personal_access_tokens) }
let(:users) { table(:users) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190524073827_schedule_fill_valid_time_for_pages_domain_certificates.rb')
describe ScheduleFillValidTimeForPagesDomainCertificates, :migration, :sidekiq do
describe ScheduleFillValidTimeForPagesDomainCertificates, :migration do
let(:migration_class) { described_class::MIGRATION }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191002031332_schedule_pages_metadata_migration')
describe SchedulePagesMetadataMigration, :migration, :sidekiq do
describe SchedulePagesMetadataMigration, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190322132835_schedule_populate_merge_request_assignees_table.rb')
describe SchedulePopulateMergeRequestAssigneesTable, :migration, :sidekiq do
describe SchedulePopulateMergeRequestAssigneesTable, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181121111200_schedule_runners_token_encryption')
describe ScheduleRunnersTokenEncryption, :migration, :sidekiq do
describe ScheduleRunnersTokenEncryption, :migration do
let(:settings) { table(:application_settings) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180104131052_schedule_set_confidential_note_events_on_webhooks.rb')
describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do
describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration do
let(:web_hooks_table) { table(:web_hooks) }
let(:migration_class) { Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks }
let(:migration_name) { migration_class.to_s.demodulize }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180420080616_schedule_stages_index_migration')
describe ScheduleStagesIndexMigration, :sidekiq, :migration do
describe ScheduleStagesIndexMigration, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190214112022_schedule_sync_issuables_state_id.rb')
describe ScheduleSyncIssuablesStateId, :migration, :sidekiq do
describe ScheduleSyncIssuablesStateId, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190506135400_schedule_sync_issuables_state_id_where_nil')
describe ScheduleSyncIssuablesStateIdWhereNil, :migration, :sidekiq do
describe ScheduleSyncIssuablesStateIdWhereNil, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190911251732_sync_issuables_state_id')
describe SyncIssuablesStateId, :migration, :sidekiq do
describe SyncIssuablesStateId, :migration do
let(:migration) { described_class.new }
describe '#up' do
......
......@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200106071113_update_fingerprint_sha256_within_keys.rb')
describe UpdateFingerprintSha256WithinKeys, :sidekiq, :migration do
describe UpdateFingerprintSha256WithinKeys, :migration do
let(:key_table) { table(:keys) }
describe '#up' do
......
......@@ -3854,6 +3854,10 @@ describe Ci::Build do
set(:build) { create(:ci_build, project: project) }
let(:path) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
around do |example|
Timecop.freeze { example.run }
end
before do
stub_artifacts_object_storage
end
......
......@@ -267,7 +267,7 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
it { expect(result[:issue].gitlab_commit).to eq(commit_id) }
it { expect(result[:issue].gitlab_commit_path).to eq("/#{project.namespace.path}/#{project.path}/commit/#{commit_id}") }
it { expect(result[:issue].gitlab_commit_path).to eq("/#{project.namespace.path}/#{project.path}/-/commit/#{commit_id}") }
end
end
......
......@@ -421,7 +421,7 @@ describe JiraService do
GlobalID: 'GitLab',
relationship: 'mentioned on',
object: {
url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{commit_id}",
url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}",
title: "Solved by commit #{commit_id}.",
icon: { title: 'GitLab', url16x16: favicon_path },
status: { resolved: true }
......@@ -464,7 +464,7 @@ describe JiraService do
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
expect(WebMock).to have_requested(:post, @comment_url).with(
body: %r{#{custom_base_url}/#{project.full_path}/commit/#{commit_id}}
body: %r{#{custom_base_url}/#{project.full_path}/-/commit/#{commit_id}}
).once
end
......@@ -479,7 +479,7 @@ describe JiraService do
@jira_service.close_issue(resource, ExternalIssue.new('JIRA-123', project))
expect(WebMock).to have_requested(:post, @comment_url).with(
body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{commit_id}}
body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}}
).once
end
......
......@@ -6,8 +6,6 @@ shared_examples 'languages and percentages JSON response' do
let(:expected_languages) { project.repository.languages.map { |language| language.values_at(:label, :value)}.to_h }
before do
allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_call_original
allow(project.repository).to receive(:languages).and_return(
[{ value: 66.69, label: "Ruby", color: "#701516", highlight: "#701516" },
{ value: 22.98, label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" },
......
......@@ -61,9 +61,9 @@ describe API::Releases do
it 'returns rendered helper paths' do
get api("/projects/#{project.id}/releases", maintainer)
expect(json_response.first['commit_path']).to eq("/#{release_2.project.full_path}/commit/#{release_2.commit.id}")
expect(json_response.first['commit_path']).to eq("/#{release_2.project.full_path}/-/commit/#{release_2.commit.id}")
expect(json_response.first['tag_path']).to eq("/#{release_2.project.full_path}/-/tags/#{release_2.tag}")
expect(json_response.second['commit_path']).to eq("/#{release_1.project.full_path}/commit/#{release_1.commit.id}")
expect(json_response.second['commit_path']).to eq("/#{release_1.project.full_path}/-/commit/#{release_1.commit.id}")
expect(json_response.second['tag_path']).to eq("/#{release_1.project.full_path}/-/tags/#{release_1.tag}")
end
......@@ -164,7 +164,7 @@ describe API::Releases do
expect(response).to match_response_schema('public_api/v4/releases')
expect(json_response.first['assets']['count']).to eq(release.links.count + release.sources.count)
expect(json_response.first['commit_path']).to eq("/#{release.project.full_path}/commit/#{release.commit.id}")
expect(json_response.first['commit_path']).to eq("/#{release.project.full_path}/-/commit/#{release.commit.id}")
expect(json_response.first['tag_path']).to eq("/#{release.project.full_path}/-/tags/#{release.tag}")
end
end
......@@ -214,7 +214,7 @@ describe API::Releases do
expect(json_response['author']['name']).to eq(maintainer.name)
expect(json_response['commit']['id']).to eq(commit.id)
expect(json_response['assets']['count']).to eq(4)
expect(json_response['commit_path']).to eq("/#{release.project.full_path}/commit/#{release.commit.id}")
expect(json_response['commit_path']).to eq("/#{release.project.full_path}/-/commit/#{release.commit.id}")
expect(json_response['tag_path']).to eq("/#{release.project.full_path}/-/tags/#{release.tag}")
end
......
......@@ -311,11 +311,33 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe '/api/v4/jobs' do
shared_examples 'application context metadata' do |api_route|
it 'contains correct context metadata' do
# Avoids popping the context from the thread so we can
# check its content after the request.
allow(Labkit::Context).to receive(:pop)
send_request
Labkit::Context.with_context do |context|
expected_context = {
'meta.caller_id' => api_route,
'meta.user' => job.user.username,
'meta.project' => job.project.full_path,
'meta.root_namespace' => job.project.full_path_components.first
}
expect(context.to_h).to include(expected_context)
end
end
end
let(:root_namespace) { create(:namespace) }
let(:namespace) { create(:namespace, parent: root_namespace) }
let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
let(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:user) { create(:user) }
let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
......@@ -984,12 +1006,18 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'PUT /api/v4/jobs/:id' do
let(:job) { create(:ci_build, :pending, :trace_live, pipeline: pipeline, runner_id: runner.id) }
let(:job) do
create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
end
before do
job.run!
end
it_behaves_like 'application context metadata', '/api/:version/jobs/:id' do
let(:send_request) { update_job(state: 'success') }
end
context 'when status is given' do
it 'mark job as succeeded' do
update_job(state: 'success')
......@@ -1139,7 +1167,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'PATCH /api/v4/jobs/:id/trace' do
let(:job) { create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) }
let(:job) do
create(:ci_build, :running, :trace_live,
project: project, user: user, runner_id: runner.id, pipeline: pipeline)
end
let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
......@@ -1148,6 +1179,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
initial_patch_the_trace
end
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/trace' do
let(:send_request) { patch_the_trace }
end
context 'when request is valid' do
it 'gets correct response' do
expect(response.status).to eq 202
......@@ -1399,7 +1434,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'artifacts' do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) }
let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
......@@ -1418,6 +1453,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
authorize_artifacts_with_token_in_params
end
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
let(:send_request) { subject }
end
shared_examples 'authorizes local file' do
it 'succeeds' do
subject
......@@ -1571,6 +1610,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
describe 'POST /api/v4/jobs/:id/artifacts' do
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
let(:send_request) do
upload_artifacts(file_upload, headers_with_token)
end
end
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
......@@ -1971,6 +2016,10 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
let(:send_request) { download_artifact }
end
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::Store::LOCAL }
......
......@@ -445,10 +445,14 @@ describe 'project routing' do
# project_commit GET /:project_id/commit/:id(.:format) commit#show {id: /\h{7,40}/, project_id: /[^\/]+/}
describe Projects::CommitController, 'routing' do
it 'to #show' do
expect(get('/gitlab/gitlabhq/-/commit/4246fbd')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd')
expect(get('/gitlab/gitlabhq/-/commit/4246fbd.diff')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'diff')
expect(get('/gitlab/gitlabhq/-/commit/4246fbd.patch')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'patch')
expect(get('/gitlab/gitlabhq/-/commit/4246fbd13872934f72a8fd0d6fb1317b47b59cb5')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd13872934f72a8fd0d6fb1317b47b59cb5')
end
it 'to #show unscoped routing' do
expect(get('/gitlab/gitlabhq/commit/4246fbd')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd')
expect(get('/gitlab/gitlabhq/commit/4246fbd.diff')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'diff')
expect(get('/gitlab/gitlabhq/commit/4246fbd.patch')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd', format: 'patch')
expect(get('/gitlab/gitlabhq/commit/4246fbd13872934f72a8fd0d6fb1317b47b59cb5')).to route_to('projects/commit#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '4246fbd13872934f72a8fd0d6fb1317b47b59cb5')
end
end
......
......@@ -46,7 +46,7 @@ describe Ci::RetryBuildService do
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed].freeze
resource resource_group_id processed security_scans].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
......
......@@ -69,6 +69,7 @@ describe Git::BranchHooksService do
Gitlab.config.gitlab.url,
project.namespace.to_param,
project.to_param,
'-',
'commit',
commit.id
].join('/')
......
......@@ -421,7 +421,7 @@ describe Git::BranchPushService, services: true do
let(:message) { "this is some work.\n\ncloses JIRA-1" }
let(:comment_body) do
{
body: "Issue solved with [#{closing_commit.id}|http://#{Gitlab.config.gitlab.host}/#{project.full_path}/commit/#{closing_commit.id}]."
body: "Issue solved with [#{closing_commit.id}|http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/commit/#{closing_commit.id}]."
}.to_json
end
......
......@@ -107,6 +107,7 @@ describe Git::TagHooksService, :service do
Gitlab.config.gitlab.url,
project.namespace.to_param,
project.to_param,
'-',
'commit',
commit.id
].join('/')
......
......@@ -6,7 +6,7 @@ describe MergeRequests::MigrateExternalDiffsService do
let(:merge_request) { create(:merge_request) }
let(:diff) { merge_request.merge_request_diff }
describe '.enqueue!', :sidekiq do
describe '.enqueue!' do
around do |example|
Sidekiq::Testing.fake! { example.run }
end
......
......@@ -322,7 +322,7 @@ describe SystemNoteService do
links = []
if link_exists
url = if type == 'commit'
"#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/commit/#{commit.id}"
"#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/-/commit/#{commit.id}"
else
"#{Settings.gitlab.base_url}/#{project.namespace.path}/#{project.path}/-/merge_requests/#{merge_request.iid}"
end
......@@ -462,7 +462,7 @@ describe SystemNoteService do
describe "existing reference" do
before do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])
message = "[#{author.name}|http://localhost/#{author.username}] mentioned this issue in [a commit of #{project.full_path}|http://localhost/#{project.full_path}/commit/#{commit.id}]:\n'#{commit.title.chomp}'"
message = "[#{author.name}|http://localhost/#{author.username}] mentioned this issue in [a commit of #{project.full_path}|http://localhost/#{project.full_path}/-/commit/#{commit.id}]:\n'#{commit.title.chomp}'"
allow_next_instance_of(JIRA::Resource::Issue) do |instance|
allow(instance).to receive(:comments).and_return([OpenStruct.new(body: message)])
end
......
......@@ -121,9 +121,9 @@ RSpec.configure do |config|
config.include ExpectRequestWithStatus, type: :request
config.include RailsHelpers
if ENV['CI']
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run RETRIES + 1 times (4 by default) before failing.
config.default_retry_count = 4
config.default_retry_count = ENV.fetch('RETRIES', 3).to_i + 1
config.reporter.register_listener(
RspecFlaky::Listener.new,
:example_passed,
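The RETRIES variable makes the same retry behaviour available outside CI. A hedged usage sketch (the spec path is only an example):

# RETRIES counts retries, so each example gets RETRIES + 1 attempts in total:
#   RETRIES=1 bundle exec rspec spec/models/user_spec.rb
# which is roughly equivalent to configuring:
RSpec.configure do |config|
  config.default_retry_count = 2 # one retry plus the initial attempt
end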
......@@ -139,6 +139,16 @@ RSpec.configure do |config|
TestEnv.clean_test_path
end
# We can't use an `around` hook here because the wrapping transaction
# is not yet opened at the time that hook is triggered
config.prepend_before do
Gitlab::Database.set_open_transactions_baseline
end
config.append_after do
Gitlab::Database.reset_open_transactions_baseline
end
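The prepend_before/append_after pair runs outside the transactional fixtures, which is why an around hook cannot be used here. A rough sketch of the baseline idea, assuming it simply records ActiveRecord's open transaction count (the actual Gitlab::Database implementation may differ):

# Illustrative only; not the actual Gitlab::Database code.
module OpenTransactionsBaseline
  def self.set_open_transactions_baseline
    @baseline = ActiveRecord::Base.connection.open_transactions
  end

  def self.reset_open_transactions_baseline
    @baseline = nil
  end
end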
config.before do |example|
# Enable all features by default for testing
allow(Feature).to receive(:enabled?) { true }
......@@ -176,12 +186,12 @@ RSpec.configure do |config|
# Stub these calls because they are expensive operations
# They can be re-enabled for specific tests via:
#
# allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_call_original
# allow(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
allow(DetectRepositoryLanguagesWorker).to receive(:perform_async).and_return(true)
# expect(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
allow(Gitlab::Git::KeepAround).to receive(:execute)
# Clear thread cache and Sidekiq queues
Gitlab::ThreadMemoryCache.cache_backend.clear
Sidekiq::Worker.clear_all
# Temporary patch to force admin mode to be active by default in tests when
# using the feature flag :user_mode_in_session, since this will require
......@@ -217,11 +227,14 @@ RSpec.configure do |config|
RequestStore.clear!
end
config.after do
Fog.unmock! if Fog.mock?
config.around do |example|
# Wrap each example in its own context to make sure the contexts don't
# leak
Labkit::Context.with_context { example.run }
end
config.after do
Fog.unmock! if Fog.mock?
Gitlab::CurrentSettings.clear_in_memory_application_settings!
end
......@@ -236,90 +249,6 @@ RSpec.configure do |config|
Gitlab::Metrics.reset_registry!
end
config.around(:each, :use_clean_rails_memory_store_caching) do |example|
caching_store = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
example.run
Rails.cache = caching_store
end
config.around do |example|
# Wrap each example in its own context to make sure the contexts don't
# leak
Labkit::Context.with_context { example.run }
end
config.around(:each, :clean_gitlab_redis_cache) do |example|
redis_cache_cleanup!
example.run
redis_cache_cleanup!
end
config.around(:each, :clean_gitlab_redis_shared_state) do |example|
redis_shared_state_cleanup!
example.run
redis_shared_state_cleanup!
end
config.around(:each, :clean_gitlab_redis_queues) do |example|
redis_queues_cleanup!
example.run
redis_queues_cleanup!
end
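These hooks are metadata-driven, so a spec opts in by tagging itself. A usage sketch (the described subject is only an example):

describe 'merge request diff caching', :clean_gitlab_redis_cache do
  it 'starts from an empty Redis cache' do
    # redis_cache_cleanup! has already wiped the cache before this example,
    # and will wipe it again afterwards.
  end
end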
config.around(:each, :use_clean_rails_memory_store_fragment_caching) do |example|
caching_store = ActionController::Base.cache_store
ActionController::Base.cache_store = ActiveSupport::Cache::MemoryStore.new
ActionController::Base.perform_caching = true
example.run
ActionController::Base.perform_caching = false
ActionController::Base.cache_store = caching_store
end
config.around(:each, :use_sql_query_cache) do |example|
ActiveRecord::Base.cache do
example.run
end
end
# The :each scope runs "inside" the example, so this hook ensures the DB is in the
# correct state before any examples' before hooks are called. This prevents a
# problem where `ScheduleIssuesClosedAtTypeChange` (or any migration that depends
# on background migrations being run inline during test setup) can be broken by
# altering Sidekiq behavior in an unrelated spec like so:
#
# around do |example|
# Sidekiq::Testing.fake! do
# example.run
# end
# end
config.before(:context, :migration) do
schema_migrate_down!
end
# Each example may call `migrate!`, so we must ensure we are migrated down every time
config.before(:each, :migration) do
use_fake_application_settings
schema_migrate_down!
end
config.after(:context, :migration) do
schema_migrate_up!
Gitlab::CurrentSettings.clear_in_memory_application_settings!
end
# This makes sure the `ApplicationController#can?` method is stubbed with the
# original implementation for all view specs.
config.before(:each, type: :view) do
......@@ -327,60 +256,8 @@ RSpec.configure do |config|
Ability.allowed?(*args)
end
end
config.before(:each, :http_pages_enabled) do |_|
allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80'])
end
config.before(:each, :https_pages_enabled) do |_|
allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443'])
end
config.before(:each, :http_pages_disabled) do |_|
allow(Gitlab.config.pages).to receive(:external_http).and_return(false)
end
config.before(:each, :https_pages_disabled) do |_|
allow(Gitlab.config.pages).to receive(:external_https).and_return(false)
end
# We can't use an `around` hook here because the wrapping transaction
# is not yet opened at the time that hook is triggered
config.prepend_before do
Gitlab::Database.set_open_transactions_baseline
end
config.append_after do
Gitlab::Database.reset_open_transactions_baseline
end
end
# add simpler way to match asset paths containing digest strings
RSpec::Matchers.define :match_asset_path do |expected|
match do |actual|
path = Regexp.escape(expected)
extname = Regexp.escape(File.extname(expected))
digest_regex = Regexp.new(path.sub(extname, "(?:-\\h+)?#{extname}") << '$')
digest_regex =~ actual
end
failure_message do |actual|
"expected that #{actual} would include an asset path for #{expected}"
end
failure_message_when_negated do |actual|
"expected that #{actual} would not include an asset path for #{expected}"
end
end
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
end
# Use FactoryBot 4.x behavior:
# https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#associations
FactoryBot.use_parent_strategy = false
ActiveRecord::Migration.maintain_test_schema!
Shoulda::Matchers.configure do |config|
......
# frozen_string_literal: true
RSpec.configure do |config|
config.around(:each, :use_clean_rails_memory_store_caching) do |example|
caching_store = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
example.run
Rails.cache = caching_store
end
config.around(:each, :use_clean_rails_memory_store_fragment_caching) do |example|
caching_store = ActionController::Base.cache_store
ActionController::Base.cache_store = ActiveSupport::Cache::MemoryStore.new
ActionController::Base.perform_caching = true
example.run
ActionController::Base.perform_caching = false
ActionController::Base.cache_store = caching_store
end
config.around(:each, :use_sql_query_cache) do |example|
ActiveRecord::Base.cache do
example.run
end
end
end
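Specs opt into these caching helpers through example metadata. A usage sketch (the spec subject is illustrative):

describe 'repository language detection', :use_clean_rails_memory_store_caching do
  it 'writes through the temporary MemoryStore' do
    Rails.cache.write('key', 'value')
    expect(Rails.cache.read('key')).to eq('value')
  end
end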
# frozen_string_literal: true
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
end
# Use FactoryBot 4.x behavior:
# https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#associations
FactoryBot.use_parent_strategy = false
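Keeping the FactoryBot 4.x behaviour means associations are always created, even when the parent object is only built. An illustrative example (the factory names are assumptions):

# With use_parent_strategy = false:
merge_request = FactoryBot.build(:merge_request)
# the associated project factory is persisted with `create`, even though the
# merge request itself is never saved.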
# frozen_string_literal: true
# add simpler way to match asset paths containing digest strings
RSpec::Matchers.define :match_asset_path do |expected|
match do |actual|
path = Regexp.escape(expected)
extname = Regexp.escape(File.extname(expected))
digest_regex = Regexp.new(path.sub(extname, "(?:-\\h+)?#{extname}") << '$')
digest_regex =~ actual
end
failure_message do |actual|
"expected that #{actual} would include an asset path for #{expected}"
end
failure_message_when_negated do |actual|
"expected that #{actual} would not include an asset path for #{expected}"
end
end
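A usage sketch for the matcher above, for instance against a view spec's rendered output (the asset name is only an example):

# Matches both "application.css" and a fingerprinted variant such as
# "application-1f2e3d4c.css":
expect(rendered).to match_asset_path('application.css')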
# frozen_string_literal: true
RSpec.configure do |config|
# The :each scope runs "inside" the example, so this hook ensures the DB is in the
# correct state before any examples' before hooks are called. This prevents a
# problem where `ScheduleIssuesClosedAtTypeChange` (or any migration that depends
# on background migrations being run inline during test setup) can be broken by
# altering Sidekiq behavior in an unrelated spec like so:
#
# around do |example|
# Sidekiq::Testing.fake! do
# example.run
# end
# end
config.before(:context, :migration) do
schema_migrate_down!
end
# Each example may call `migrate!`, so we must ensure we are migrated down every time
config.before(:each, :migration) do
use_fake_application_settings
schema_migrate_down!
end
config.after(:context, :migration) do
schema_migrate_up!
Gitlab::CurrentSettings.clear_in_memory_application_settings!
end
end
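Migration specs opt into these hooks with the :migration tag. A hedged usage sketch (the migration name and path are illustrative):

require Rails.root.join('db', 'post_migrate', '20200101000000_example_migration.rb')

describe ExampleMigration, :migration do
  it 'migrates the data' do
    migrate!
    # assertions against the migrated schema or data go here
  end
end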
# frozen_string_literal: true
RSpec.configure do |config|
config.before(:each, :http_pages_enabled) do |_|
allow(Gitlab.config.pages).to receive(:external_http).and_return(['1.1.1.1:80'])
end
config.before(:each, :https_pages_enabled) do |_|
allow(Gitlab.config.pages).to receive(:external_https).and_return(['1.1.1.1:443'])
end
config.before(:each, :http_pages_disabled) do |_|
allow(Gitlab.config.pages).to receive(:external_http).and_return(false)
end
config.before(:each, :https_pages_disabled) do |_|
allow(Gitlab.config.pages).to receive(:external_https).and_return(false)
end
end
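Pages specs pick the desired Pages configuration through the same metadata mechanism. A usage sketch (the described behaviour is only an example):

describe 'Pages domain validation', :https_pages_enabled do
  it 'sees external HTTPS pages as configured' do
    expect(Gitlab.config.pages.external_https).to be_present
  end
end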