Commit df2eda3f authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent b9bac6db
......@@ -46,9 +46,7 @@ docs lint:
- .docs:rules:docs-lint
image: "registry.gitlab.com/gitlab-org/gitlab-docs:docs-lint"
stage: test
needs:
- job: "retrieve-tests-metadata"
artifacts: false
needs: []
script:
- scripts/lint-doc.sh
# Lint Markdown
......
......@@ -8,6 +8,10 @@
memory-static:
extends: .only-code-memory-job-base
stage: test
needs:
- job: setup-test-env
artifacts: true
variables:
SETUP_DB: "false"
script:
......@@ -36,6 +40,12 @@ memory-on-boot:
extends:
- .only-code-memory-job-base
- .use-pg10
stage: test
needs:
- job: setup-test-env
artifacts: true
- job: compile-assets pull-cache
artifacts: true
variables:
NODE_ENV: "production"
RAILS_ENV: "production"
......
......@@ -3,9 +3,7 @@
- .default-tags
- .default-retry
stage: test
needs:
- job: "retrieve-tests-metadata"
artifacts: false
needs: []
cache:
key: "qa-framework-jobs:v1"
paths:
......
......@@ -12,9 +12,7 @@ code_quality:
- .default-retry
- .reports:rules:code_quality
stage: test
needs:
- job: "retrieve-tests-metadata"
artifacts: false
needs: []
image: docker:stable
allow_failure: true
services:
......@@ -54,9 +52,7 @@ sast:
- .reports:rules:sast
stage: test
allow_failure: true
needs:
- job: "retrieve-tests-metadata"
artifacts: false
needs: []
artifacts:
paths:
- gl-sast-report.json # GitLab-specific
......@@ -94,9 +90,7 @@ dependency_scanning:
- .default-retry
- .reports:rules:dependency_scanning
stage: test
needs:
- job: "retrieve-tests-metadata"
artifacts: false
needs: []
image: docker:stable
variables:
DOCKER_DRIVER: overlay2
......
......@@ -248,9 +248,7 @@ danger-review:
- .review:rules:danger
image: registry.gitlab.com/gitlab-org/gitlab-build-images:danger
stage: test
needs:
- job: "retrieve-tests-metadata"
artifacts: false
needs: []
script:
- git version
- node --version
......
......@@ -23,12 +23,13 @@ cache gems:
extends:
- .default-tags
- .default-retry
dependencies: []
needs: []
gitlab_git_test:
extends:
- .minimal-job
- .setup:rules:gitlab_git_test
stage: test
script:
- spec/support/prepare-gitlab-git-test-for-commit --check-for-changes
......@@ -36,5 +37,6 @@ no_ee_check:
extends:
- .minimal-job
- .setup:rules:no_ee_check
stage: test
script:
- scripts/no-ee-check
......@@ -6,7 +6,8 @@ lint-ci-gitlab:
- .default-retry
- .yaml:rules
image: sdesbure/yamllint:latest
dependencies: []
stage: test
needs: []
variables:
LINT_PATHS: .gitlab-ci.yml .gitlab/ci lib/gitlab/ci/templates changelogs
script:
......
......@@ -3,7 +3,7 @@ import { mapState, mapActions } from 'vuex';
import createFlash from '~/flash';
import { s__ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
import { UNFOLD_COUNT } from '../constants';
import { UNFOLD_COUNT, INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '../constants';
import * as utils from '../store/utils';
import tooltip from '../../vue_shared/directives/tooltip';
......@@ -11,6 +11,16 @@ const EXPAND_ALL = 0;
const EXPAND_UP = 1;
const EXPAND_DOWN = 2;
// Resolves the "new line" number of `diffLine` for the given diff view type.
// Inline lines carry `new_line` directly; parallel lines read it from the
// right side, falling back to the left. Unknown view types yield `undefined`.
const lineNumberByViewType = (viewType, diffLine) => {
  if (viewType === INLINE_DIFF_VIEW_TYPE) {
    return diffLine?.new_line;
  }
  if (viewType === PARALLEL_DIFF_VIEW_TYPE) {
    return (diffLine?.right || diffLine?.left)?.new_line;
  }
  return undefined;
};
export default {
directives: {
tooltip,
......@@ -67,12 +77,16 @@ export default {
...mapActions('diffs', ['loadMoreLines']),
getPrevLineNumber(oldLineNumber, newLineNumber) {
const diffFile = utils.findDiffFile(this.diffFiles, this.fileHash);
const indexForInline = utils.findIndexInInlineLines(diffFile.highlighted_diff_lines, {
const lines = {
[INLINE_DIFF_VIEW_TYPE]: diffFile.highlighted_diff_lines,
[PARALLEL_DIFF_VIEW_TYPE]: diffFile.parallel_diff_lines,
};
const index = utils.getPreviousLineIndex(this.diffViewType, diffFile, {
oldLineNumber,
newLineNumber,
});
const prevLine = diffFile.highlighted_diff_lines[indexForInline - 2];
return (prevLine && prevLine.new_line) || 0;
return lineNumberByViewType(this.diffViewType, lines[this.diffViewType][index - 2]) || 0;
},
callLoadMoreLines(
endpoint,
......@@ -114,7 +128,7 @@ export default {
this.handleExpandAllLines(expandOptions);
}
},
handleExpandUpLines(expandOptions = EXPAND_ALL) {
handleExpandUpLines(expandOptions) {
const { endpoint, fileHash, view, oldLineNumber, newLineNumber, offset } = expandOptions;
const bottom = this.isBottom;
......
......@@ -140,6 +140,7 @@ export default {
addContextLines({
inlineLines: diffFile.highlighted_diff_lines,
parallelLines: diffFile.parallel_diff_lines,
diffViewType: state.diffViewType,
contextLines: lines,
bottom,
lineNumbers,
......
......@@ -13,6 +13,8 @@ import {
LINES_TO_BE_RENDERED_DIRECTLY,
MAX_LINES_TO_BE_RENDERED,
TREE_TYPE,
INLINE_DIFF_VIEW_TYPE,
PARALLEL_DIFF_VIEW_TYPE,
} from '../constants';
export function findDiffFile(files, match, matchKey = 'file_hash') {
......@@ -93,8 +95,7 @@ export function getNoteFormData(params) {
export const findIndexInInlineLines = (lines, lineNumbers) => {
const { oldLineNumber, newLineNumber } = lineNumbers;
return _.findIndex(
lines,
return lines.findIndex(
line => line.old_line === oldLineNumber && line.new_line === newLineNumber,
);
};
......@@ -102,8 +103,7 @@ export const findIndexInInlineLines = (lines, lineNumbers) => {
export const findIndexInParallelLines = (lines, lineNumbers) => {
const { oldLineNumber, newLineNumber } = lineNumbers;
return _.findIndex(
lines,
return lines.findIndex(
line =>
line.left &&
line.right &&
......@@ -112,13 +112,32 @@ export const findIndexInParallelLines = (lines, lineNumbers) => {
);
};
// Maps a diff view type to the matching line-index lookup helper.
const indexGettersByViewType = {
  [INLINE_DIFF_VIEW_TYPE]: findIndexInInlineLines,
  [PARALLEL_DIFF_VIEW_TYPE]: findIndexInParallelLines,
};

// Returns the index of the line matching `lineNumbers` inside the line
// collection that corresponds to `diffViewType` (highlighted/inline lines vs
// parallel lines). Returns `undefined` for an unknown view type.
export const getPreviousLineIndex = (diffViewType, file, lineNumbers) => {
  const findIndex = indexGettersByViewType[diffViewType];
  const lines = {
    [INLINE_DIFF_VIEW_TYPE]: file.highlighted_diff_lines,
    [PARALLEL_DIFF_VIEW_TYPE]: file.parallel_diff_lines,
  };
  return findIndex && findIndex(lines[diffViewType], lineNumbers);
};
export function removeMatchLine(diffFile, lineNumbers, bottom) {
const indexForInline = findIndexInInlineLines(diffFile.highlighted_diff_lines, lineNumbers);
const indexForParallel = findIndexInParallelLines(diffFile.parallel_diff_lines, lineNumbers);
const factor = bottom ? 1 : -1;
diffFile.highlighted_diff_lines.splice(indexForInline + factor, 1);
diffFile.parallel_diff_lines.splice(indexForParallel + factor, 1);
if (indexForInline > -1) {
diffFile.highlighted_diff_lines.splice(indexForInline + factor, 1);
}
if (indexForParallel > -1) {
diffFile.parallel_diff_lines.splice(indexForParallel + factor, 1);
}
}
export function addLineReferences(lines, lineNumbers, bottom, isExpandDown, nextLineNumbers) {
......@@ -160,8 +179,8 @@ export function addLineReferences(lines, lineNumbers, bottom, isExpandDown, next
return linesWithNumbers;
}
export function addContextLines(options) {
const { inlineLines, parallelLines, contextLines, lineNumbers, isExpandDown } = options;
function addParallelContextLines(options) {
const { parallelLines, contextLines, lineNumbers, isExpandDown } = options;
const normalizedParallelLines = contextLines.map(line => ({
left: line,
right: line,
......@@ -170,17 +189,40 @@ export function addContextLines(options) {
const factor = isExpandDown ? 1 : 0;
if (!isExpandDown && options.bottom) {
inlineLines.push(...contextLines);
parallelLines.push(...normalizedParallelLines);
} else {
const inlineIndex = findIndexInInlineLines(inlineLines, lineNumbers);
const parallelIndex = findIndexInParallelLines(parallelLines, lineNumbers);
inlineLines.splice(inlineIndex + factor, 0, ...contextLines);
parallelLines.splice(parallelIndex + factor, 0, ...normalizedParallelLines);
}
}
// Inserts expanded context lines into the inline (highlighted) line list,
// mutating `inlineLines` in place.
function addInlineContextLines(options) {
  const { inlineLines, contextLines, lineNumbers, isExpandDown, bottom } = options;

  // Expanding at the bottom of the file (and not expanding downward) is a
  // plain append.
  if (bottom && !isExpandDown) {
    inlineLines.push(...contextLines);
    return;
  }

  // Otherwise insert at the matched position; a downward expansion inserts
  // just after it.
  const insertionOffset = isExpandDown ? 1 : 0;
  const matchIndex = findIndexInInlineLines(inlineLines, lineNumbers);
  inlineLines.splice(matchIndex + insertionOffset, 0, ...contextLines);
}
// Dispatches context-line insertion to the handler for the active diff view
// type. Unknown view types are a silent no-op.
export function addContextLines(options) {
  const handlersByViewType = {
    [INLINE_DIFF_VIEW_TYPE]: addInlineContextLines,
    [PARALLEL_DIFF_VIEW_TYPE]: addParallelContextLines,
  };

  handlersByViewType[options.diffViewType]?.(options);
}
/**
* Trims the first char of the `richText` property when it's either a space or a diff symbol.
* @param {Object} line
......
import _ from 'underscore';
import { escape } from 'lodash';
/**
Very limited implementation of sprintf supporting only named parameters.
......@@ -17,7 +17,7 @@ export default (input, parameters, escapeParameters = true) => {
if (parameters) {
Object.keys(parameters).forEach(parameterName => {
const parameterValue = parameters[parameterName];
const escapedParameterValue = escapeParameters ? _.escape(parameterValue) : parameterValue;
const escapedParameterValue = escapeParameters ? escape(parameterValue) : parameterValue;
output = output.replace(new RegExp(`%{${parameterName}}`, 'g'), escapedParameterValue);
});
}
......
......@@ -173,8 +173,10 @@ module Ci
scope :queued_before, ->(time) { where(arel_table[:queued_at].lt(time)) }
scope :order_id_desc, -> { order('ci_builds.id DESC') }
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = { project: [:project_feature, :route, { namespace: :route }] }.freeze
scope :preload_project_and_pipeline_project, -> { preload(PROJECT_ROUTE_AND_NAMESPACE_ROUTE, pipeline: PROJECT_ROUTE_AND_NAMESPACE_ROUTE) }
scope :preload_project_and_pipeline_project, -> do
preload(Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
pipeline: Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE)
end
acts_as_taggable
......
......@@ -16,6 +16,10 @@ module Ci
include FromUnion
include UpdatedAtFilterable
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
project: [:project_feature, :route, { namespace: :route }]
}.freeze
BridgeStatusError = Class.new(StandardError)
sha_attribute :source_sha
......
......@@ -18,7 +18,6 @@
module WithUploads
extend ActiveSupport::Concern
include FastDestroyAll::Helpers
include FeatureGate
# Currently there is no simple way how to select only not-mounted
# uploads, it should be all FileUploaders so we select them by
......
......@@ -235,12 +235,17 @@ class MergeRequest < ApplicationRecord
end
scope :join_project, -> { joins(:target_project) }
scope :references_project, -> { references(:target_project) }
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = [
target_project: [:route, { namespace: :route }],
source_project: [:route, { namespace: :route }]
].freeze
scope :with_api_entity_associations, -> {
preload(:assignees, :author, :unresolved_notes, :labels, :milestone,
:timelogs, :latest_merge_request_diff,
metrics: [:latest_closed_by, :merged_by],
target_project: [:route, { namespace: :route }],
source_project: [:route, { namespace: :route }])
*PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
metrics: [:latest_closed_by, :merged_by])
}
scope :by_target_branch_wildcard, ->(wildcard_branch_name) do
where("target_branch LIKE ?", ApplicationRecord.sanitize_sql_like(wildcard_branch_name).tr('*', '%'))
......
---
title: Refuse to start web server without a working ActiveRecord connection
merge_request: 25160
author:
type: other
---
title: Allow users to get Merge Trains entries via Public API
merge_request: 25229
author:
type: added
---
title: Remove duplicate authorization refresh for group members on project creation
merge_request:
author:
type: performance
......@@ -14,6 +14,8 @@ end
if defined?(ActiveRecord::Base)
Gitlab::Cluster::LifecycleEvents.on_before_fork do
raise 'ActiveRecord connection not established. Unable to start.' unless Gitlab::Database.exists?
# the following is highly recommended for Rails + "preload_app true"
# as there's no need for the master process to hold a connection
ActiveRecord::Base.connection.disconnect!
......
# frozen_string_literal: true

# Adds the `npm_package_requests_forwarding` boolean flag (default: false,
# NOT NULL) to the `application_settings` table.
class AddNpmPackageRequestsForwardingToApplicationSettings < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  # No downtime expected for this change.
  DOWNTIME = false

  # NOTE(review): `add_column_with_default` presumably manages its own
  # transactions (see Gitlab::Database::MigrationHelpers), which is why the
  # surrounding DDL transaction is disabled here.
  disable_ddl_transaction!

  def up
    add_column_with_default(:application_settings, :npm_package_requests_forwarding,
                            :boolean,
                            default: false,
                            allow_null: false)
  end

  def down
    remove_column(:application_settings, :npm_package_requests_forwarding)
  end
end
......@@ -351,6 +351,7 @@ ActiveRecord::Schema.define(version: 2020_02_14_034836) do
t.boolean "prevent_merge_requests_committers_approval", default: false, null: false
t.boolean "email_restrictions_enabled", default: false, null: false
t.text "email_restrictions"
t.boolean "npm_package_requests_forwarding", default: false, null: false
t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id"
t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id"
t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id"
......
......@@ -3,28 +3,28 @@
GitLab offers a way to view the changes made within the GitLab server for owners and administrators on a [paid plan][ee].
GitLab system administrators can also take advantage of the logs located on the
filesystem, see [the logs system documentation](logs.md) for more details.
filesystem. See [the logs system documentation](logs.md) for more details.
## Overview
**Audit Events** is a tool for GitLab owners and administrators to be
able to track important events such as who performed certain actions and the
time they happened. These actions could be, for example, change a user
**Audit Events** is a tool for GitLab owners and administrators
to track important events such as who performed certain actions and the
time they happened. For example, these actions could be a change to a user
permission level, who added a new user, or who removed a user.
## Use-cases
## Use cases
- Check who the person was that changed the permission level of a particular
user for a project in GitLab.
- Use it to track which users have access to a certain group of projects
in GitLab, and who gave them that permission level.
- Check who changed the permission level of a particular
user for a GitLab project.
- Track which users have access to a certain group of projects
in GitLab, and who gave them that permission level.
## List of events
There are two kinds of events logged:
- Events scoped to the group or project, used by group / project managers
to look up who made what change.
- Events scoped to the group or project, used by group and project managers
to look up who made a change.
- Instance events scoped to the whole GitLab instance, used by your Compliance team to
perform formal audits.
......@@ -36,9 +36,9 @@ You need Owner [permissions] to view the group Audit Events page.
To view a group's audit events, navigate to **Group > Settings > Audit Events**.
From there, you can see the following actions:
- Group name/path changed
- Group name or path changed
- Group repository size limit changed
- Group created/deleted
- Group created or deleted
- Group changed visibility
- User was added to group and with which [permissions]
- Permissions changes of a user assigned to a group
......@@ -48,11 +48,11 @@ From there, you can see the following actions:
- [Project shared with group](../user/project/members/share_project_with_groups.md)
and with which [permissions]
- Removal of a previously shared group with a project
- LFS enabled/disabled
- LFS enabled or disabled
- Shared runners minutes limit changed
- Membership lock enabled/disabled
- Request access enabled/disabled
- 2FA enforcement/grace period changed
- Membership lock enabled or disabled
- Request access enabled or disabled
- 2FA enforcement or grace period changed
- Roles allowed to create project changed
Group events can also be accessed via the [Group Audit Events API](../api/audit_events.md#group-audit-events-starter)
......@@ -65,8 +65,8 @@ You need Maintainer [permissions] or higher to view the project Audit Events pag
To view a project's audit events, navigate to **Project > Settings > Audit Events**.
From there, you can see the following actions:
- Added/removed deploy keys
- Project created/deleted/renamed/moved(transferred)/changed path
- Added or removed deploy keys
- Project created, deleted, renamed, moved (transferred), changed path
- Project changed visibility level
- User was added to project and with which [permissions]
- Permission changes of a user assigned to a project
......@@ -75,7 +75,7 @@ From there, you can see the following actions:
- Project repository was downloaded
- Project was archived
- Project was unarchived
- Added/removed/updated protected branches
- Added, removed, or updated protected branches
- Release was added to a project
- Release was updated
- Release milestone associations changed
......@@ -94,20 +94,20 @@ In addition to the group and project events, the following user actions are also
recorded:
- Failed Logins
- Sign-in events and the authentication type (standard, LDAP, OmniAuth, etc.)
- Sign-in events and the authentication type (such as standard, LDAP, or OmniAuth)
- Added SSH key
- Added/removed email
- Added or removed email
- Changed password
- Ask for password reset
- Grant OAuth access
- Started/stopped user impersonation
- Started or stopped user impersonation
- Changed username ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/7797) in GitLab 12.8)
- User was deleted ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/251) in GitLab 12.8)
- User was added ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/251) in GitLab 12.8)
- User was blocked via Admin Area ([introduced](https://gitlab.com/gitlab-org/gitlab/issues/251) in GitLab 12.8)
It is possible to filter particular actions by choosing an audit data type from
the filter dropdown box. You can further filter by specific group, project or user
It's possible to filter particular actions by choosing an audit data type from
the filter dropdown box. You can further filter by specific group, project, or user
(for authentication events).
![audit log](img/audit_log.png)
......@@ -116,8 +116,8 @@ Instance events can also be accessed via the [Instance Audit Events API](../api/
### Missing events
Some events are not being tracked in Audit Events. Please see the following
epics for more detail on which events are not being tracked and our progress
Some events are not tracked in Audit Events. See the following
epics for more detail on which events are not being tracked, and our progress
on adding these events into GitLab:
- [Project settings and activity](https://gitlab.com/groups/gitlab-org/-/epics/474)
......@@ -129,8 +129,8 @@ on adding these events into GitLab:
#### Repository push
The current architecture of audit events is not prepared to receive a very high amount of records.
It may make your project/admin audit logs UI very busy and the disk space consumed by the
`audit_events` Postgres table will increase considerably. Thus, it's disabled by default
It may make the user interface for your project or audit logs very busy, and the disk space consumed by the
`audit_events` Postgres table will increase considerably. It's disabled by default
to prevent performance degradations on GitLab instances with very high Git write traffic.
In an upcoming release, Audit Logs for Git push events will be enabled
......
This diff is collapsed.
......@@ -7,7 +7,7 @@
> they got added. For installations from source you will have to install them
> yourself. Over subsequent releases additional GitLab metrics will be captured.
> - Prometheus services are on by default with GitLab 9.0.
> - Prometheus and its exporters do not authenticate users, and will be available
> - Prometheus and its exporters don't authenticate users, and will be available
> to anyone who can access them.
[Prometheus] is a powerful time-series monitoring service, providing a flexible
......@@ -18,7 +18,7 @@ access to high quality time-series monitoring of GitLab services.
## Overview
Prometheus works by periodically connecting to data sources and collecting their
performance metrics via the [various exporters](#bundled-software-metrics). To view
performance metrics through the [various exporters](#bundled-software-metrics). To view
and work with the monitoring data, you can either
[connect directly to Prometheus](#viewing-performance-metrics) or utilize a
dashboard tool like [Grafana](https://grafana.com).
......@@ -26,11 +26,11 @@ dashboard tool like [Grafana](https://grafana.com).
## Configuring Prometheus
NOTE: **Note:**
For installations from source you'll have to install and configure it yourself.
For installations from source, you'll have to install and configure it yourself.
Prometheus and its exporters are on by default, starting with GitLab 9.0.
Prometheus will run as the `gitlab-prometheus` user and listen on
`http://localhost:9090`. By default Prometheus is only accessible from the GitLab server itself.
`http://localhost:9090`. By default, Prometheus is only accessible from the GitLab server itself.
Each exporter will be automatically set up as a
monitoring target for Prometheus, unless individually disabled.
......@@ -51,7 +51,7 @@ To disable Prometheus and all of its exporters, as well as any added in the futu
NOTE: **Note:**
The following change was added in [GitLab Omnibus 8.17][1261]. Although possible,
it's not recommended to change the port Prometheus listens
on as this might affect or conflict with other services running on the GitLab
on, as this might affect or conflict with other services running on the GitLab
server. Proceed at your own risk.
In order to access Prometheus from outside the GitLab server you will need to
......@@ -65,7 +65,7 @@ To change the address/port that Prometheus listens on:
prometheus['listen_address'] = 'localhost:9090'
```
Replace `localhost:9090` with the address/port you want Prometheus to
Replace `localhost:9090` with the address or port you want Prometheus to
listen on. If you would like to allow access to Prometheus to hosts other
than `localhost`, leave out the host, or use `0.0.0.0` to allow public access:
......@@ -106,7 +106,7 @@ prometheus['scrape_configs'] = [
### Using an external Prometheus server
NOTE: **Note:**
Prometheus and most exporters do not support authentication. We do not recommend exposing them outside the local network.
Prometheus and most exporters don't support authentication. We don't recommend exposing them outside the local network.
A few configuration changes are required to allow GitLab to be monitored by an external Prometheus server. External servers are recommended for highly available deployments of GitLab with multiple nodes.
......@@ -151,7 +151,7 @@ To use an external Prometheus server:
}
```
1. [Reconfigure GitLab][reconfigure] to apply the changes
1. [Reconfigure GitLab][reconfigure] to apply the changes.
1. Edit the Prometheus server's configuration file.
1. Add each node's exporters to the Prometheus server's
[scrape target configuration](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#%3Cscrape_config%3E).
......@@ -223,9 +223,9 @@ some workarounds: using a separate FQDN, using server IP, using a separate brows
having [NGINX proxy it][nginx-custom-config].
The performance data collected by Prometheus can be viewed directly in the
Prometheus console or through a compatible dashboard tool.
Prometheus console, or through a compatible dashboard tool.
The Prometheus interface provides a [flexible query language](https://prometheus.io/docs/prometheus/latest/querying/basics/)
to work with the collected data where you can visualize their output.
to work with the collected data where you can visualize the output.
For a more fully featured dashboard, Grafana can be used and has
[official support for Prometheus][prom-grafana].
......@@ -238,23 +238,23 @@ Sample Prometheus queries:
## Prometheus as a Grafana data source
Grafana allows you to import Prometheus performance metrics as a data source
and render the metrics as graphs and dashboards which is helpful with visualisation.
Grafana allows you to import Prometheus performance metrics as a data source,
and render the metrics as graphs and dashboards, which is helpful with visualization.
To add a Prometheus dashboard for a single server GitLab setup:
1. Create a new data source in Grafana.
1. Name your data source i.e GitLab.
1. Name your data source (such as GitLab).
1. Select `Prometheus` in the type dropdown box.
1. Add your Prometheus listen address as the URL and set access to `Browser`.
1. Add your Prometheus listen address as the URL, and set access to `Browser`.
1. Set the HTTP method to `GET`.
1. Save & Test your configuration to verify that it works.
1. Save and test your configuration to verify that it works.
## GitLab metrics
> Introduced in GitLab 9.3.
GitLab monitors its own internal service metrics, and makes them available at the `/-/metrics` endpoint. Unlike other exporters, this endpoint requires authentication as it is available on the same URL and port as user traffic.
GitLab monitors its own internal service metrics, and makes them available at the `/-/metrics` endpoint. Unlike other exporters, this endpoint requires authentication as it's available on the same URL and port as user traffic.
[➔ Read more about the GitLab Metrics.](gitlab_metrics.md)
......@@ -265,8 +265,8 @@ export Prometheus metrics.
### Node exporter
The node exporter allows you to measure various machine resources such as
memory, disk and CPU utilization.
The node exporter allows you to measure various machine resources, such as
memory, disk, and CPU utilization.
[➔ Read more about the node exporter.](node_exporter.md)
......@@ -310,7 +310,7 @@ If your GitLab server is running within Kubernetes, Prometheus will collect metr
To disable the monitoring of Kubernetes:
1. Edit `/etc/gitlab/gitlab.rb`.
1. Add or find and uncomment the following line and set it to `false`:
1. Add (or find and uncomment) the following line and set it to `false`:
```ruby
prometheus['monitor_kubernetes'] = false
......
# Merge Trains API **(PREMIUM)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/36146) in GitLab 12.9.
> - Using this API you can consume GitLab's [Merge Train](../ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md) entries.
Every API call to merge trains must be authenticated with Developer or higher [permissions](../user/permissions.md).
If a user is not a member of a project and the project is private, a `GET` request on that project will result in a `404` status code.
If Merge Trains is not available for the project, a `403` status code will return.
## Merge Trains API pagination
By default, `GET` requests return 20 results at a time because the API results
are paginated.
Read more on [pagination](README.md#pagination).
## List Merge Trains for a project
Get all Merge Trains of the requested project:
```plaintext
GET /projects/:id/merge_trains
GET /projects/:id/merge_trains?scope=complete
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ---------- | --------------------------------------------------------------------------------------------------------------------------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding). |
| `scope` | string | no | Return Merge Trains filtered by the given scope. Available scopes are `active` (to be merged) and `complete` (have been merged). |
| `sort` | string | no | Return Merge Trains sorted in `asc` or `desc` order. Default is `desc`. |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/merge_trains
```
Example response:
```json
[
{
"id": 110,
"merge_request": {
"id": 126,
"iid": 59,
"project_id": 20,
"title": "Test MR 1580978354",
"description": "",
"state": "merged",
"created_at": "2020-02-06T08:39:14.883Z",
"updated_at": "2020-02-06T08:40:57.038Z",
"web_url": "http://local.gitlab.test:8181/root/merge-train-race-condition/-/merge_requests/59"
},
"user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://local.gitlab.test:8181/root"
},
"pipeline": {
"id": 246,
"sha": "bcc17a8ffd51be1afe45605e714085df28b80b13",
"ref": "refs/merge-requests/59/train",
"status": "success",
"created_at": "2020-02-06T08:40:42.410Z",
"updated_at": "2020-02-06T08:40:46.912Z",
"web_url": "http://local.gitlab.test:8181/root/merge-train-race-condition/pipelines/246"
},
"created_at": "2020-02-06T08:39:47.217Z",
"updated_at": "2020-02-06T08:40:57.720Z",
"target_branch": "feature-1580973432",
"status": "merged",
"merged_at": "2020-02-06T08:40:57.719Z",
"duration": 70
}
]
```
......@@ -73,7 +73,7 @@ especially the case for small tables.
If a table is expected to grow in size and you expect your query has to filter
out a lot of rows you may want to consider adding an index. If the table size is
very small (e.g. only a handful of rows) or any existing indexes filter out
very small (e.g. less than `1,000` records) or any existing indexes filter out
enough rows you may _not_ want to add a new index.
## Maintenance Overhead
......
......@@ -304,10 +304,16 @@ combining it with other operations that don't require `disable_ddl_transaction!`
## Adding indexes
If you need to add a unique index, please keep in mind there is the possibility
of existing duplicates being present in the database. This means that should
always _first_ add a migration that removes any duplicates, before adding the
unique index.
Before adding an index, consider if this one is necessary. There are situations in which an index
might not be required, like:
- The table is small (less than `1,000` records) and it's not expected to exponentially grow in size.
- Any existing indexes filter out enough rows.
- The reduction in query timings after the index is added is not significant.
Additionally, wide indexes are not required to match all filter criteria of queries, we just need
to cover enough columns so that the index lookup has a small enough selectivity. Please review our
[Adding Database indexes](adding_database_indexes.md) guide for more details.
When adding an index to a non-empty table make sure to use the method
`add_concurrent_index` instead of the regular `add_index` method.
......@@ -334,6 +340,11 @@ class MyMigration < ActiveRecord::Migration[4.2]
end
```
If you need to add a unique index, please keep in mind there is the possibility
of existing duplicates being present in the database. This means that should
always _first_ add a migration that removes any duplicates, before adding the
unique index.
For a small table (such as an empty one or one with less than `1,000` records),
it is recommended to use `add_index` in a single-transaction migration, combining it with other
operations that don't require `disable_ddl_transaction!`.
......
......@@ -29,7 +29,7 @@ You can access them in two ways.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/22011) in GitLab 12.5.
Go to **Operations > Pod logs** on the sidebar menu.
Go to **{cloud-gear}** **Operations > Pod logs** on the sidebar menu.
![Sidebar menu](img/sidebar_menu_pod_logs_v12_5.png)
......@@ -37,7 +37,7 @@ Go to **Operations > Pod logs** on the sidebar menu.
Logs can be displayed by clicking on a specific pod from [Deploy Boards](../deploy_boards.md):
1. Go to **Operations > Environments** and find the environment which contains the desired pod, like `production`.
1. Go to **{cloud-gear}** **Operations > Environments** and find the environment which contains the desired pod, like `production`.
1. On the **Environments** page, you should see the status of the environment's pods with [Deploy Boards](../deploy_boards.md).
1. When mousing over the list of pods, a tooltip will appear with the exact pod name and status.
![Deploy Boards pod list](img/pod_logs_deploy_board.png)
......@@ -45,7 +45,7 @@ Logs can be displayed by clicking on a specific pod from [Deploy Boards](../depl
### Logs view
The logs view will contain the last 500 lines for a pod, and has control to filter via:
The logs view will contain the last 500 lines for a pod, and has control to filter through:
- Pods.
- [From GitLab 12.4](https://gitlab.com/gitlab-org/gitlab/issues/5769), environments.
......@@ -62,14 +62,14 @@ Support for historical data is coming [in a future release](https://gitlab.com/g
When you enable [Elastic Stack](../../clusters/applications.md#elastic-stack) on your cluster, you can filter by date.
Click on "Show last" to see the available options.
Click on **Show last** to see the available options.
### Full text search
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21656) in GitLab 12.7.
When you enable [Elastic Stack](../../clusters/applications.md#elastic-stack) on your cluster,
you can search the content of your logs via a search bar.
you can search the content of your logs through a search bar.
The search is passed on to Elasticsearch using the [simple_query_string](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html)
Elasticsearch function, which supports the following operators:
......
......@@ -359,6 +359,10 @@ module API
render_api_error!('405 Method Not Allowed', 405)
end
def service_unavailable!
render_api_error!('503 Service Unavailable', 503)
end
def conflict!(message = nil)
render_api_error!(message || '409 Conflict', 409)
end
......
......@@ -21618,12 +21618,18 @@ msgstr ""
msgid "VulnerabilityManagement|Confirm"
msgstr ""
msgid "VulnerabilityManagement|Create issue"
msgstr ""
msgid "VulnerabilityManagement|Dismiss"
msgstr ""
msgid "VulnerabilityManagement|Resolved"
msgstr ""
msgid "VulnerabilityManagement|Something went wrong, could not create an issue."
msgstr ""
msgid "VulnerabilityManagement|Something went wrong, could not update vulnerability state."
msgstr ""
......
......@@ -7,10 +7,11 @@ describe 'User views diffs', :js do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
let(:project) { create(:project, :public, :repository) }
let(:view) { 'inline' }
before do
stub_feature_flags(diffs_batch_load: false)
visit(diffs_project_merge_request_path(project, merge_request))
visit(diffs_project_merge_request_path(project, merge_request, view: view))
wait_for_requests
......@@ -20,12 +21,20 @@ describe 'User views diffs', :js do
shared_examples 'unfold diffs' do
it 'unfolds diffs upwards' do
first('.js-unfold').click
expect(find('.file-holder[id="a5cc2925ca8258af241be7e5b0381edf30266302"] .text-file')).to have_content('.bundle')
page.within('.file-holder[id="a5cc2925ca8258af241be7e5b0381edf30266302"]') do
expect(find('.text-file')).to have_content('.bundle')
expect(page).to have_selector('.new_line [data-linenumber="1"]', count: 1)
end
end
it 'unfolds diffs to the start' do
first('.js-unfold-all').click
expect(find('.file-holder[id="a5cc2925ca8258af241be7e5b0381edf30266302"] .text-file')).to have_content('.rbc')
it 'unfolds diffs in the middle' do
page.within('.file-holder[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd"]') do
all('.js-unfold-all')[1].click
expect(page).to have_selector('.new_line [data-linenumber="24"]', count: 1)
expect(page).not_to have_selector('.new_line [data-linenumber="1"]')
end
end
it 'unfolds diffs downwards' do
......@@ -66,13 +75,7 @@ describe 'User views diffs', :js do
end
context 'when in the side-by-side view' do
before do
find('.js-show-diff-settings').click
click_button 'Side-by-side'
wait_for_requests
end
let(:view) { 'parallel' }
it 'shows diffs in parallel' do
expect(page).to have_css('.parallel')
......
{
  "type": "object",
  "properties": {
    "id": { "type": "integer" },
    "iid": { "type": "integer" },
    "project_id": { "type": "integer" },
    "title": { "type": "string" },
    "description": { "type": ["string", "null"] },
    "state": { "type": "string" },
    "created_at": { "type": "date" },
    "updated_at": { "type": "date" },
    "web_url": { "type": "uri" },
    "head_pipeline": {
      "oneOf": [
        { "type": "null" },
        { "$ref": "pipeline/detail.json" }
      ]
    }
  },
  "required": [
    "id", "iid", "project_id", "title", "description",
    "state", "created_at", "updated_at", "web_url"
  ]
}
import Vue from 'vue';
import { cloneDeep } from 'lodash';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import { createStore } from '~/mr_notes/stores';
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
import { getPreviousLineIndex } from '~/diffs/store/utils';
import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
import diffFileMockData from '../mock_data/diff_file';
const EXPAND_UP_CLASS = '.js-unfold';
const EXPAND_DOWN_CLASS = '.js-unfold-down';
const EXPAND_ALL_CLASS = '.js-unfold-all';
const LINE_TO_USE = 5;
const lineSources = {
[INLINE_DIFF_VIEW_TYPE]: 'highlighted_diff_lines',
[PARALLEL_DIFF_VIEW_TYPE]: 'parallel_diff_lines',
};
const lineHandlers = {
[INLINE_DIFF_VIEW_TYPE]: line => line,
[PARALLEL_DIFF_VIEW_TYPE]: line => line.right || line.left,
};
function makeLoadMoreLinesPayload({
sinceLine,
toLine,
oldLineNumber,
diffViewType,
fileHash,
nextLineNumbers = {},
unfold = false,
bottom = false,
isExpandDown = false,
}) {
return {
endpoint: 'contextLinesPath',
params: {
since: sinceLine,
to: toLine,
offset: toLine + 1 - oldLineNumber,
view: diffViewType,
unfold,
bottom,
},
lineNumbers: {
oldLineNumber,
newLineNumber: toLine + 1,
},
nextLineNumbers,
fileHash,
isExpandDown,
};
}
function getLine(file, type, index) {
const source = lineSources[type];
const handler = lineHandlers[type];
return handler(file[source][index]);
}
describe('DiffExpansionCell', () => {
const matchLine = diffFileMockData.highlighted_diff_lines[5];
let mockFile;
let mockLine;
let store;
let vm;
beforeEach(() => {
mockFile = cloneDeep(diffFileMockData);
mockLine = getLine(mockFile, INLINE_DIFF_VIEW_TYPE, LINE_TO_USE);
store = createStore();
store.state.diffs.diffFiles = [mockFile];
spyOn(store, 'dispatch').and.returnValue(Promise.resolve());
});
const createComponent = (options = {}) => {
const cmp = Vue.extend(DiffExpansionCell);
const defaults = {
fileHash: diffFileMockData.file_hash,
fileHash: mockFile.file_hash,
contextLinesPath: 'contextLinesPath',
line: matchLine,
line: mockLine,
isTop: false,
isBottom: false,
};
const props = Object.assign({}, defaults, options);
return createComponentWithStore(cmp, createStore(), props).$mount();
vm = createComponentWithStore(cmp, store, props).$mount();
};
const findExpandUp = () => vm.$el.querySelector(EXPAND_UP_CLASS);
const findExpandDown = () => vm.$el.querySelector(EXPAND_DOWN_CLASS);
const findExpandAll = () => vm.$el.querySelector(EXPAND_ALL_CLASS);
describe('top row', () => {
it('should have "expand up" and "show all" option', () => {
const vm = createComponent({
createComponent({
isTop: true,
});
const el = vm.$el;
expect(el.querySelector(EXPAND_UP_CLASS)).not.toBe(null);
expect(el.querySelector(EXPAND_DOWN_CLASS)).toBe(null);
expect(el.querySelector(EXPAND_ALL_CLASS)).not.toBe(null);
expect(findExpandUp()).not.toBe(null);
expect(findExpandDown()).toBe(null);
expect(findExpandAll()).not.toBe(null);
});
});
describe('middle row', () => {
it('should have "expand down", "show all", "expand up" option', () => {
const vm = createComponent();
const el = vm.$el;
createComponent();
expect(el.querySelector(EXPAND_UP_CLASS)).not.toBe(null);
expect(el.querySelector(EXPAND_DOWN_CLASS)).not.toBe(null);
expect(el.querySelector(EXPAND_ALL_CLASS)).not.toBe(null);
expect(findExpandUp()).not.toBe(null);
expect(findExpandDown()).not.toBe(null);
expect(findExpandAll()).not.toBe(null);
});
});
describe('bottom row', () => {
it('should have "expand down" and "show all" option', () => {
const vm = createComponent({
createComponent({
isBottom: true,
});
const el = vm.$el;
expect(el.querySelector(EXPAND_UP_CLASS)).toBe(null);
expect(el.querySelector(EXPAND_DOWN_CLASS)).not.toBe(null);
expect(el.querySelector(EXPAND_ALL_CLASS)).not.toBe(null);
expect(findExpandUp()).toBe(null);
expect(findExpandDown()).not.toBe(null);
expect(findExpandAll()).not.toBe(null);
});
});
describe('any row', () => {
[
{ diffViewType: INLINE_DIFF_VIEW_TYPE, file: { parallel_diff_lines: [] } },
{ diffViewType: PARALLEL_DIFF_VIEW_TYPE, file: { highlighted_diff_lines: [] } },
].forEach(({ diffViewType, file }) => {
describe(`with diffViewType (${diffViewType})`, () => {
beforeEach(() => {
mockLine = getLine(mockFile, diffViewType, LINE_TO_USE);
store.state.diffs.diffFiles = [{ ...mockFile, ...file }];
store.state.diffs.diffViewType = diffViewType;
});
it('does not initially dispatch anything', () => {
expect(store.dispatch).not.toHaveBeenCalled();
});
it('on expand all clicked, dispatch loadMoreLines', () => {
const oldLineNumber = mockLine.meta_data.old_pos;
const newLineNumber = mockLine.meta_data.new_pos;
const previousIndex = getPreviousLineIndex(diffViewType, mockFile, {
oldLineNumber,
newLineNumber,
});
createComponent();
findExpandAll().click();
expect(store.dispatch).toHaveBeenCalledWith(
'diffs/loadMoreLines',
makeLoadMoreLinesPayload({
fileHash: mockFile.file_hash,
toLine: newLineNumber - 1,
sinceLine: previousIndex,
oldLineNumber,
diffViewType,
}),
);
});
it('on expand up clicked, dispatch loadMoreLines', () => {
mockLine.meta_data.old_pos = 200;
mockLine.meta_data.new_pos = 200;
const oldLineNumber = mockLine.meta_data.old_pos;
const newLineNumber = mockLine.meta_data.new_pos;
createComponent();
findExpandUp().click();
expect(store.dispatch).toHaveBeenCalledWith(
'diffs/loadMoreLines',
makeLoadMoreLinesPayload({
fileHash: mockFile.file_hash,
toLine: newLineNumber - 1,
sinceLine: 179,
oldLineNumber,
diffViewType,
unfold: true,
}),
);
});
it('on expand down clicked, dispatch loadMoreLines', () => {
mockFile[lineSources[diffViewType]][LINE_TO_USE + 1] = cloneDeep(
mockFile[lineSources[diffViewType]][LINE_TO_USE],
);
const nextLine = getLine(mockFile, diffViewType, LINE_TO_USE + 1);
nextLine.meta_data.old_pos = 300;
nextLine.meta_data.new_pos = 300;
mockLine.meta_data.old_pos = 200;
mockLine.meta_data.new_pos = 200;
createComponent();
findExpandDown().click();
expect(store.dispatch).toHaveBeenCalledWith('diffs/loadMoreLines', {
endpoint: 'contextLinesPath',
params: {
since: 1,
to: 21, // the load amount, plus 1 line
offset: 0,
view: diffViewType,
unfold: true,
bottom: true,
},
lineNumbers: {
// when expanding down, these are based on the previous line, 0, in this case
oldLineNumber: 0,
newLineNumber: 0,
},
nextLineNumbers: { old_line: 200, new_line: 200 },
fileHash: mockFile.file_hash,
isExpandDown: true,
});
});
});
});
});
});
......@@ -167,7 +167,7 @@ describe('DiffsStoreMutations', () => {
highlighted_diff_lines: [],
parallel_diff_lines: [],
};
const state = { diffFiles: [diffFile] };
const state = { diffFiles: [diffFile], diffViewType: 'viewType' };
const lines = [{ old_line: 1, new_line: 1 }];
const findDiffFileSpy = spyOnDependency(mutations, 'findDiffFile').and.returnValue(diffFile);
......@@ -195,6 +195,7 @@ describe('DiffsStoreMutations', () => {
expect(addContextLinesSpy).toHaveBeenCalledWith({
inlineLines: diffFile.highlighted_diff_lines,
parallelLines: diffFile.parallel_diff_lines,
diffViewType: 'viewType',
contextLines: options.contextLines,
bottom: options.params.bottom,
lineNumbers: options.lineNumbers,
......
import { clone } from 'lodash';
import * as utils from '~/diffs/store/utils';
import {
LINE_POSITION_LEFT,
......@@ -8,6 +9,7 @@ import {
NEW_LINE_TYPE,
OLD_LINE_TYPE,
MATCH_LINE_TYPE,
INLINE_DIFF_VIEW_TYPE,
PARALLEL_DIFF_VIEW_TYPE,
} from '~/diffs/constants';
import { MERGE_REQUEST_NOTEABLE_TYPE } from '~/notes/constants';
......@@ -47,7 +49,38 @@ describe('DiffsStoreUtils', () => {
describe('findIndexInParallelLines', () => {
it('should return correct index for given line numbers', () => {
expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallel_diff_lines, {});
expectSet(utils.findIndexInParallelLines, getDiffFileMock().parallel_diff_lines, []);
});
});
});
describe('getPreviousLineIndex', () => {
[
{ diffViewType: INLINE_DIFF_VIEW_TYPE, file: { parallel_diff_lines: [] } },
{ diffViewType: PARALLEL_DIFF_VIEW_TYPE, file: { highlighted_diff_lines: [] } },
].forEach(({ diffViewType, file }) => {
describe(`with diffViewType (${diffViewType}) in split diffs`, () => {
let diffFile;
beforeEach(() => {
diffFile = { ...clone(diffFileMockData), ...file };
});
it('should return the correct previous line number', () => {
const emptyLines =
diffViewType === INLINE_DIFF_VIEW_TYPE
? diffFile.parallel_diff_lines
: diffFile.highlighted_diff_lines;
// This expectation asserts that we cannot possibly be using the opposite view type lines in the next expectation
expect(emptyLines.length).toBe(0);
expect(
utils.getPreviousLineIndex(diffViewType, diffFile, {
oldLineNumber: 3,
newLineNumber: 5,
}),
).toBe(4);
});
});
});
});
......@@ -80,44 +113,59 @@ describe('DiffsStoreUtils', () => {
});
describe('addContextLines', () => {
it('should add context lines', () => {
const diffFile = getDiffFileMock();
const inlineLines = diffFile.highlighted_diff_lines;
const parallelLines = diffFile.parallel_diff_lines;
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
const contextLines = [{ lineNumber: 42, line_code: '123' }];
const options = { inlineLines, parallelLines, contextLines, lineNumbers };
const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers);
const parallelIndex = utils.findIndexInParallelLines(parallelLines, lineNumbers);
const normalizedParallelLine = {
left: options.contextLines[0],
right: options.contextLines[0],
line_code: '123',
};
utils.addContextLines(options);
expect(inlineLines[inlineIndex]).toEqual(contextLines[0]);
expect(parallelLines[parallelIndex]).toEqual(normalizedParallelLine);
});
it('should add context lines properly with bottom parameter', () => {
const diffFile = getDiffFileMock();
const inlineLines = diffFile.highlighted_diff_lines;
const parallelLines = diffFile.parallel_diff_lines;
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
const contextLines = [{ lineNumber: 42, line_code: '123' }];
const options = { inlineLines, parallelLines, contextLines, lineNumbers, bottom: true };
const normalizedParallelLine = {
left: options.contextLines[0],
right: options.contextLines[0],
line_code: '123',
};
utils.addContextLines(options);
[INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE].forEach(diffViewType => {
it(`should add context lines for ${diffViewType}`, () => {
const diffFile = getDiffFileMock();
const inlineLines = diffFile.highlighted_diff_lines;
const parallelLines = diffFile.parallel_diff_lines;
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
const contextLines = [{ lineNumber: 42, line_code: '123' }];
const options = { inlineLines, parallelLines, contextLines, lineNumbers, diffViewType };
const inlineIndex = utils.findIndexInInlineLines(inlineLines, lineNumbers);
const parallelIndex = utils.findIndexInParallelLines(parallelLines, lineNumbers);
const normalizedParallelLine = {
left: options.contextLines[0],
right: options.contextLines[0],
line_code: '123',
};
utils.addContextLines(options);
if (diffViewType === INLINE_DIFF_VIEW_TYPE) {
expect(inlineLines[inlineIndex]).toEqual(contextLines[0]);
} else {
expect(parallelLines[parallelIndex]).toEqual(normalizedParallelLine);
}
});
expect(inlineLines[inlineLines.length - 1]).toEqual(contextLines[0]);
expect(parallelLines[parallelLines.length - 1]).toEqual(normalizedParallelLine);
it(`should add context lines properly with bottom parameter for ${diffViewType}`, () => {
const diffFile = getDiffFileMock();
const inlineLines = diffFile.highlighted_diff_lines;
const parallelLines = diffFile.parallel_diff_lines;
const lineNumbers = { oldLineNumber: 3, newLineNumber: 5 };
const contextLines = [{ lineNumber: 42, line_code: '123' }];
const options = {
inlineLines,
parallelLines,
contextLines,
lineNumbers,
bottom: true,
diffViewType,
};
const normalizedParallelLine = {
left: options.contextLines[0],
right: options.contextLines[0],
line_code: '123',
};
utils.addContextLines(options);
if (diffViewType === INLINE_DIFF_VIEW_TYPE) {
expect(inlineLines[inlineLines.length - 1]).toEqual(contextLines[0]);
} else {
expect(parallelLines[parallelLines.length - 1]).toEqual(normalizedParallelLine);
}
});
});
});
......
......@@ -5,8 +5,7 @@ require 'spec_helper'
describe Banzai::Filter::InlineMetricsRedactorFilter do
include FilterSpecHelper
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:url) { urls.metrics_dashboard_project_environment_url(project, 1, embedded: true) }
let(:input) { %(<a href="#{url}">example</a>) }
let(:doc) { filter(input) }
......
......@@ -121,7 +121,7 @@ describe Banzai::Pipeline::GfmPipeline do
end
describe 'emoji in references' do
set(:project) { create(:project, :public) }
let_it_be(:project) { create(:project, :public) }
let(:emoji) { '💯' }
it 'renders a label reference with emoji inside' do
......
......@@ -18,17 +18,16 @@ describe EventFilter do
end
describe '#apply_filter' do
set(:public_project) { create(:project, :public) }
set(:push_event) { create(:push_event, project: public_project) }
set(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
set(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
set(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
set(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
set(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
set(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
set(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
set(:left_event) { create(:event, :left, project: public_project, target: public_project) }
let_it_be(:public_project) { create(:project, :public) }
let_it_be(:push_event) { create(:push_event, project: public_project) }
let_it_be(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
let_it_be(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
let_it_be(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
let_it_be(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
let_it_be(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
let_it_be(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
let_it_be(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
let_it_be(:left_event) { create(:event, :left, project: public_project, target: public_project) }
let(:filtered_events) { described_class.new(filter).apply_filter(Event.all) }
......
......@@ -3,10 +3,9 @@
require 'spec_helper'
describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project) }
let(:gl_auth) { described_class }
set(:project) { create(:project) }
describe 'constants' do
it 'API_SCOPES contains all scopes for API access' do
expect(subject::API_SCOPES).to eq %i[api read_user]
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Checks::ForcePush do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
describe '.force_push?' do
it 'returns false if the repo is empty' do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Build::Policy::Changes do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
describe '#satisfied_by?' do
describe 'paths matching' do
......@@ -89,7 +89,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
end
describe 'gitaly integration' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
......@@ -119,6 +119,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
end
context 'when branch is created' do
let_it_be(:project) { create(:project, :repository) }
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
ref: 'feature',
......
......@@ -18,7 +18,7 @@ describe Gitlab::Ci::Build::Policy::Kubernetes do
end
context 'when kubernetes service is inactive' do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
it 'is not satisfied by a pipeline without kubernetes available' do
expect(described_class.new('active'))
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Build::Policy::Variables do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:pipeline) do
build(:ci_empty_pipeline, project: project, ref: 'master', source: :push)
......
......@@ -3,13 +3,11 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Local do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { local: location } }
let(:local_file) { described_class.new(params, context) }
......
......@@ -3,10 +3,9 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Project do
set(:context_project) { create(:project) }
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:context_project) { create(:project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:context_user) { user }
let(:context_params) { { project: context_project, sha: '12345', user: context_user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
......
......@@ -3,14 +3,12 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Template do
set(:project) { create(:project) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:context_params) { { project: project, sha: '12345', user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:template) { 'Auto-DevOps.gitlab-ci.yml' }
let(:params) { { template: template } }
let(:template_file) { described_class.new(params, context) }
before do
......
......@@ -5,9 +5,8 @@ require 'spec_helper'
describe Gitlab::Ci::Config::External::Mapper do
include StubRequests
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
......
......@@ -5,10 +5,9 @@ require 'spec_helper'
describe Gitlab::Ci::Config::External::Processor do
include StubRequests
set(:project) { create(:project, :repository) }
set(:another_project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:another_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
......
......@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Ci::Config do
include StubRequests
set(:user) { create(:user) }
let_it_be(:user) { create(:user) }
before do
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Build do
set(:project) { create(:project, :repository) }
set(:user) { create(:user, developer_projects: [project]) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
let(:pipeline) { Ci::Pipeline.new }
let(:variables_attributes) do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Command do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
describe '#initialize' do
subject do
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Create do
set(:project) { create(:project) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:pipeline) do
build(:ci_empty_pipeline, project: project, ref: 'master')
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Populate do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:pipeline) do
build(:ci_pipeline, project: project, ref: 'master', user: user)
......
......@@ -3,9 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Sequence do
set(:project) { create(:project) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline) }
let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new }
let(:first_step) { spy('first step') }
......
......@@ -3,9 +3,9 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Skip do
set(:project) { create(:project) }
set(:user) { create(:user) }
set(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline, reload: true) { create(:ci_pipeline, project: project) }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:pipeline) do
build_stubbed(:ci_pipeline, project: project)
......
......@@ -3,10 +3,9 @@
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline) }
let!(:step) { described_class.new(pipeline, command) }
before do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Status::Composite do
set(:pipeline) { create(:ci_pipeline) }
let_it_be(:pipeline) { create(:ci_pipeline) }
before(:all) do
@statuses = HasStatus::STATUSES_ENUM.map do |status, idx|
......
......@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers
set(:build) { create(:ci_build, :running) }
let_it_be(:build) { create(:ci_build, :running) }
let(:chunked_io) { described_class.new(build) }
before do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
set(:build) { create(:ci_build, :running) }
let_it_be(:build) { create(:ci_build, :running) }
before do
stub_feature_flags(ci_enable_live_trace: true)
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::DataBuilder::WikiPage do
set(:project) { create(:project, :repository, :wiki_repo) }
let_it_be(:project) { create(:project, :repository, :wiki_repo) }
let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
let(:user) { create(:user) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Git::LfsChanges do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:blob_object_id) { '0c304a93cb8430108629bbbcaa27db3343299bc0' }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Git::MergeBase do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject(:merge_base) { described_class.new(repository, refs) }
......
......@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::Git::Push do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:oldrev) { project.commit('HEAD~2').id }
let(:newrev) { project.commit.id }
let(:ref) { 'refs/heads/some-branch' }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe ::Gitlab::GitPostReceive do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
subject { described_class.new(project, "project-#{project.id}", changes.dup, {}) }
......
......@@ -3,10 +3,10 @@
require 'spec_helper'
describe Gitlab::GitalyClient::OperationService do
set(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository.raw }
let(:client) { described_class.new(repository) }
set(:user) { create(:user) }
let(:gitaly_user) { Gitlab::Git::User.from_gitlab(user).to_gitaly }
describe '#user_create_branch' do
......
......@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::GlobalId do
describe '.build' do
set(:object) { create(:issue) }
let_it_be(:object) { create(:issue) }
it 'returns a standard GlobalId if only object is passed' do
expect(described_class.build(object).to_s).to eq(object.to_global_id.to_s)
......
......@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Graphql::Loaders::BatchLfsOidLoader do
include GraphqlHelpers
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.blob_at('master', 'files/lfs/lfs_object.iso'), repository) }
let(:otherblob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.blob_at('master', 'README'), repository) }
......
......@@ -186,7 +186,7 @@ describe Gitlab::HashedStorage::Migrator, :redis do
end
describe 'migration_pending?' do
set(:project) { create(:project, :empty_repo) }
let_it_be(:project) { create(:project, :empty_repo) }
it 'returns true when there are MigratorWorker jobs scheduled' do
Sidekiq::Testing.disable! do
......@@ -210,7 +210,7 @@ describe Gitlab::HashedStorage::Migrator, :redis do
end
describe 'rollback_pending?' do
set(:project) { create(:project, :empty_repo) }
let_it_be(:project) { create(:project, :empty_repo) }
it 'returns true when there are RollbackerWorker jobs scheduled' do
Sidekiq::Testing.disable! do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::HookData::IssuableBuilder do
set(:user) { create(:user) }
let_it_be(:user) { create(:user) }
# This shared example requires a `builder` and `user` variable
shared_examples 'issuable hook data' do |kind|
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::HookData::IssueBuilder do
set(:label) { create(:label) }
set(:issue) { create(:labeled_issue, labels: [label], project: label.project) }
let_it_be(:label) { create(:label) }
let_it_be(:issue) { create(:labeled_issue, labels: [label], project: label.project) }
let(:builder) { described_class.new(issue) }
describe '#build' do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::HookData::MergeRequestBuilder do
set(:merge_request) { create(:merge_request) }
let_it_be(:merge_request) { create(:merge_request) }
let(:builder) { described_class.new(merge_request) }
describe '#build' do
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Gitlab::Import::MergeRequestHelpers, type: :helper do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
describe '.create_merge_request_without_hooks' do
let(:iid) { 42 }
......
......@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::ImportExport::RepoSaver do
describe 'bundle a project Git repo' do
set(:user) { create(:user) }
let_it_be(:user) { create(:user) }
let!(:project) { create(:project, :repository) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
......
......@@ -4,8 +4,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::WikiRepoSaver do
describe 'bundle a wiki Git repo' do
set(:user) { create(:user) }
let!(:project) { create(:project, :wiki_repo) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :wiki_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
let(:wiki_bundler) { described_class.new(project: project, shared: shared) }
......
......@@ -3,13 +3,12 @@
require 'spec_helper'
describe Gitlab::JsonCache do
let_it_be(:broadcast_message) { create(:broadcast_message) }
let(:backend) { double('backend').as_null_object }
let(:namespace) { 'geo' }
let(:key) { 'foo' }
let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}" }
set(:broadcast_message) { create(:broadcast_message) }
subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
describe '#active?' do
......
......@@ -3,9 +3,9 @@
require 'spec_helper'
describe Gitlab::LanguageDetection do
set(:project) { create(:project, :repository) }
set(:ruby) { create(:programming_language, name: 'Ruby') }
set(:haskell) { create(:programming_language, name: 'Haskell') }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:ruby) { create(:programming_language, name: 'Ruby') }
let_it_be(:haskell) { create(:programming_language, name: 'Haskell') }
let(:repository) { project.repository }
let(:detection) do
[{ value: 66.63, label: "Ruby", color: "#701516", highlight: "#701516" },
......
......@@ -5,9 +5,9 @@ require 'spec_helper'
describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
set(:project) { create(:project) }
set(:user) { create(:user) }
set(:environment) { create(:environment, project: project) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:redis) { Gitlab::Redis::Cache }
subject(:map) { described_class.new(project) }
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::PhabricatorImport::Issues::TaskImporter do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:task) do
Gitlab::PhabricatorImport::Representation::Task.new(
{
......
......@@ -73,7 +73,7 @@ describe Gitlab::ProjectTemplate do
end
describe 'validate all templates' do
set(:admin) { create(:admin) }
let_it_be(:admin) { create(:admin) }
described_class.all.each do |template|
it "#{template.name} has a valid archive" do
......
......@@ -4,7 +4,7 @@ require 'spec_helper'
describe ::Gitlab::RepoPath do
describe '.parse' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
context 'a repository storage path' do
it 'parses a full repository path' do
......
......@@ -7,9 +7,9 @@ describe Gitlab::Sanitizers::Exif do
describe '#batch_clean' do
context 'with image uploads' do
set(:upload1) { create(:upload, :with_file, :issuable_upload) }
set(:upload2) { create(:upload, :with_file, :personal_snippet_upload) }
set(:upload3) { create(:upload, :with_file, created_at: 3.days.ago) }
let_it_be(:upload1) { create(:upload, :with_file, :issuable_upload) }
let_it_be(:upload2) { create(:upload, :with_file, :personal_snippet_upload) }
let_it_be(:upload3) { create(:upload, :with_file, created_at: 3.days.ago) }
it 'processes all uploads if range ID is not set' do
expect(sanitizer).to receive(:clean).exactly(3).times
......
......@@ -4,8 +4,7 @@ require 'spec_helper'
require 'stringio'
describe Gitlab::Shell do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:gitlab_shell) { described_class.new }
let(:popen_vars) { { 'GIT_TERMINAL_PROMPT' => ENV['GIT_TERMINAL_PROMPT'] } }
......
......@@ -22,11 +22,11 @@ describe Gitlab::SlashCommands::IssueMove, service: true do
end
describe '#execute' do
set(:user) { create(:user) }
set(:issue) { create(:issue) }
set(:chat_name) { create(:chat_name, user: user) }
set(:project) { issue.project }
set(:other_project) { create(:project, namespace: project.namespace) }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue) }
let_it_be(:chat_name) { create(:chat_name, user: user) }
let_it_be(:project) { issue.project }
let_it_be(:other_project) { create(:project, namespace: project.namespace) }
before do
[project, other_project].each { |prj| prj.add_maintainer(user) }
......
......@@ -3,11 +3,11 @@
require 'spec_helper'
describe Gitlab::SlashCommands::Presenters::IssueMove do
set(:admin) { create(:admin) }
set(:project) { create(:project) }
set(:other_project) { create(:project) }
set(:old_issue) { create(:issue, project: project) }
set(:new_issue) { Issues::MoveService.new(project, admin).execute(old_issue, other_project) }
let_it_be(:admin) { create(:admin) }
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:other_project) { create(:project) }
let_it_be(:old_issue, reload: true) { create(:issue, project: project) }
let(:new_issue) { Issues::MoveService.new(project, admin).execute(old_issue, other_project) }
let(:attachment) { subject[:attachments].first }
subject { described_class.new(new_issue).present(old_issue) }
......
......@@ -3,8 +3,7 @@
require 'spec_helper'
describe Gitlab::Template::Finders::RepoTemplateFinder do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:categories) { { 'HTML' => 'html' } }
subject(:finder) { described_class.new(project, 'files/', '.html', categories) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Workhorse do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
def decode_workhorse_header(array)
......
......@@ -17,7 +17,8 @@ JS_CONSOLE_FILTER = Regexp.union([
'"[HMR] Waiting for update signal from WDS..."',
'"[WDS] Hot Module Replacement enabled."',
'"[WDS] Live Reloading enabled."',
"Download the Vue Devtools extension"
'Download the Vue Devtools extension',
'Download the Apollo DevTools'
])
CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment