Commit 2abb1b54 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 8320f795
import axios from 'axios';
import csrf from './csrf';
import suppressAjaxErrorsDuringNavigation from './suppress_ajax_errors_during_navigation';

axios.defaults.headers.common[csrf.headerKey] = csrf.token;
// Used by Rails to check if it is a valid XHR request
@@ -25,6 +26,20 @@ axios.interceptors.response.use(
  },
);
let isUserNavigating = false;
window.addEventListener('beforeunload', () => {
isUserNavigating = true;
});
// Ignore AJAX errors caused by requests
// being cancelled due to browser navigation
const { gon } = window;
const featureFlagEnabled = gon && gon.features && gon.features.suppressAjaxNavigationErrors;
axios.interceptors.response.use(
response => response,
err => suppressAjaxErrorsDuringNavigation(err, isUserNavigating, featureFlagEnabled),
);
export default axios;
/**
......
@@ -15,6 +15,8 @@ export const getPagePath = (index = 0) => {
  return page.split(':')[index];
};

export const getDashPath = (path = window.location.pathname) => path.split('/-/')[1] || null;

export const isInGroupsPage = () => getPagePath() === 'groups';
export const isInProjectPage = () => getPagePath() === 'projects';
......
/**
* An Axios error interceptor that suppresses AJAX errors caused
* by the request being cancelled when the user navigates to a new page
*/
export default (err, isUserNavigating, featureFlagEnabled) => {
if (featureFlagEnabled && isUserNavigating && err.code === 'ECONNABORTED') {
// If the user is navigating away from the current page,
// prevent .then() and .catch() handlers from being
// called by returning a Promise that never resolves
return new Promise(() => {});
}
// The error is not related to browser navigation,
// so propagate the error
return Promise.reject(err);
};
@@ -69,6 +69,7 @@ export default {
      'commentsDisabled',
      'getNoteableData',
      'userCanReply',
      'discussionTabCounter',
    ]),
    noteableType() {
      return this.noteableData.noteableType;
@@ -95,13 +96,13 @@ export default {
      }
    },
    allDiscussions() {
-     if (this.discussonsCount) {
-       this.discussonsCount.textContent = this.allDiscussions.length;
+     if (this.discussionsCount && !this.isLoading) {
+       this.discussionsCount.textContent = this.discussionTabCounter;
      }
    },
  },
  created() {
-   this.discussonsCount = document.querySelector('.js-discussions-count');
+   this.discussionsCount = document.querySelector('.js-discussions-count');
    this.setNotesData(this.notesData);
    this.setNoteableData(this.noteableData);
......
/* eslint-disable no-new */
-import { getPagePath } from '~/lib/utils/common_utils';
+import { getPagePath, getDashPath } from '~/lib/utils/common_utils';
import { ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED } from '~/groups/constants';
import NewGroupChild from '~/groups/new_group_child';
import notificationsDropdown from '~/notifications_dropdown';
@@ -12,9 +12,8 @@ import GroupTabs from './group_tabs';
export default function initGroupDetails(actionName = 'show') {
  const newGroupChildWrapper = document.querySelector('.js-new-project-subgroup');
  const loadableActions = [ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED];
- const paths = window.location.pathname.split('/');
- const subpath = paths[paths.length - 1];
- let action = loadableActions.includes(subpath) ? subpath : getPagePath(1);
+ const dashPath = getDashPath();
+ let action = loadableActions.includes(dashPath) ? dashPath : getPagePath(1);

  if (actionName && action === actionName) {
    action = 'show'; // 'show' resets GroupTabs to default action through base class
  }
......
@@ -11,6 +11,8 @@ module Boards
      def index
        lists = Boards::Lists::ListService.new(board.parent, current_user).execute(board)
        List.preload_preferences_for_user(lists, current_user)

        render json: serialize_as_json(lists)
      end
@@ -51,7 +53,10 @@ module Boards
        service = Boards::Lists::GenerateService.new(board_parent, current_user)

        if service.execute(board)
-         lists = board.lists.movable.preload_associations(current_user)
+         lists = board.lists.movable.preload_associations
          List.preload_preferences_for_user(lists, current_user)

          render json: serialize_as_json(lists)
        else
          head :unprocessable_entity
......
@@ -234,6 +234,7 @@ module Ci
      end

      after_transition pending: :running do |build|
        build.pipeline.persistent_ref.create

        build.deployment&.run

        build.run_after_commit do
......
# frozen_string_literal: true
module Ci
##
# The persistent pipeline ref to ensure runners can safely fetch source code
# even if force-push/source-branch-deletion happens.
class PersistentRef
include ActiveModel::Model
attr_accessor :pipeline
delegate :project, :sha, to: :pipeline
delegate :repository, to: :project
delegate :ref_exists?, :create_ref, :delete_refs, to: :repository
def exist?
return unless enabled?
ref_exists?(path)
rescue
false
end
def create
return unless enabled? && !exist?
create_ref(sha, path)
rescue => e
Gitlab::Sentry
.track_acceptable_exception(e, extra: { pipeline_id: pipeline.id })
end
def delete
return unless enabled?
delete_refs(path)
rescue Gitlab::Git::Repository::NoRepository
# no-op
rescue => e
Gitlab::Sentry
.track_acceptable_exception(e, extra: { pipeline_id: pipeline.id })
end
def path
"refs/#{Repository::REF_PIPELINES}/#{pipeline.id}"
end
private
def enabled?
Feature.enabled?(:depend_on_persistent_pipeline_ref, project)
end
end
end
@@ -174,6 +174,8 @@ module Ci
      after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
        pipeline.run_after_commit do
          pipeline.persistent_ref.delete

          pipeline.all_merge_requests.each do |merge_request|
            next unless merge_request.auto_merge_enabled?
@@ -853,6 +855,10 @@ module Ci
      end
    end

    def persistent_ref
      @persistent_ref ||= PersistentRef.new(pipeline: self)
    end

    private

    def ci_yaml_from_repo
......
@@ -21,20 +21,10 @@ class List < ApplicationRecord
  scope :destroyable, -> { where(list_type: list_types.slice(*destroyable_types).values) }
  scope :movable, -> { where(list_type: list_types.slice(*movable_types).values) }

- scope :preload_associations, -> (user) do
-   preload(:board, label: :priorities)
- end
+ scope :preload_associations, -> { preload(:board, label: :priorities) }

  scope :ordered, -> { order(:list_type, :position) }

- # Loads list with preferences for given user
- # if preferences exists for user or not
- scope :with_preferences_for, -> (user) do
-   return unless user
-
-   includes(:list_user_preferences).where(list_user_preferences: { user_id: [user.id, nil] })
- end

  alias_method :preferences, :list_user_preferences

  class << self
@@ -45,25 +35,25 @@ class List < ApplicationRecord
    def movable_types
      [:label]
    end

    def preload_preferences_for_user(lists, user)
      return unless user

      lists.each { |list| list.preferences_for(user) }
    end
  end

  def preferences_for(user)
    return preferences.build unless user

-   if preferences.loaded?
-     preloaded_preferences_for(user)
-   else
-     preferences.find_or_initialize_by(user: user)
-   end
- end
-
- def preloaded_preferences_for(user)
-   user_preferences =
-     preferences.find do |preference|
-       preference.user_id == user.id
-     end
-
-   user_preferences || preferences.build(user: user)
+   BatchLoader.for(list_id: id, user_id: user.id).batch(default_value: preferences.build(user: user)) do |items, loader|
+     list_ids = items.map { |i| i[:list_id] }
+     user_ids = items.map { |i| i[:user_id] }
+
+     ListUserPreference.where(list_id: list_ids, user_id: user_ids).find_each do |preference|
+       loader.call({ list_id: preference.list_id, user_id: preference.user_id }, preference)
+     end
+   end
  end

  def update_preferences_for(user, preferences = {})
......
@@ -6,6 +6,7 @@ class Repository
  REF_MERGE_REQUEST = 'merge-requests'
  REF_KEEP_AROUND = 'keep-around'
  REF_ENVIRONMENTS = 'environments'
  REF_PIPELINES = 'pipelines'

  ARCHIVE_CACHE_TIME = 60 # Cache archives referred to by a (mutable) ref for 1 minute
  ARCHIVE_CACHE_TIME_IMMUTABLE = 3600 # Cache archives referred to by an immutable reference for 1 hour
@@ -16,7 +17,7 @@ class Repository
    replace
    #{REF_ENVIRONMENTS}
    #{REF_KEEP_AROUND}
-   #{REF_ENVIRONMENTS}
+   #{REF_PIPELINES}
  ].freeze

  include Gitlab::RepositoryCacheAdapter
......
@@ -34,7 +34,8 @@ module Ci
      def refspecs
        specs = []
-       specs << refspec_for_merge_request_ref if merge_request_ref?
+       specs << refspec_for_pipeline_ref if merge_request_ref?
        specs << refspec_for_persistent_ref if persistent_ref_exist?

        if git_depth > 0
          specs << refspec_for_branch(ref) if branch? || legacy_detached_merge_request_pipeline?
@@ -86,10 +87,22 @@ module Ci
        "+#{Gitlab::Git::TAG_REF_PREFIX}#{ref}:#{RUNNER_REMOTE_TAG_PREFIX}#{ref}"
      end

-     def refspec_for_merge_request_ref
+     def refspec_for_pipeline_ref
        "+#{ref}:#{ref}"
      end

      def refspec_for_persistent_ref
        "+#{persistent_ref_path}:#{persistent_ref_path}"
      end

      def persistent_ref_exist?
        pipeline.persistent_ref.exist?
      end

      def persistent_ref_path
        pipeline.persistent_ref.path
      end

      def git_depth_variable
        strong_memoize(:git_depth_variable) do
          variables&.find { |variable| variable[:key] == 'GIT_DEPTH' }
......
@@ -6,7 +6,7 @@ module Boards
      def execute(board)
        board.lists.create(list_type: :backlog) unless board.lists.backlog.exists?

-       board.lists.preload_associations(current_user)
+       board.lists.preload_associations
      end
    end
  end
......
---
title: Naming a project "shared" will no longer automatically open the "Shared Projects" tab.
merge_request: 16847
author: Jesse Hall @jessehall3
type: fixed
---
title: Create a persistent ref per pipeline for keeping pipelines run from force-push
and merged results
merge_request: 17043
author:
type: fixed
---
title: Add trigram index on snippet content
merge_request: 17806
author:
type: performance
---
title: Increase the limit of includes in CI file to 100
merge_request: 17807
author:
type: fixed
---
title: Suppress error messages shown when navigating to a new page
merge_request: 17706
author:
type: fixed
@@ -209,9 +209,7 @@ module.exports = {
        {
          loader: 'css-loader',
          options: {
-           modules: {
-             localIdentName: '[name]__[local].[hash:8].[ext]',
-           },
+           name: '[name].[hash:8].[ext]',
          },
        },
      ],
......
# frozen_string_literal: true
class AddIndexOnSnippetContent < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_snippets_on_content_trigram'
disable_ddl_transaction!
def up
add_concurrent_index :snippets, :content, name: INDEX_NAME, using: :gin, opclass: { content: :gin_trgm_ops }
end
def down
remove_concurrent_index_by_name(:snippets, INDEX_NAME)
end
end
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 2019_09_26_041216) do
+ActiveRecord::Schema.define(version: 2019_09_27_074328) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "pg_trgm"
@@ -3319,6 +3319,7 @@ ActiveRecord::Schema.define(version: 2019_09_26_041216) do
    t.text "description"
    t.text "description_html"
    t.index ["author_id"], name: "index_snippets_on_author_id"
    t.index ["content"], name: "index_snippets_on_content_trigram", opclass: :gin_trgm_ops, using: :gin
    t.index ["file_name"], name: "index_snippets_on_file_name_trigram", opclass: :gin_trgm_ops, using: :gin
    t.index ["project_id"], name: "index_snippets_on_project_id"
    t.index ["title"], name: "index_snippets_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
......
@@ -93,6 +93,17 @@ To check these feature flag values, please ask administrator to execute the foll
> Feature.enable(:ci_use_merge_request_ref) # Enable the feature flag.
```
### Intermittently pipelines fail by `fatal: reference is not a tree:` error
Since pipelines for merged results run on a merge ref of a merge request
(`refs/merge-requests/<iid>/merge`), the Git reference can be overwritten at an
unexpected time, for example, when a source or target branch is advanced.

In this case, the pipeline fails with a `fatal: reference is not a tree:` error,
which indicates that the checkout SHA is not found in the merge ref.
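Conceptually, the failure looks roughly like this from the runner's point of view (the merge request IID and SHA below are illustrative only):

```shell
# The runner fetches the merge ref, then tries to check out the SHA recorded on the pipeline.
git fetch origin "+refs/merge-requests/123/merge:refs/merge-requests/123/merge"
git checkout 1f2e3d4c
# fatal: reference is not a tree: 1f2e3d4c
# The merge ref was re-created after the pipeline started, so that SHA is no longer reachable.
```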
This behavior was improved in GitLab 12.4 by introducing [Persistent pipeline refs](../../pipelines.md#persistent-pipeline-refs).
You should now be able to create pipelines at any time without encountering this error.
## Using Merge Trains **(PREMIUM)**

By enabling [Pipelines for merged results](#pipelines-for-merged-results-premium),
......
@@ -405,3 +405,44 @@ branches, avoiding untrusted code to be executed on the protected runner and
preserving deployment keys and other credentials from being unintentionally
accessed. In order to ensure that jobs intended to be executed on protected
runners will not use regular runners, they must be tagged accordingly.
## Persistent pipeline refs
> [Introduced](https://gitlab.com/gitlab-org/gitlab/merge_requests/17043) in GitLab 12.4.
Previously, you could encounter unexpected pipeline failures when you force-pushed
a branch to its remote repository. To illustrate the problem, suppose you have the following workflow:

1. A user creates a feature branch named `example` and pushes it to a remote repository.
1. A new pipeline starts running on the `example` branch.
1. A user rebases the `example` branch on the latest `master` branch and force-pushes it to its remote repository.
1. A new pipeline starts running on the `example` branch again; however,
   the previous pipeline (2) fails with a `fatal: reference is not a tree:` error.

This is because the previous pipeline cannot find the checkout SHA (which is associated with the pipeline record)
in the `example` branch, whose commit history has been overwritten by the force-push.
Similarly, [Pipelines for merged results](merge_request_pipelines/pipelines_for_merged_results/index.md)
might have failed intermittently due to [the same reason](merge_request_pipelines/pipelines_for_merged_results/index.md#intermittently-pipelines-fail-by-fatal-reference-is-not-a-tree-error).
As of GitLab 12.4, we've improved this behavior by creating a persistent ref for each pipeline.
To illustrate its life cycle:
1. A pipeline is created on a feature branch named `example`.
1. A persistent pipeline ref is created at `refs/pipelines/<pipeline-id>`,
which retains the checkout-SHA of the associated pipeline record.
This persistent ref stays intact during the pipeline execution,
even if the commit history of the `example` branch has been overwritten by force-push.
1. GitLab Runner fetches the persistent pipeline ref and gets source code from the checkout-SHA.
1. When the pipeline finishes, its persistent ref is cleaned up in a background process.
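In terms of Git plumbing, the fetch in step 3 looks roughly like the following (the pipeline ID and SHA are examples; the actual commands are issued by GitLab Runner):

```shell
# Fetch the persistent pipeline ref, then check out the SHA recorded on the pipeline.
git fetch origin "+refs/pipelines/1234:refs/pipelines/1234"
git checkout -f 1f2e3d4c
# Succeeds even if the `example` branch has since been force-pushed,
# because refs/pipelines/1234 still keeps that commit reachable.
```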
NOTE: **Note:** At the moment, this feature is off by default and can be enabled manually
via the `depend_on_persistent_pipeline_ref` feature flag. We plan to remove this
feature flag and enable the behavior by default when 12.4 is released, _if we don't find any issues_.
If you want to turn this behavior on manually, ask an administrator
to execute the following commands in the Rails console.
```shell
> sudo gitlab-rails console # Login to Rails console of GitLab instance.
> project = Project.find_by_full_path('namespace/project-name') # Get the project instance.
> Feature.enable(:depend_on_persistent_pipeline_ref, project) # Enable the feature flag.
```
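If needed, the flag can also be checked or turned back off from the same console; the commands below follow the same pattern as the one above:

```shell
> Feature.enabled?(:depend_on_persistent_pipeline_ref, project) # Check whether the flag is on for the project.
> Feature.disable(:depend_on_persistent_pipeline_ref, project)  # Turn the behavior back off.
```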
@@ -1246,8 +1246,8 @@ Read how caching works and find out some good practices in the
[caching dependencies documentation](../caching/index.md).

`cache` is used to specify a list of files and directories which should be
cached between jobs. You can only use paths that are within the local working
copy.

If `cache` is defined outside the scope of jobs, it means it is set
globally and all jobs will use that definition.

@@ -1417,7 +1417,7 @@ be available for download in the GitLab UI.

#### `artifacts:paths`

You can only use paths that are within the local working copy.
Wildcards can be used that follow the [glob](https://en.wikipedia.org/wiki/Glob_(programming)) patterns and [filepath.Match](https://golang.org/pkg/path/filepath/#Match).

To pass artifacts between different jobs, see [dependencies](#dependencies).
@@ -2296,7 +2296,7 @@ or public project, or template is allowed.

> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/56836) in GitLab 11.9.

Nested includes allow you to compose a set of includes.
A total of 100 includes is allowed.
Duplicate includes are considered a configuration error.

> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/28212) in GitLab 12.4.
@@ -2738,14 +2738,14 @@ unspecified, the default from project settings will be used.

There are three possible values: `clone`, `fetch`, and `none`.

`clone` is the slowest option. It clones the repository from scratch for every
job, ensuring that the local working copy is always pristine.

```yaml
variables:
  GIT_STRATEGY: clone
```

`fetch` is faster as it re-uses the local working copy (falling back to `clone`
if it doesn't exist). `git clean` is used to undo any changes made by the last
job, and `git fetch` is used to retrieve commits made since the last job ran.

@@ -2754,11 +2754,11 @@ variables:
  GIT_STRATEGY: fetch
```

`none` also re-uses the local working copy, but skips all Git operations
(including GitLab Runner's pre-clone script, if present). It is mostly useful
for jobs that operate exclusively on artifacts (e.g., `deploy`). Git repository
data may be present, but it is certain to be out of date, so you should only
rely on files brought into the local working copy from cache or artifacts.

```yaml
variables:
......
@@ -62,8 +62,7 @@ Please help other GitLab users when you can.

The methods people will use to seek help can be found on the [getting help page](https://about.gitlab.com/get-help/).
Sign up for the mailing list, answer GitLab questions on StackOverflow or
respond in the IRC channel.

## I want to contribute
......
@@ -84,7 +84,7 @@ For example, only two pipelines will be created per day if:

To change the Sidekiq worker's frequency:

1. Edit the `gitlab_rails['pipeline_schedule_worker_cron']` value in your instance's `gitlab.rb` file.
1. [Reconfigure GitLab](../../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
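For example, on an Omnibus installation this amounts to something like the following (the cron schedule below is only an illustration):

```shell
# In /etc/gitlab/gitlab.rb (illustrative value — run the worker every 5 minutes):
#   gitlab_rails['pipeline_schedule_worker_cron'] = "*/5 * * * *"
# Then apply the change:
sudo gitlab-ctl reconfigure
```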
For GitLab.com, refer to the [dedicated settings page](../../gitlab_com/index.md#cron-jobs).
......
@@ -21,8 +21,8 @@ from GitLab in a job.

There are two options. Using:

- `git clone`, which is slower since it clones the repository from scratch
  for every job, ensuring that the local working copy is always pristine.
- `git fetch`, which is faster as it re-uses the local working copy (falling
  back to clone if it doesn't exist).
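Roughly speaking, the two strategies boil down to the following (a simplified sketch; the runner's actual commands include additional flags and clean-up):

```shell
# GIT_STRATEGY: clone — start from a fresh working copy every job
git clone "$CI_REPOSITORY_URL" project && cd project

# GIT_STRATEGY: fetch — reuse the existing working copy
git fetch origin && git clean -ffdx
```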
The default Git strategy can be overridden by the [GIT_STRATEGY variable](../../../ci/yaml/README.md#git-strategy)
......
@@ -7,7 +7,7 @@ module Gitlab
      class Mapper
        include Gitlab::Utils::StrongMemoize

-       MAX_INCLUDES = 50
+       MAX_INCLUDES = 100

        FILE_CLASSES = [
          External::File::Remote,
......
@@ -38,6 +38,10 @@ module Gitlab
        gon.current_user_fullname = current_user.name
        gon.current_user_avatar_url = current_user.avatar_url
      end
# Initialize gon.features with any flags that should be
# made globally available to the frontend
push_frontend_feature_flag(:suppress_ajax_navigation_errors, default_enabled: true)
    end

    # Exposes the state of a feature flag to the frontend code.
......
@@ -61,7 +61,7 @@
    "copy-webpack-plugin": "^5.0.4",
    "core-js": "^3.2.1",
    "cropper": "^2.3.0",
-   "css-loader": "^3.2.0",
+   "css-loader": "^1.0.0",
    "d3": "^4.13.0",
    "d3-array": "^1.2.1",
    "d3-axis": "^1.0.8",
......
@@ -14,6 +14,10 @@ describe Boards::ListsController do
  end

  describe 'GET index' do
    before do
      create(:list, board: board)
    end

    it 'returns a successful 200 response' do
      read_board_list user: user, board: board
@@ -22,27 +26,22 @@ describe Boards::ListsController do
    end

    it 'returns a list of board lists' do
-     create(:list, board: board)
-
      read_board_list user: user, board: board

      expect(response).to match_response_schema('lists')
      expect(json_response.length).to eq 3
    end

-   it 'avoids n+1 queries when serializing lists' do
-     list_1 = create(:list, board: board)
-     list_1.update_preferences_for(user, { collapsed: true })
-
-     control_count = ActiveRecord::QueryRecorder.new { read_board_list user: user, board: board }.count
-
-     list_2 = create(:list, board: board)
-     list_2.update_preferences_for(user, { collapsed: true })
-
-     list_3 = create(:list, board: board)
-     list_3.update_preferences_for(user, { collapsed: true })
-
-     expect { read_board_list user: user, board: board }.not_to exceed_query_limit(control_count)
-   end
+   context 'when another user has list preferences' do
+     before do
+       board.lists.first.update_preferences_for(guest, collapsed: true)
+     end
+
+     it 'returns the complete list of board lists' do
+       read_board_list user: user, board: board
+
+       expect(json_response.length).to eq 3
+     end
+   end

    context 'with unauthorized user' do
......
# frozen_string_literal: true
require 'spec_helper'
describe 'Issue page tabs', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, author: user, assignees: [user], project: project) }
describe 'discussions tab counter' do
before do
stub_licensed_features(design_management: true)
stub_feature_flags(design_management_flag: true)
allow(Ability).to receive(:allowed?) { true }
end
subject do
sign_in(user)
visit project_issue_path(project, issue)
wait_for_requests
find('#discussion')
end
context 'new issue' do
it 'displays count of 0' do
is_expected.to have_content('Discussion 0')
end
end
context 'issue with 2 system notes and 1 discussion' do
let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: project, note: "This is good") }
before do
create(:system_note, noteable: issue, project: project, author: user, note: 'description updated')
create(:system_note, noteable: issue, project: project, author: user, note: 'description updated')
end
it 'displays count of 1' do
is_expected.to have_content('Discussion 1')
end
context 'with 1 reply' do
before do
create(:note, noteable: issue, in_reply_to: discussion, discussion_id: discussion.discussion_id, note: 'I also think this is good')
end
it 'displays count of 2' do
is_expected.to have_content('Discussion 2')
end
end
end
end
end
import suppressAjaxErrorsDuringNavigation from '~/lib/utils/suppress_ajax_errors_during_navigation';
import waitForPromises from 'helpers/wait_for_promises';
describe('suppressAjaxErrorsDuringNavigation', () => {
const OTHER_ERR_CODE = 'foo';
const NAV_ERR_CODE = 'ECONNABORTED';
it.each`
isFeatureFlagEnabled | isUserNavigating | code
${false} | ${false} | ${OTHER_ERR_CODE}
${false} | ${false} | ${NAV_ERR_CODE}
${false} | ${true} | ${OTHER_ERR_CODE}
${false} | ${true} | ${NAV_ERR_CODE}
${true} | ${false} | ${OTHER_ERR_CODE}
${true} | ${false} | ${NAV_ERR_CODE}
${true} | ${true} | ${OTHER_ERR_CODE}
`('should return a rejected Promise', ({ isFeatureFlagEnabled, isUserNavigating, code }) => {
const err = { code };
const actual = suppressAjaxErrorsDuringNavigation(err, isUserNavigating, isFeatureFlagEnabled);
return expect(actual).rejects.toBe(err);
});
it('should return a Promise that never resolves', () => {
const err = { code: NAV_ERR_CODE };
const actual = suppressAjaxErrorsDuringNavigation(err, true, true);
const thenCallback = jest.fn();
const catchCallback = jest.fn();
actual.then(thenCallback).catch(catchCallback);
return waitForPromises().then(() => {
expect(thenCallback).not.toHaveBeenCalled();
expect(catchCallback).not.toHaveBeenCalled();
});
});
});
@@ -236,8 +236,15 @@ describe('Frequent Items App Component', () => {
        .then(() => {
          expect(vm.$el.querySelector('.loading-animation')).toBeDefined();
        })
// This test waits for multiple ticks in order to allow the responses to
// propagate through each interceptor installed on the Axios instance.
// This shouldn't be necessary; this test should be refactored to avoid this.
// https://gitlab.com/gitlab-org/gitlab/issues/32479
.then(vm.$nextTick)
        .then(vm.$nextTick)
        .then(vm.$nextTick)
        .then(() => {
          expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(
            mockSearchedProjects.length,
......
@@ -943,4 +943,14 @@ describe('common_utils', () => {
      expect(commonUtils.isScopedLabel({ title: 'foobar' })).toBe(false);
    });
  });
describe('getDashPath', () => {
it('returns the path following /-/', () => {
expect(commonUtils.getDashPath('/some/-/url-with-dashes-/')).toEqual('url-with-dashes-/');
});
it('returns null when no path follows /-/', () => {
expect(commonUtils.getDashPath('/some/url')).toEqual(null);
});
});
});
@@ -3079,6 +3079,12 @@ describe Ci::Build do
      rescue StateMachines::InvalidTransition
    end
it 'ensures pipeline ref existence' do
expect(job.pipeline.persistent_ref).to receive(:create).once
run_job_without_exception
end
    shared_examples 'saves data on transition' do
      it 'saves timeout' do
        expect { job.run! }.to change { job.reload.ensure_metadata.timeout }.from(nil).to(expected_timeout)
......
# frozen_string_literal: true
require 'spec_helper'
describe Ci::PersistentRef do
it 'cleans up persistent refs after pipeline finished' do
pipeline = create(:ci_pipeline, :running)
expect(pipeline.persistent_ref).to receive(:delete).once
pipeline.succeed!
end
context '#exist?' do
subject { pipeline.persistent_ref.exist? }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
let(:project) { create(:project, :repository) }
let(:sha) { project.repository.commit.sha }
context 'when a persistent ref does not exist' do
it { is_expected.to eq(false) }
end
context 'when a persistent ref exists' do
before do
pipeline.persistent_ref.create
end
it { is_expected.to eq(true) }
end
end
context '#create' do
subject { pipeline.persistent_ref.create }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
let(:project) { create(:project, :repository) }
let(:sha) { project.repository.commit.sha }
context 'when a persistent ref does not exist' do
it 'creates a persistent ref' do
subject
expect(pipeline.persistent_ref).to be_exist
end
context 'when depend_on_persistent_pipeline_ref feature flag is disabled' do
before do
stub_feature_flags(depend_on_persistent_pipeline_ref: false)
end
it 'does not create a persistent ref' do
expect(project.repository).not_to receive(:create_ref)
subject
end
end
context 'when sha does not exist in the repository' do
let(:sha) { 'not-exist' }
it 'fails to create a persistent ref' do
subject
expect(pipeline.persistent_ref).not_to be_exist
end
end
end
context 'when a persistent ref already exists' do
before do
pipeline.persistent_ref.create
end
it 'does not create a persistent ref' do
expect(project.repository).not_to receive(:create_ref)
subject
end
end
end
context '#delete' do
subject { pipeline.persistent_ref.delete }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
let(:project) { create(:project, :repository) }
let(:sha) { project.repository.commit.sha }
context 'when a persistent ref exists' do
before do
pipeline.persistent_ref.create
end
it 'deletes the ref' do
expect { subject }.to change { pipeline.persistent_ref.exist? }
.from(true).to(false)
end
end
context 'when a persistent ref does not exist' do
it 'does not raise an error' do
expect { subject }.not_to raise_error
end
end
end
end
@@ -1318,6 +1318,16 @@ describe Ci::Pipeline, :mailer do
    let(:build_b) { create_build('build2', queued_at: 0) }
    let(:build_c) { create_build('build3', queued_at: 0) }

    %w[succeed! drop! cancel! skip!].each do |action|
      context "when the pipeline received #{action} event" do
        it 'deletes a persistent ref' do
          expect(pipeline.persistent_ref).to receive(:delete).once

          pipeline.public_send(action)
        end
      end
    end

  describe '#duration' do
    context 'when multiple builds are finished' do
      before do
......
@@ -136,18 +136,6 @@ describe List do
        expect(preferences).to be_persisted
        expect(preferences.collapsed).to eq(true)
      end
-
-     context 'when preferences are already loaded for user' do
-       it 'gets preloaded user preferences' do
-         fetched_list = described_class.where(id: list.id).with_preferences_for(user).first
-         expect(fetched_list).to receive(:preloaded_preferences_for).with(user).and_call_original
-
-         preferences = fetched_list.preferences_for(user)
-
-         expect(preferences.collapsed).to eq(true)
-       end
-     end
    end

    context 'when preferences for user does not exist' do
......
@@ -207,5 +207,22 @@ describe Ci::BuildRunnerPresenter do
        end
      end
    end
context 'when persistent pipeline ref exists' do
let(:project) { create(:project, :repository) }
let(:sha) { project.repository.commit.sha }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
let(:build) { create(:ci_build, pipeline: pipeline) }
before do
pipeline.persistent_ref.create
end
it 'exposes the persistent pipeline ref' do
is_expected
.to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
end
end
  end
end
# frozen_string_literal: true
require 'spec_helper'
describe Boards::ListsController do
describe '#index' do
let(:board) { create(:board) }
let(:user) { board.project.owner }
it 'does not have N+1 queries' do
login_as(user)
# First request has more queries because we create the default `backlog` list
get board_lists_path(board)
create(:list, board: board)
control_count = ActiveRecord::QueryRecorder.new { get board_lists_path(board) }.count
create_list(:list, 5, board: board)
expect { get board_lists_path(board) }.not_to exceed_query_limit(control_count)
end
end
end
@@ -422,6 +422,18 @@ describe Ci::ProcessPipelineService, '#execute' do
      end
    end
context 'when an exception is raised during a persistent ref creation' do
before do
successful_build('test', stage_idx: 0)
allow_any_instance_of(Ci::PersistentRef).to receive(:delete_refs) { raise ArgumentError }
end
it 'process the pipeline' do
expect { process_pipeline }.not_to raise_error
end
end
    context 'when there are manual action in earlier stages' do
      context 'when first stage has only optional manual actions' do
        before do
@@ -907,6 +919,10 @@ describe Ci::ProcessPipelineService, '#execute' do
    create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
  end
def successful_build(name, **opts)
create(:ci_build, :success, pipeline: pipeline, name: name, **opts)
end
  def delayed_options
    { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
  end
......
@@ -501,6 +501,22 @@ module Ci
          expect(pending_job).to be_archived_failure
        end
      end
context 'when an exception is raised during a persistent ref creation' do
before do
allow_any_instance_of(Ci::PersistentRef).to receive(:exist?) { false }
allow_any_instance_of(Ci::PersistentRef).to receive(:create_ref) { raise ArgumentError }
end
subject { execute(specific_runner, {}) }
it 'picks the build' do
expect(subject).to eq(pending_job)
pending_job.reload
expect(pending_job).to be_running
end
end
    end

    describe '#register_success' do
......