Commit 6a7cc8c1 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 87231973
......@@ -202,7 +202,7 @@
- name: redis:alpine
.use-pg10:
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.3-golang-1.11-git-2.22-chrome-73.0-node-12.x-yarn-1.16-postgresql-10-graphicsmagick-1.3.33"
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.3-golang-1.12-git-2.22-chrome-73.0-node-12.x-yarn-1.16-postgresql-10-graphicsmagick-1.3.33"
services:
- name: postgres:10.9
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
......@@ -216,7 +216,7 @@
- name: docker.elastic.co/elasticsearch/elasticsearch:5.6.12
.use-pg10-ee:
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.3-golang-1.11-git-2.22-chrome-73.0-node-12.x-yarn-1.16-postgresql-10-graphicsmagick-1.3.33"
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.3-golang-1.12-git-2.22-chrome-73.0-node-12.x-yarn-1.16-postgresql-10-graphicsmagick-1.3.33"
services:
- name: postgres:10.9
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
......
/* eslint-disable class-methods-use-this */
/**
* This file is intended to be deleted.
* The existing functions will be removed one by one in favor of using the board store directly.
* see https://gitlab.com/gitlab-org/gitlab-foss/issues/61621
*/
import boardsStore from '~/boards/stores/boards_store';
export default class BoardService {
generateBoardsPath(id) {
return boardsStore.generateBoardsPath(id);
}
generateIssuesPath(id) {
return boardsStore.generateIssuesPath(id);
}
static generateIssuePath(boardId, id) {
return boardsStore.generateIssuePath(boardId, id);
}
all() {
return boardsStore.all();
}
generateDefaultLists() {
return boardsStore.generateDefaultLists();
}
createList(entityId, entityType) {
return boardsStore.createList(entityId, entityType);
}
updateList(id, position, collapsed) {
return boardsStore.updateList(id, position, collapsed);
}
destroyList(id) {
return boardsStore.destroyList(id);
}
getIssuesForList(id, filter = {}) {
return boardsStore.getIssuesForList(id, filter);
}
moveIssue(id, fromListId = null, toListId = null, moveBeforeId = null, moveAfterId = null) {
return boardsStore.moveIssue(id, fromListId, toListId, moveBeforeId, moveAfterId);
}
moveMultipleIssues({
ids,
fromListId = null,
toListId = null,
moveBeforeId = null,
moveAfterId = null,
}) {
return boardsStore.moveMultipleIssues({ ids, fromListId, toListId, moveBeforeId, moveAfterId });
}
newIssue(id, issue) {
return boardsStore.newIssue(id, issue);
}
getBacklog(data) {
return boardsStore.getBacklog(data);
}
bulkUpdate(issueIds, extraData = {}) {
return boardsStore.bulkUpdate(issueIds, extraData);
}
static getIssueInfo(endpoint) {
return boardsStore.getIssueInfo(endpoint);
}
static toggleIssueSubscription(endpoint) {
return boardsStore.toggleIssueSubscription(endpoint);
}
allBoards() {
return boardsStore.allBoards();
}
recentBoards() {
return boardsStore.recentBoards();
}
createBoard(board) {
return boardsStore.createBoard(board);
}
deleteBoard({ id }) {
return boardsStore.deleteBoard({ id });
}
}
window.BoardService = BoardService;
<script>
import { __ } from '~/locale';
import { mapGetters, mapActions } from 'vuex';
import { __ } from '~/locale';
import { getLocationHash, doesHashExistInUrl } from '../../lib/utils/url_utility';
import Flash from '../../flash';
import * as constants from '../constants';
......@@ -71,6 +71,9 @@ export default {
'userCanReply',
'discussionTabCounter',
]),
discussionTabCounterText() {
return this.isLoading ? '' : this.discussionTabCounter;
},
noteableType() {
return this.noteableData.noteableType;
},
......@@ -95,9 +98,9 @@ export default {
this.fetchNotes();
}
},
allDiscussions() {
if (this.discussionsCount && !this.isLoading) {
this.discussionsCount.textContent = this.discussionTabCounter;
discussionTabCounterText(val) {
if (this.discussionsCount) {
this.discussionsCount.textContent = val;
}
},
},
......
<script>
import { GlSkeletonLoader } from '@gitlab/ui';
export default {
components: {
GlSkeletonLoader,
},
};
</script>
<template>
<div class="prepend-top-default">
<div class="mr-widget-heading p-3">
<gl-skeleton-loader :width="577" :height="12">
<rect width="86" height="12" rx="2" />
<rect x="96" width="300" height="12" rx="2" />
</gl-skeleton-loader>
</div>
<div class="mr-widget-heading mr-widget-workflow p-3">
<gl-skeleton-loader :width="577" :height="72">
<rect width="120" height="12" rx="2" />
<rect y="20" width="300" height="12" rx="2" />
<rect y="40" width="60" height="12" rx="2" />
<rect y="40" x="68" width="100" height="12" rx="2" />
<rect y="60" width="40" height="12" rx="2" />
</gl-skeleton-loader>
</div>
</div>
</template>
......@@ -7,6 +7,7 @@ import MRWidgetStore from 'ee_else_ce/vue_merge_request_widget/stores/mr_widget_
import MRWidgetService from 'ee_else_ce/vue_merge_request_widget/services/mr_widget_service';
import stateMaps from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
import createFlash from '../flash';
import Loading from './components/loading.vue';
import WidgetHeader from './components/mr_widget_header.vue';
import WidgetMergeHelp from './components/mr_widget_merge_help.vue';
import MrWidgetPipelineContainer from './components/mr_widget_pipeline_container.vue';
......@@ -44,6 +45,7 @@ export default {
// eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
name: 'MRWidget',
components: {
Loading,
'mr-widget-header': WidgetHeader,
'mr-widget-merge-help': WidgetMergeHelp,
MrWidgetPipelineContainer,
......@@ -80,12 +82,12 @@ export default {
},
},
data() {
const store = new MRWidgetStore(this.mrData || window.gl.mrWidgetData);
const service = this.createService(store);
const store = this.mrData && new MRWidgetStore(this.mrData);
return {
mr: store,
state: store.state,
service,
state: store && store.state,
service: store && this.createService(store),
};
},
computed: {
......@@ -133,29 +135,58 @@ export default {
}
},
},
created() {
this.initPolling();
this.bindEventHubListeners();
eventHub.$on('mr.discussion.updated', this.checkStatus);
},
mounted() {
this.setFaviconHelper();
this.initDeploymentsPolling();
if (this.shouldRenderMergedPipeline) {
this.initPostMergeDeploymentsPolling();
if (gon && gon.features && gon.features.asyncMrWidget) {
MRWidgetService.fetchInitialData()
.then(({ data }) => this.initWidget(data))
.catch(() =>
createFlash(__('Unable to load the merge request widget. Try reloading the page.')),
);
} else {
this.initWidget();
}
},
beforeDestroy() {
eventHub.$off('mr.discussion.updated', this.checkStatus);
if (this.pollingInterval) {
this.pollingInterval.destroy();
}
if (this.deploymentsInterval) {
this.deploymentsInterval.destroy();
}
if (this.postMergeDeploymentsInterval) {
this.postMergeDeploymentsInterval.destroy();
}
},
methods: {
initWidget(data = {}) {
if (this.mr) {
this.mr.setData({ ...window.gl.mrWidgetData, ...data });
} else {
this.mr = new MRWidgetStore({ ...window.gl.mrWidgetData, ...data });
}
if (!this.state) {
this.state = this.mr.state;
}
if (!this.service) {
this.service = this.createService(this.mr);
}
this.setFaviconHelper();
this.initDeploymentsPolling();
if (this.shouldRenderMergedPipeline) {
this.initPostMergeDeploymentsPolling();
}
this.initPolling();
this.bindEventHubListeners();
eventHub.$on('mr.discussion.updated', this.checkStatus);
},
getServiceEndpoints(store) {
return {
mergePath: store.mergePath,
......@@ -319,7 +350,7 @@ export default {
};
</script>
<template>
<div class="mr-state-widget prepend-top-default">
<div v-if="mr" class="mr-state-widget prepend-top-default">
<mr-widget-header :mr="mr" />
<mr-widget-pipeline-container
v-if="shouldRenderPipelines"
......@@ -377,4 +408,5 @@ export default {
:is-post-merge="true"
/>
</div>
<loading v-else />
</template>
......@@ -61,4 +61,11 @@ export default class MRWidgetService {
static fetchMetrics(metricsUrl) {
return axios.get(`${metricsUrl}.json`);
}
static fetchInitialData() {
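// Fetch the cached and full widget payloads in parallel and merge them into a single data object.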
return Promise.all([
axios.get(window.gl.mrWidgetData.merge_request_cached_widget_path),
axios.get(window.gl.mrWidgetData.merge_request_widget_path),
]).then(axios.spread((res, cachedRes) => ({ data: Object.assign(res.data, cachedRes.data) })));
}
}
......@@ -51,6 +51,10 @@
position: relative;
border: 1px solid $border-color;
border-radius: $border-radius-default;
.gl-skeleton-loader {
display: block;
}
}
.mr-widget-extension {
......
......@@ -25,6 +25,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, @project.group)
push_frontend_feature_flag(:release_search_filter, @project, default_enabled: true)
push_frontend_feature_flag(:async_mr_widget, @project)
end
around_action :allow_gitaly_ref_name_caching, only: [:index, :show, :discussions]
......
......@@ -11,7 +11,6 @@ class Projects::PipelinesController < Projects::ApplicationController
before_action :authorize_create_pipeline!, only: [:new, :create]
before_action :authorize_update_pipeline!, only: [:retry, :cancel]
before_action do
push_frontend_feature_flag(:hide_dismissed_vulnerabilities)
push_frontend_feature_flag(:junit_pipeline_view)
end
......
......@@ -195,7 +195,7 @@ module GitlabRoutingHelper
end
def snippet_path(snippet, *args)
if snippet.is_a?(ProjectSnippet)
if snippet.type == "ProjectSnippet"
application_url_helpers.project_snippet_path(snippet.project, snippet, *args)
else
new_args = snippet_query_params(snippet, *args)
......@@ -204,7 +204,7 @@ module GitlabRoutingHelper
end
def snippet_url(snippet, *args)
if snippet.is_a?(ProjectSnippet)
if snippet.type == "ProjectSnippet"
application_url_helpers.project_snippet_url(snippet.project, snippet, *args)
else
new_args = snippet_query_params(snippet, *args)
......@@ -213,7 +213,7 @@ module GitlabRoutingHelper
end
def raw_snippet_path(snippet, *args)
if snippet.is_a?(ProjectSnippet)
if snippet.type == "ProjectSnippet"
application_url_helpers.raw_project_snippet_path(snippet.project, snippet, *args)
else
new_args = snippet_query_params(snippet, *args)
......@@ -222,7 +222,7 @@ module GitlabRoutingHelper
end
def raw_snippet_url(snippet, *args)
if snippet.is_a?(ProjectSnippet)
if snippet.type == "ProjectSnippet"
application_url_helpers.raw_project_snippet_url(snippet.project, snippet, *args)
else
new_args = snippet_query_params(snippet, *args)
......
......@@ -31,13 +31,14 @@ module SearchHelper
from = collection.offset_value + 1
to = collection.offset_value + collection.to_a.size
count = collection.total_count
term_element = "<span>&nbsp;<code>#{h(term)}</code>&nbsp;</span>".html_safe
search_entries_info_template(collection) % {
from: from,
to: to,
count: count,
scope: search_entries_scope_label(scope, count),
term: term
term_element: term_element
}
end
......@@ -72,9 +73,9 @@ module SearchHelper
def search_entries_info_template(collection)
if collection.total_pages > 1
s_("SearchResults|Showing %{from} - %{to} of %{count} %{scope} for \"%{term}\"")
s_("SearchResults|Showing %{from} - %{to} of %{count} %{scope} for%{term_element}").html_safe
else
s_("SearchResults|Showing %{count} %{scope} for \"%{term}\"")
s_("SearchResults|Showing %{count} %{scope} for%{term_element}").html_safe
end
end
......
......@@ -204,7 +204,7 @@ module Ci
end
scope :internal, -> { where(source: internal_sources) }
scope :ci_sources, -> { where(config_source: ci_sources_values) }
scope :ci_sources, -> { where(config_source: ::Ci::PipelineEnums.ci_config_sources_values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
......@@ -315,10 +315,6 @@ module Ci
sources.reject { |source| source == "external" }.values
end
def self.ci_sources_values
config_sources.values_at(:repository_source, :auto_devops_source, :unknown_source)
end
def self.bridgeable_statuses
::Ci::Pipeline::AVAILABLE_STATUSES - %w[created preparing pending]
end
......
......@@ -35,9 +35,20 @@ module Ci
{
unknown_source: nil,
repository_source: 1,
auto_devops_source: 2
auto_devops_source: 2,
remote_source: 4,
external_project_source: 5
}
end
def self.ci_config_sources_values
config_sources.values_at(
:unknown_source,
:repository_source,
:auto_devops_source,
:remote_source,
:external_project_source)
end
end
end
......
......@@ -58,7 +58,7 @@ class ProjectWiki
end
def wiki_base_path
[Gitlab.config.gitlab.relative_url_root, '/', @project.full_path, '/wikis'].join('')
[Gitlab.config.gitlab.relative_url_root, '/', @project.full_path, '/-', '/wikis'].join('')
end
# Returns the Gitlab::Git::Wiki object.
......
......@@ -3,6 +3,9 @@
class MergeRequestWidgetEntity < Grape::Entity
include RequestAwareEntity
expose :id
expose :iid
expose :source_project_full_path do |merge_request|
merge_request.source_project&.full_path
end
......@@ -65,6 +68,8 @@ class MergeRequestWidgetEntity < Grape::Entity
end
def as_json(options = {})
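# When async_mr_widget is enabled, skip merging the poll entities into this payload;
# the widget is expected to fetch that data separately (see MRWidgetService.fetchInitialData).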
return super(options) if Feature.enabled?(:async_mr_widget)
super(options)
.merge(MergeRequestPollCachedWidgetEntity.new(object, **@options.opts_hash).as_json(options))
.merge(MergeRequestPollWidgetEntity.new(object, **@options.opts_hash).as_json(options))
......
......@@ -44,31 +44,21 @@
.ci-variable-body-item.ci-variable-protected-item.table-section.section-20.mr-0.border-top-0
.append-right-default
= s_("CiVariable|Protected")
%button{ type: 'button',
class: "js-project-feature-toggle project-feature-toggle #{'is-checked' if is_protected}",
"aria-label": s_("CiVariable|Toggle protected") }
= render "shared/buttons/project_feature_toggle", is_checked: is_protected, label: s_("CiVariable|Toggle protected") do
%input{ type: "hidden",
class: 'js-ci-variable-input-protected js-project-feature-toggle-input',
name: protected_input_name,
value: is_protected,
data: { default: is_protected_default.to_s } }
%span.toggle-icon
= sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked')
= sprite_icon('status_failed_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-unchecked')
.ci-variable-body-item.ci-variable-masked-item.table-section.section-20.mr-0.border-top-0
.append-right-default
= s_("CiVariable|Masked")
%button{ type: 'button',
class: "js-project-feature-toggle project-feature-toggle qa-variable-masked #{'is-checked' if is_masked}",
"aria-label": s_("CiVariable|Toggle masked") }
= render "shared/buttons/project_feature_toggle", is_checked: is_masked, label: s_("CiVariable|Toggle masked"), class_list: "js-project-feature-toggle project-feature-toggle qa-variable-masked" do
%input{ type: "hidden",
class: 'js-ci-variable-input-masked js-project-feature-toggle-input',
name: masked_input_name,
value: is_masked,
data: { default: is_masked_default.to_s } }
%span.toggle-icon
= sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked')
= sprite_icon('status_failed_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-unchecked')
= render_if_exists 'ci/variables/environment_scope', form_field: form_field, variable: variable
%button.js-row-remove-button.ci-variable-row-remove-button.table-section.section-5.border-top-0{ type: 'button', 'aria-label': s_('CiVariables|Remove variable row') }
= icon('minus-circle')
......@@ -3,14 +3,8 @@
.form-group
%h5= s_('ClusterIntegration|Integration status')
%label.append-bottom-0.js-cluster-enable-toggle-area
%button{ type: 'button',
class: "js-project-feature-toggle project-feature-toggle #{'is-checked' if @cluster.enabled?} #{'is-disabled' unless can?(current_user, :update_cluster, @cluster)}",
"aria-label": s_("ClusterIntegration|Toggle Kubernetes cluster"),
disabled: !can?(current_user, :update_cluster, @cluster) }
= render "shared/buttons/project_feature_toggle", is_checked: @cluster.enabled?, label: s_("ClusterIntegration|Toggle Kubernetes cluster"), disabled: !can?(current_user, :update_cluster, @cluster) do
= field.hidden_field :enabled, { class: 'js-project-feature-toggle-input'}
%span.toggle-icon
= sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked')
= sprite_icon('status_failed_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-unchecked')
.form-text.text-muted= s_('ClusterIntegration|Enable or disable GitLab\'s connection to your Kubernetes cluster.')
.form-group
......
- class_list ||= "js-project-feature-toggle project-feature-toggle"
- data ||= nil
- disabled ||= false
- is_checked ||= false
- label ||= nil
%button{ type: 'button',
class: "#{class_list} #{'is-disabled' if disabled} #{'is-checked' if is_checked}",
"aria-label": label,
disabled: disabled,
data: data }
- if yield.present?
= yield
%span.toggle-icon
= sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked')
= sprite_icon('status_failed_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-unchecked')
---
title: Changes to how the search term is styled in the results
merge_request: 20416
author:
type: changed
---
title: Added legend to deploy boards
merge_request: 20208
author:
type: added
---
title: Allow NPM package downloads with CI_JOB_TOKEN
merge_request: 20868
author:
type: added
---
title: delete board_service.js
merge_request: 20168
author: nuwe1
type: other
---
title: Allow CI config path to point to a URL or file in a different repository
merge_request: 20179
author:
type: added
---
title: Move wiki routing under /-/ scope
merge_request: 21185
author:
type: deprecated
---
title: Fix snippet routes
merge_request: 21248
author:
type: fixed
---
title: Fetches initial merge request widget data async
merge_request: 20719
author:
type: changed
......@@ -258,6 +258,10 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
post :list_projects
end
end
# The wiki routing contains wildcard characters so
# it's preferable to keep it below all other project routes
draw :wiki
end
# End of the /-/ scope.
......@@ -523,9 +527,8 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
post :web_ide_clientside_preview
end
# Since both wiki and repository routing contains wildcard characters
# The repository routing contains wildcard characters so
# it's preferable to keep it below all other project routes
draw :wiki
draw :repository
# All new routes should go under /-/ scope.
......@@ -542,7 +545,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
:forks, :group_links, :import, :avatar, :mirror,
:cycle_analytics, :mattermost, :variables, :triggers,
:environments, :protected_environments, :error_tracking,
:serverless, :clusters, :audit_events)
:serverless, :clusters, :audit_events, :wikis)
end
# rubocop: disable Cop/PutProjectRoutesUnderScope
......
......@@ -185,7 +185,7 @@ module.exports = {
options: { limit: 2048 },
},
{
test: /\_worker\.js$/,
test: /_worker\.js$/,
use: [
{
loader: 'worker-loader',
......
# frozen_string_literal: true
class AddUniqueConstraintToSoftwareLicenses < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
NEW_INDEX = 'index_software_licenses_on_unique_name'
OLD_INDEX = 'index_software_licenses_on_name'
disable_ddl_transaction!
# 12 software licenses will be removed on GitLab.com
# 0 software license policies will be updated on GitLab.com
def up(attempts: 100)
remove_redundant_software_licenses!
add_concurrent_index :software_licenses, :name, unique: true, name: NEW_INDEX
remove_concurrent_index :software_licenses, :name, name: OLD_INDEX
rescue ActiveRecord::RecordNotUnique
retry if (attempts -= 1) > 0
raise StandardError, <<~EOS
Failed to add a unique index to software_licenses, despite retrying the
migration 100 times.
See https://gitlab.com/gitlab-org/gitlab/merge_requests/19840.
EOS
end
def down
remove_concurrent_index :software_licenses, :name, unique: true, name: NEW_INDEX
add_concurrent_index :software_licenses, :name, name: OLD_INDEX
end
private
def remove_redundant_software_licenses!
redundant_software_licenses = execute <<~SQL
SELECT min(id) id, name
FROM software_licenses
WHERE name IN (select name from software_licenses group by name having count(name) > 1)
GROUP BY name
SQL
say "Detected #{redundant_software_licenses.count} duplicates."
redundant_software_licenses.each_row do |id, name|
say_with_time("Reassigning policies that reference software license #{name}.") do
duplicates = software_licenses.where.not(id: id).where(name: name)
software_license_policies
.where(software_license_id: duplicates)
.update_all(software_license_id: id)
duplicates.delete_all
end
end
end
def table(name)
Class.new(ActiveRecord::Base) { self.table_name = name }
end
def software_licenses
@software_licenses ||= table(:software_licenses)
end
def software_license_policies
@software_license_policies ||= table(:software_license_policies)
end
end
......@@ -3698,7 +3698,7 @@ ActiveRecord::Schema.define(version: 2019_12_02_031812) do
create_table "software_licenses", id: :serial, force: :cascade do |t|
t.string "name", null: false
t.string "spdx_identifier", limit: 255
t.index ["name"], name: "index_software_licenses_on_name"
t.index ["name"], name: "index_software_licenses_on_unique_name", unique: true
t.index ["spdx_identifier"], name: "index_software_licenses_on_spdx_identifier"
end
......
......@@ -969,7 +969,7 @@ X-Gitlab-Event: Wiki Page Hook
"http_url": "http://example.com/root/awesome-project.git"
},
"wiki": {
"web_url": "http://example.com/root/awesome-project/wikis/home",
"web_url": "http://example.com/root/awesome-project/-/wikis/home",
"git_ssh_url": "git@example.com:root/awesome-project.wiki.git",
"git_http_url": "http://example.com/root/awesome-project.wiki.git",
"path_with_namespace": "root/awesome-project.wiki",
......@@ -981,7 +981,7 @@ X-Gitlab-Event: Wiki Page Hook
"format": "markdown",
"message": "adding an awesome page to the wiki",
"slug": "awesome",
"url": "http://example.com/root/awesome-project/wikis/awesome",
"url": "http://example.com/root/awesome-project/-/wikis/awesome",
"action": "create"
}
}
......
......@@ -67,20 +67,37 @@ For information about setting a maximum artifact size for a project, see
## Custom CI configuration path
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/12509) in GitLab 9.4.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/12509) in GitLab 9.4.
> - [Support for external `.gitlab-ci.yml` locations](https://gitlab.com/gitlab-org/gitlab/issues/14376) introduced in GitLab 12.6.
By default we look for the `.gitlab-ci.yml` file in the project's root
directory. If you require a different location **within** the repository,
you can set a custom path that will be used to look up the configuration file;
this path should be **relative** to the root.
directory. If needed, you can specify an alternate path and file name, including locations outside the project.
Here are some valid examples:
Hosting the configuration file in a separate project allows stricter control over it.
You can limit access to the project that hosts the configuration to people with proper
authorization, while other users can still use the configuration for their pipelines
without being able to modify it.
- `.gitlab-ci.yml`
If the CI configuration stays within the repository but in a location other than the
default, the path must be relative to the root directory. Examples of valid paths and
file names:
- `.gitlab-ci.yml` (default)
- `.my-custom-file.yml`
- `my/path/.gitlab-ci.yml`
- `my/path/.my-custom-file.yml`
If the CI configuration is hosted in a different project within GitLab, the path must be relative
to the root directory of the other project, with the group and project name appended (see the
sketch after these examples):
- `.gitlab-ci.yml@mygroup/another-project`
- `my/path/.my-custom-file.yml@mygroup/another-project`
If the CI configuration is hosted on an external site, separate from the GitLab instance,
the URL must end with `.yml`:
- `http://example.com/generate/ci/config.yml`
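Internally, these non-repository locations are resolved into an `include` directive. A minimal
sketch, based on the `ExternalProject` and `Remote` config content sources added in this change
(the generated YAML is an implementation detail, not a documented interface):

```yaml
# Sketch only: `my/path/.my-custom-file.yml@mygroup/another-project`
# is treated roughly like:
include:
  - project: mygroup/another-project
    file: my/path/.my-custom-file.yml
---
# Sketch only: `http://example.com/generate/ci/config.yml`
# is treated roughly like:
include:
  - remote: http://example.com/generate/ci/config.yml
```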
The path can be customized at a project level. To customize the path:
1. Go to the project's **Settings > CI / CD**.
......
......@@ -8,21 +8,28 @@ module Gitlab
class Content < Chain::Base
include Chain::Helpers
def perform!
return if @command.config_content
if content = content_from_repo
@command.config_content = content
@pipeline.config_source = :repository_source
# TODO: we should persist ci_config_path
# @pipeline.config_path = ci_config_path
elsif content = content_from_auto_devops
@command.config_content = content
@pipeline.config_source = :auto_devops_source
end
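# Potential config sources, evaluated in order; the first one whose content
# exists is used (see #find_config below).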
SOURCES = [
Gitlab::Ci::Pipeline::Chain::Config::Content::Runtime,
Gitlab::Ci::Pipeline::Chain::Config::Content::Repository,
Gitlab::Ci::Pipeline::Chain::Config::Content::ExternalProject,
Gitlab::Ci::Pipeline::Chain::Config::Content::Remote,
Gitlab::Ci::Pipeline::Chain::Config::Content::AutoDevops
].freeze
LEGACY_SOURCES = [
Gitlab::Ci::Pipeline::Chain::Config::Content::Runtime,
Gitlab::Ci::Pipeline::Chain::Config::Content::LegacyRepository,
Gitlab::Ci::Pipeline::Chain::Config::Content::LegacyAutoDevops
].freeze
unless @command.config_content
return error("Missing #{ci_config_path} file")
def perform!
if config = find_config
# TODO: we should persist config_content
# @pipeline.config_content = config.content
@command.config_content = config.content
@pipeline.config_source = config.source
else
error('Missing CI config file')
end
end
......@@ -32,24 +39,21 @@ module Gitlab
private
def content_from_repo
return unless project
return unless @pipeline.sha
return unless ci_config_path
def find_config
sources.each do |source|
config = source.new(@pipeline, @command)
return config if config.exists?
end
project.repository.gitlab_ci_yml_for(@pipeline.sha, ci_config_path)
rescue GRPC::NotFound, GRPC::Internal
nil
end
def content_from_auto_devops
return unless project&.auto_devops_enabled?
Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content
def sources
if Feature.enabled?(:ci_root_config_content, @command.project, default_enabled: true)
SOURCES
else
LEGACY_SOURCES
end
def ci_config_path
project.ci_config_path.presence || '.gitlab-ci.yml'
end
end
end
......
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class AutoDevops < Source
def content
strong_memoize(:content) do
next unless project&.auto_devops_enabled?
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
YAML.dump('include' => [{ 'template' => template.full_name }])
end
end
def source
:auto_devops_source
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class ExternalProject < Source
def content
strong_memoize(:content) do
next unless external_project_path?
path_file, path_project = ci_config_path.split('@', 2)
YAML.dump('include' => [{ 'project' => path_project, 'file' => path_file }])
end
end
def source
:external_project_source
end
private
# Example: path/to/.gitlab-ci.yml@another-group/another-project
def external_project_path?
ci_config_path =~ /\A.+(yml|yaml)@.+\z/
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class LegacyAutoDevops < Source
def content
strong_memoize(:content) do
next unless project&.auto_devops_enabled?
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
template.content
end
end
def source
:auto_devops_source
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class LegacyRepository < Source
def content
strong_memoize(:content) do
next unless project
next unless @pipeline.sha
next unless ci_config_path
project.repository.gitlab_ci_yml_for(@pipeline.sha, ci_config_path)
rescue GRPC::NotFound, GRPC::Internal
nil
end
end
def source
:repository_source
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Remote < Source
def content
strong_memoize(:content) do
next unless ci_config_path =~ URI.regexp(%w[http https])
YAML.dump('include' => [{ 'remote' => ci_config_path }])
end
end
def source
:remote_source
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Repository < Source
def content
strong_memoize(:content) do
next unless file_in_repository?
YAML.dump('include' => [{ 'local' => ci_config_path }])
end
end
def source
:repository_source
end
private
def file_in_repository?
return unless project
return unless @pipeline.sha
project.repository.gitlab_ci_yml_for(@pipeline.sha, ci_config_path).present?
rescue GRPC::NotFound, GRPC::Internal
nil
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Runtime < Source
def content
@command.config_content
end
def source
# The only case when this source is used is when the config content
# is passed in as parameter to Ci::CreatePipelineService.
# This would only occur with parent/child pipelines, which are being
# implemented.
# TODO: change source to return :runtime_source
# https://gitlab.com/gitlab-org/gitlab/merge_requests/21041
nil
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Chain
module Config
class Content
class Source
include Gitlab::Utils::StrongMemoize
DEFAULT_YAML_FILE = '.gitlab-ci.yml'
def initialize(pipeline, command)
@pipeline = pipeline
@command = command
end
def exists?
strong_memoize(:exists) do
content.present?
end
end
def content
raise NotImplementedError
end
def source
raise NotImplementedError
end
def project
@project ||= @pipeline.project
end
def ci_config_path
@ci_config_path ||= project.ci_config_path.presence || DEFAULT_YAML_FILE
end
end
end
end
end
end
end
end
......@@ -105,7 +105,7 @@ module Gitlab
save_id_mapping(relation_key, data_hash, relation_object)
rescue => e
# re-raise if not enabled
raise e unless Feature.enabled?(:import_graceful_failures, @project.group)
raise e unless Feature.enabled?(:import_graceful_failures, @project.group, default_enabled: true)
log_import_failure(relation_key, relation_index, e)
end
......
......@@ -2960,6 +2960,9 @@ msgstr ""
msgid "Can't scan the code?"
msgstr ""
msgid "Canary"
msgstr ""
msgid "Canary Deployments is a popular CI strategy, where a small portion of the fleet is updated to the new version of your application."
msgstr ""
......@@ -15325,16 +15328,16 @@ msgstr ""
msgid "SearchCodeResults|of %{link_to_project}"
msgstr ""
msgid "SearchResults|Showing %{count} %{scope} for \"%{term}\""
msgid "SearchResults|Showing %{count} %{scope} for%{term_element}"
msgstr ""
msgid "SearchResults|Showing %{count} %{scope} for \"%{term}\" in your personal and project snippets"
msgid "SearchResults|Showing %{count} %{scope} for%{term_element} in your personal and project snippets"
msgstr ""
msgid "SearchResults|Showing %{from} - %{to} of %{count} %{scope} for \"%{term}\""
msgid "SearchResults|Showing %{from} - %{to} of %{count} %{scope} for%{term_element}"
msgstr ""
msgid "SearchResults|Showing %{from} - %{to} of %{count} %{scope} for \"%{term}\" in your personal and project snippets"
msgid "SearchResults|Showing %{from} - %{to} of %{count} %{scope} for%{term_element} in your personal and project snippets"
msgstr ""
msgid "SearchResults|We couldn't find any %{scope} matching %{term}"
......@@ -16963,6 +16966,9 @@ msgstr ""
msgid "Subtracts"
msgstr ""
msgid "Succeeded"
msgstr ""
msgid "Successfully activated"
msgstr ""
......@@ -18760,6 +18766,9 @@ msgstr ""
msgid "Unable to load the diff. %{button_try_again}"
msgstr ""
msgid "Unable to load the merge request widget. Try reloading the page."
msgstr ""
msgid "Unable to resolve"
msgstr ""
......
......@@ -1073,7 +1073,7 @@ describe Projects::MergeRequestsController do
end
it 'renders MergeRequest as JSON' do
expect(json_response.keys).to include('id', 'iid', 'description')
expect(json_response.keys).to include('id', 'iid')
end
end
......@@ -1107,7 +1107,7 @@ describe Projects::MergeRequestsController do
it 'renders MergeRequest as JSON' do
subject
expect(json_response.keys).to include('id', 'iid', 'description')
expect(json_response.keys).to include('id', 'iid')
end
end
......
......@@ -706,7 +706,7 @@ describe 'Pipelines', :js do
click_on 'Run Pipeline'
end
it { expect(page).to have_content('Missing .gitlab-ci.yml file') }
it { expect(page).to have_content('Missing CI config file') }
it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file is available when trying again' do
click_button project.default_branch
......
......@@ -29,11 +29,11 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page).to have_content("regular link")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
end
end
......@@ -43,11 +43,11 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page).to have_content("regular link")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
end
end
......@@ -57,11 +57,11 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page).to have_content("regular link")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
end
end
end
......@@ -77,11 +77,11 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page).to have_content("regular link")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a/b/c/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
end
end
......@@ -95,11 +95,11 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page).to have_content("regular link")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
end
end
......@@ -113,11 +113,11 @@ describe 'Projects > Wiki > User previews markdown changes', :js do
expect(page).to have_content("regular link")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/wikis/title%20with%20spaces\">spaced link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
end
end
......
......@@ -55,7 +55,7 @@ describe "User creates wiki page" do
end
expect(current_path).to include("one/two/three-test")
expect(page).to have_xpath("//a[@href='/#{project.full_path}/wikis/one/two/three-test']")
expect(page).to have_xpath("//a[@href='/#{project.full_path}/-/wikis/one/two/three-test']")
end
it "has `Create home` as a commit message", :js do
......
import BoardService from '~/boards/services/board_service';
import { TEST_HOST } from 'helpers/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import boardsStore from '~/boards/stores/boards_store';
describe('BoardService', () => {
describe('boardsStore', () => {
const dummyResponse = "without type checking this doesn't matter";
const boardId = 'dummy-board-id';
const endpoints = {
......@@ -14,7 +13,6 @@ describe('BoardService', () => {
recentBoardsEndpoint: `${TEST_HOST}/recent/boards`,
};
let service;
let axiosMock;
beforeEach(() => {
......@@ -23,7 +21,6 @@ describe('BoardService', () => {
...endpoints,
boardId,
});
service = new BoardService();
});
describe('all', () => {
......@@ -31,13 +28,13 @@ describe('BoardService', () => {
axiosMock.onGet(endpoints.listsEndpoint).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.all()).resolves.toEqual(expectedResponse);
return expect(boardsStore.all()).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onGet(endpoints.listsEndpoint).replyOnce(500);
return expect(service.all()).rejects.toThrow();
return expect(boardsStore.all()).rejects.toThrow();
});
});
......@@ -48,13 +45,13 @@ describe('BoardService', () => {
axiosMock.onPost(listsEndpointGenerate).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.generateDefaultLists()).resolves.toEqual(expectedResponse);
return expect(boardsStore.generateDefaultLists()).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onPost(listsEndpointGenerate).replyOnce(500);
return expect(service.generateDefaultLists()).rejects.toThrow();
return expect(boardsStore.generateDefaultLists()).rejects.toThrow();
});
});
......@@ -76,7 +73,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.createList(entityId, entityType))
return expect(boardsStore.createList(entityId, entityType))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -86,7 +83,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.createList(entityId, entityType))
return expect(boardsStore.createList(entityId, entityType))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -113,7 +110,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.updateList(id, position, collapsed))
return expect(boardsStore.updateList(id, position, collapsed))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -123,7 +120,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.updateList(id, position, collapsed))
return expect(boardsStore.updateList(id, position, collapsed))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -147,7 +144,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.destroyList(id))
return expect(boardsStore.destroyList(id))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalled();
......@@ -157,7 +154,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.destroyList(id))
return expect(boardsStore.destroyList(id))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalled();
......@@ -173,7 +170,7 @@ describe('BoardService', () => {
axiosMock.onGet(url).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.getIssuesForList(id)).resolves.toEqual(expectedResponse);
return expect(boardsStore.getIssuesForList(id)).resolves.toEqual(expectedResponse);
});
it('makes a request to fetch list issues with filter', () => {
......@@ -181,13 +178,13 @@ describe('BoardService', () => {
axiosMock.onGet(`${url}&algal=scrubber`).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.getIssuesForList(id, filter)).resolves.toEqual(expectedResponse);
return expect(boardsStore.getIssuesForList(id, filter)).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onGet(url).replyOnce(500);
return expect(service.getIssuesForList(id)).rejects.toThrow();
return expect(boardsStore.getIssuesForList(id)).rejects.toThrow();
});
});
......@@ -228,7 +225,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.moveIssue(id, fromListId, toListId, moveBeforeId, moveAfterId))
return expect(boardsStore.moveIssue(id, fromListId, toListId, moveBeforeId, moveAfterId))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -238,7 +235,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.moveIssue(id, fromListId, toListId, moveBeforeId, moveAfterId))
return expect(boardsStore.moveIssue(id, fromListId, toListId, moveBeforeId, moveAfterId))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -267,7 +264,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.newIssue(id, issue))
return expect(boardsStore.newIssue(id, issue))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -277,7 +274,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.newIssue(id, issue))
return expect(boardsStore.newIssue(id, issue))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -304,13 +301,13 @@ describe('BoardService', () => {
axiosMock.onGet(url).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.getBacklog(requestParams)).resolves.toEqual(expectedResponse);
return expect(boardsStore.getBacklog(requestParams)).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onGet(url).replyOnce(500);
return expect(service.getBacklog(requestParams)).rejects.toThrow();
return expect(boardsStore.getBacklog(requestParams)).rejects.toThrow();
});
});
......@@ -337,7 +334,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.bulkUpdate(issueIds, extraData))
return expect(boardsStore.bulkUpdate(issueIds, extraData))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -347,7 +344,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.bulkUpdate(issueIds, extraData))
return expect(boardsStore.bulkUpdate(issueIds, extraData))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -362,13 +359,13 @@ describe('BoardService', () => {
axiosMock.onGet(dummyEndpoint).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(BoardService.getIssueInfo(dummyEndpoint)).resolves.toEqual(expectedResponse);
return expect(boardsStore.getIssueInfo(dummyEndpoint)).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onGet(dummyEndpoint).replyOnce(500);
return expect(BoardService.getIssueInfo(dummyEndpoint)).rejects.toThrow();
return expect(boardsStore.getIssueInfo(dummyEndpoint)).rejects.toThrow();
});
});
......@@ -379,7 +376,7 @@ describe('BoardService', () => {
axiosMock.onPost(dummyEndpoint).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(BoardService.toggleIssueSubscription(dummyEndpoint)).resolves.toEqual(
return expect(boardsStore.toggleIssueSubscription(dummyEndpoint)).resolves.toEqual(
expectedResponse,
);
});
......@@ -387,7 +384,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
axiosMock.onPost(dummyEndpoint).replyOnce(500);
return expect(BoardService.toggleIssueSubscription(dummyEndpoint)).rejects.toThrow();
return expect(boardsStore.toggleIssueSubscription(dummyEndpoint)).rejects.toThrow();
});
});
......@@ -398,13 +395,13 @@ describe('BoardService', () => {
axiosMock.onGet(url).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.allBoards()).resolves.toEqual(expectedResponse);
return expect(boardsStore.allBoards()).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onGet(url).replyOnce(500);
return expect(service.allBoards()).rejects.toThrow();
return expect(boardsStore.allBoards()).rejects.toThrow();
});
});
......@@ -415,13 +412,13 @@ describe('BoardService', () => {
axiosMock.onGet(url).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.recentBoards()).resolves.toEqual(expectedResponse);
return expect(boardsStore.recentBoards()).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onGet(url).replyOnce(500);
return expect(service.recentBoards()).rejects.toThrow();
return expect(boardsStore.recentBoards()).rejects.toThrow();
});
});
......@@ -462,7 +459,7 @@ describe('BoardService', () => {
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(
service.createBoard({
boardsStore.createBoard({
...board,
id,
}),
......@@ -477,7 +474,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([500]);
return expect(
service.createBoard({
boardsStore.createBoard({
...board,
id,
}),
......@@ -513,7 +510,7 @@ describe('BoardService', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.createBoard(board))
return expect(boardsStore.createBoard(board))
.resolves.toEqual(expectedResponse)
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -523,7 +520,7 @@ describe('BoardService', () => {
it('fails for error response', () => {
requestSpy.mockReturnValue([500]);
return expect(service.createBoard(board))
return expect(boardsStore.createBoard(board))
.rejects.toThrow()
.then(() => {
expect(requestSpy).toHaveBeenCalledWith(expectedRequest);
......@@ -540,13 +537,13 @@ describe('BoardService', () => {
axiosMock.onDelete(url).replyOnce(200, dummyResponse);
const expectedResponse = expect.objectContaining({ data: dummyResponse });
return expect(service.deleteBoard({ id })).resolves.toEqual(expectedResponse);
return expect(boardsStore.deleteBoard({ id })).resolves.toEqual(expectedResponse);
});
it('fails for error response', () => {
axiosMock.onDelete(url).replyOnce(500);
return expect(service.deleteBoard({ id })).rejects.toThrow();
return expect(boardsStore.deleteBoard({ id })).rejects.toThrow();
});
});
});
/* eslint no-param-reassign: "off" */
import $ from 'jquery';
import { membersBeforeSave } from '~/gfm_auto_complete';
import GfmAutoComplete from 'ee_else_ce/gfm_auto_complete';
import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete';
import 'jquery.caret';
import 'at.js';
......
import $ from 'helpers/jquery';
import AxiosMockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Vue from 'vue';
import { mount, createLocalVue } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
import NotesApp from '~/notes/components/notes_app.vue';
import service from '~/notes/services/notes_service';
import createStore from '~/notes/stores';
import '~/behaviors/markdown/render_gfm';
import { setTestTimeout } from 'helpers/timeout';
// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491)
import * as mockData from '../../notes/mock_data';
import * as urlUtility from '~/lib/utils/url_utility';
......@@ -77,6 +77,8 @@ describe('note_app', () => {
describe('set data', () => {
beforeEach(() => {
setFixtures('<div class="js-discussions-count"></div>');
axiosMock.onAny().reply(200, []);
wrapper = mountComponent();
return waitForDiscussionsRequest();
......@@ -97,6 +99,10 @@ describe('note_app', () => {
it('should fetch discussions', () => {
expect(store.state.discussions).toEqual([]);
});
it('updates discussions badge', () => {
expect(document.querySelector('.js-discussions-count').textContent).toEqual('0');
});
});
describe('render', () => {
......@@ -161,6 +167,7 @@ describe('note_app', () => {
describe('while fetching data', () => {
beforeEach(() => {
setFixtures('<div class="js-discussions-count"></div>');
axiosMock.onAny().reply(200, []);
wrapper = mountComponent();
});
......@@ -177,6 +184,10 @@ describe('note_app', () => {
'Write a comment or drag your files here…',
);
});
it('should not update discussions badge (it should be blank)', () => {
expect(document.querySelector('.js-discussions-count').textContent).toEqual('');
});
});
describe('update note', () => {
......
......@@ -122,13 +122,13 @@ describe SearchHelper do
it 'uses the correct singular label' do
collection = Kaminari.paginate_array([:foo]).page(1).per(10)
expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 #{label} for \"foo\"")
expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 #{label} for<span>&nbsp;<code>foo</code>&nbsp;</span>")
end
it 'uses the correct plural label' do
collection = Kaminari.paginate_array([:foo] * 23).page(1).per(10)
expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 - 10 of 23 #{label.pluralize} for \"foo\"")
expect(search_entries_info(collection, scope, 'foo')).to eq("Showing 1 - 10 of 23 #{label.pluralize} for<span>&nbsp;<code>foo</code>&nbsp;</span>")
end
end
......
......@@ -27,7 +27,7 @@ describe WikiHelper do
let(:classes) { "btn btn-default has-tooltip reverse-sort-btn qa-reverse-sort rspec-reverse-sort" }
def expected_link(sort, direction, icon_class)
path = "/#{project.full_path}/wikis/pages?direction=#{direction}&sort=#{sort}"
path = "/#{project.full_path}/-/wikis/pages?direction=#{direction}&sort=#{sort}"
helper.link_to(path, type: 'button', class: classes, title: 'Sort direction') do
helper.sprite_icon("sort-#{icon_class}", size: 16)
......
......@@ -8,7 +8,6 @@ import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/issue';
import '~/boards/models/list';
import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store';
import eventHub from '~/boards/eventhub';
import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
......
......@@ -5,7 +5,6 @@ import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/issue';
import '~/boards/models/list';
import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store';
import { setMockEndpoints } from './mock_data';
......
......@@ -10,7 +10,6 @@ import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/issue';
import '~/boards/models/list';
import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store';
import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
......
......@@ -222,6 +222,7 @@ export default {
plain_diff_path: '/root/acets-app/merge_requests/22.diff',
merge_request_basic_path: '/root/acets-app/merge_requests/22.json?serializer=basic',
merge_request_widget_path: '/root/acets-app/merge_requests/22/widget.json',
merge_request_cached_widget_path: '/cached.json',
merge_check_path: '/root/acets-app/merge_requests/22/merge_check',
ci_environments_status_url: '/root/acets-app/merge_requests/22/ci_environments_status',
project_archived: false,
......
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify';
......@@ -17,6 +19,7 @@ const returnPromise = data =>
describe('mrWidgetOptions', () => {
let vm;
let mock;
let MrWidgetOptions;
const COLLABORATION_MESSAGE = 'Allows commits from members who can merge to the target branch';
......@@ -25,6 +28,13 @@ describe('mrWidgetOptions', () => {
// Prevent component mounting
delete mrWidgetOptions.el;
gl.mrWidgetData = { ...mockData };
gon.features = { asyncMrWidget: true };
mock = new MockAdapter(axios);
mock.onGet(mockData.merge_request_widget_path).reply(() => [200, { ...mockData }]);
mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, { ...mockData }]);
MrWidgetOptions = Vue.extend(mrWidgetOptions);
vm = mountComponent(MrWidgetOptions, {
mrData: { ...mockData },
......@@ -33,6 +43,9 @@ describe('mrWidgetOptions', () => {
afterEach(() => {
vm.$destroy();
mock.restore();
gl.mrWidgetData = {};
gon.features = {};
});
describe('data', () => {
......@@ -308,17 +321,16 @@ describe('mrWidgetOptions', () => {
});
describe('bindEventHubListeners', () => {
it('should bind eventHub listeners', () => {
it('should bind eventHub listeners', done => {
spyOn(vm, 'checkStatus').and.returnValue(() => {});
spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData));
spyOn(vm, 'fetchActionsContent');
spyOn(vm.mr, 'setData');
spyOn(vm, 'resumePolling');
spyOn(vm, 'stopPolling');
spyOn(eventHub, '$on');
vm.bindEventHubListeners();
spyOn(eventHub, '$on').and.callThrough();
setTimeout(() => {
eventHub.$emit('SetBranchRemoveFlag', ['flag']);
expect(vm.mr.isRemovingSourceBranch).toEqual('flag');
......@@ -361,6 +373,9 @@ describe('mrWidgetOptions', () => {
listenersWithServiceRequest.FetchActionsContent();
expect(vm.fetchActionsContent).toHaveBeenCalled();
done();
});
});
});
......@@ -451,22 +466,30 @@ describe('mrWidgetOptions', () => {
});
describe('resumePolling', () => {
it('should call stopTimer on pollingInterval', () => {
it('should call stopTimer on pollingInterval', done => {
setTimeout(() => {
spyOn(vm.pollingInterval, 'resume');
vm.resumePolling();
expect(vm.pollingInterval.resume).toHaveBeenCalled();
done();
});
});
});
describe('stopPolling', () => {
it('should call stopTimer on pollingInterval', () => {
it('should call stopTimer on pollingInterval', done => {
setTimeout(() => {
spyOn(vm.pollingInterval, 'stopTimer');
vm.stopPolling();
expect(vm.pollingInterval.stopTimer).toHaveBeenCalled();
done();
});
});
});
});
......
......@@ -72,14 +72,14 @@ describe Banzai::Pipeline::WikiPipeline do
markdown = "[Page](./page)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/twice/page\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/page\"")
end
it "rewrites file links to be at the scope of the current directory" do
markdown = "[Link to Page](./page.md)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/twice/page.md\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/page.md\"")
end
end
......@@ -88,14 +88,14 @@ describe Banzai::Pipeline::WikiPipeline do
markdown = "[Link to Page](../page)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/page\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/page\"")
end
it "rewrites file links to be at the scope of the parent directory" do
markdown = "[Link to Page](../page.md)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/page.md\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/page.md\"")
end
end
......@@ -104,14 +104,14 @@ describe Banzai::Pipeline::WikiPipeline do
markdown = "[Link to Page](./subdirectory/page)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/twice/subdirectory/page\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/subdirectory/page\"")
end
it "rewrites file links to be at the scope of the sub-directory" do
markdown = "[Link to Page](./subdirectory/page.md)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/twice/subdirectory/page.md\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/subdirectory/page.md\"")
end
end
......@@ -120,35 +120,35 @@ describe Banzai::Pipeline::WikiPipeline do
markdown = "[Link to Page](page)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/page\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page\"")
end
it 'rewrites non-file links (with spaces) to be at the scope of the wiki root' do
markdown = "[Link to Page](page slug)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/page%20slug\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page%20slug\"")
end
it "rewrites file links to be at the scope of the current directory" do
markdown = "[Link to Page](page.md)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/nested/twice/page.md\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/page.md\"")
end
it 'rewrites links with anchor' do
markdown = '[Link to Header](start-page#title)'
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/start-page#title\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/start-page#title\"")
end
it 'rewrites links (with spaces) with anchor' do
markdown = '[Link to Header](start page#title)'
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/start%20page#title\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/start%20page#title\"")
end
end
......@@ -157,14 +157,14 @@ describe Banzai::Pipeline::WikiPipeline do
markdown = "[Link to Page](/page)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/page\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page\"")
end
it 'rewrites file links to be at the scope of the wiki root' do
markdown = "[Link to Page](/page.md)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/wikis/page.md\"")
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page.md\"")
end
end
end
......@@ -270,28 +270,28 @@ describe Banzai::Pipeline::WikiPipeline do
markdown = "![video_file](video_file_name.mp4)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/wikis/nested/twice/video_file_name.mp4"')
expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/video_file_name.mp4"')
end
it 'rewrites and replaces video links names with white spaces to %20' do
markdown = "![video file](video file name.mp4)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/wikis/nested/twice/video%20file%20name.mp4"')
expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/video%20file%20name.mp4"')
end
it 'generates audio html structure' do
markdown = "![audio_file](audio_file_name.wav)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/wikis/nested/twice/audio_file_name.wav"')
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/audio_file_name.wav"')
end
it 'rewrites and replaces audio links names with white spaces to %20' do
markdown = "![audio file](audio file name.wav)"
output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/wikis/nested/twice/audio%20file%20name.wav"')
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/audio%20file%20name.wav"')
end
end
end
......@@ -44,7 +44,7 @@ describe Gitlab::Chat::Command do
let(:pipeline) { command.create_pipeline }
before do
stub_repository_ci_yaml_file(sha: project.commit.id)
stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
project.add_developer(chat_name.user)
end
......
......@@ -30,7 +30,7 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
let(:step) { described_class.new(pipeline, command) }
before do
stub_repository_ci_yaml_file(sha: anything)
stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
end
it 'never breaks the chain' do
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Pipeline::Chain::Config::Content do
let(:project) { create(:project, ci_config_path: ci_config_path) }
let(:pipeline) { build(:ci_pipeline, project: project) }
let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project) }
subject { described_class.new(pipeline, command) }
describe '#perform!' do
context 'when feature flag is disabled' do
before do
stub_feature_flags(ci_root_config_content: false)
end
context 'when config is defined in a custom path in the repository' do
let(:ci_config_path) { 'path/to/config.yml' }
before do
expect(project.repository)
.to receive(:gitlab_ci_yml_for)
.with(pipeline.sha, ci_config_path)
.and_return('the-content')
end
it 'returns the content of the YAML file' do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
expect(command.config_content).to eq('the-content')
end
end
context 'when config is defined remotely' do
let(:ci_config_path) { 'http://example.com/path/to/ci/config.yml' }
it 'does not support URLs and defaults to AutoDevops' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
context 'when config is defined in a separate repository' do
let(:ci_config_path) { 'path/to/.gitlab-ci.yml@another-group/another-repo' }
it 'does not support YAML from an external repository and defaults to AutoDevops' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
context 'when config is defined in the default .gitlab-ci.yml' do
let(:ci_config_path) { nil }
before do
expect(project.repository)
.to receive(:gitlab_ci_yml_for)
.with(pipeline.sha, '.gitlab-ci.yml')
.and_return('the-content')
end
it 'returns the content of the canonical config file' do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
expect(command.config_content).to eq('the-content')
end
end
context 'when config is the Auto-Devops template' do
let(:ci_config_path) { nil }
before do
expect(project).to receive(:auto_devops_enabled?).and_return(true)
end
it 'returns the content of AutoDevops template' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
template = Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps')
expect(command.config_content).to eq(template.content)
end
end
context 'when config is not defined anywhere' do
let(:ci_config_path) { nil }
before do
expect(project).to receive(:auto_devops_enabled?).and_return(false)
end
it 'does not build the config content and adds an error' do
subject.perform!
expect(pipeline.config_source).to eq('unknown_source')
expect(command.config_content).to be_nil
expect(pipeline.errors.full_messages).to include('Missing CI config file')
end
end
end
context 'when config is defined in a custom path in the repository' do
let(:ci_config_path) { 'path/to/config.yml' }
before do
expect(project.repository)
.to receive(:gitlab_ci_yml_for)
.with(pipeline.sha, ci_config_path)
.and_return('the-content')
end
it 'builds root config including the local custom file' do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
expect(command.config_content).to eq(<<~EOY)
---
include:
- local: #{ci_config_path}
EOY
end
end
context 'when config is defined remotely' do
let(:ci_config_path) { 'http://example.com/path/to/ci/config.yml' }
it 'builds root config including the remote config' do
subject.perform!
expect(pipeline.config_source).to eq 'remote_source'
expect(command.config_content).to eq(<<~EOY)
---
include:
- remote: #{ci_config_path}
EOY
end
end
context 'when config is defined in a separate repository' do
let(:ci_config_path) { 'path/to/.gitlab-ci.yml@another-group/another-repo' }
it 'builds root config including the path to another repository' do
subject.perform!
expect(pipeline.config_source).to eq 'external_project_source'
expect(command.config_content).to eq(<<~EOY)
---
include:
- project: another-group/another-repo
file: path/to/.gitlab-ci.yml
EOY
end
end
context 'when config is defined in the default .gitlab-ci.yml' do
let(:ci_config_path) { nil }
before do
expect(project.repository)
.to receive(:gitlab_ci_yml_for)
.with(pipeline.sha, '.gitlab-ci.yml')
.and_return('the-content')
end
it 'builds root config including the canonical CI config file' do
subject.perform!
expect(pipeline.config_source).to eq 'repository_source'
expect(command.config_content).to eq(<<~EOY)
---
include:
- local: ".gitlab-ci.yml"
EOY
end
end
context 'when config is the Auto-Devops template' do
let(:ci_config_path) { nil }
before do
expect(project).to receive(:auto_devops_enabled?).and_return(true)
end
it 'builds root config including the auto-devops template' do
subject.perform!
expect(pipeline.config_source).to eq 'auto_devops_source'
expect(command.config_content).to eq(<<~EOY)
---
include:
- template: Auto-DevOps.gitlab-ci.yml
EOY
end
end
context 'when config is not defined anywhere' do
let(:ci_config_path) { nil }
before do
expect(project).to receive(:auto_devops_enabled?).and_return(false)
end
it 'does not build the config content and adds an error' do
subject.perform!
expect(pipeline.config_source).to eq('unknown_source')
expect(command.config_content).to be_nil
expect(pipeline.errors.full_messages).to include('Missing CI config file')
end
end
end
end
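As a rough illustration of what the spec above asserts — assuming the same FactoryBot factories and repository stub used in the spec, and the ci_root_config_content flag enabled — the step can be exercised directly like this (values mirror the assertions above; this is a sketch, not part of the commit):

  # Illustration only; assumes factories/stubs from the spec above.
  project  = create(:project, :repository, ci_config_path: 'path/to/config.yml')
  pipeline = build(:ci_pipeline, project: project)
  command  = Gitlab::Ci::Pipeline::Chain::Command.new(project: project)

  allow(project.repository).to receive(:gitlab_ci_yml_for)
    .with(pipeline.sha, 'path/to/config.yml')
    .and_return('the-content')

  Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command).perform!

  pipeline.config_source  # => "repository_source"
  command.config_content  # => "---\ninclude:\n- local: path/to/config.yml\n"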
......@@ -28,7 +28,7 @@ describe ProjectWiki do
describe '#web_url' do
it 'returns the full web URL to the wiki' do
expect(subject.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}/wikis/home")
expect(subject.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}/-/wikis/home")
end
end
......@@ -71,7 +71,7 @@ describe ProjectWiki do
describe "#wiki_base_path" do
it "returns the wiki base path" do
wiki_base_path = "#{Gitlab.config.gitlab.relative_url_root}/#{project.full_path}/wikis"
wiki_base_path = "#{Gitlab.config.gitlab.relative_url_root}/#{project.full_path}/-/wikis"
expect(subject.wiki_base_path).to eq(wiki_base_path)
end
......
......@@ -384,7 +384,7 @@ describe API::Pipelines do
post api("/projects/#{project.id}/pipeline", user), params: { ref: project.default_branch }
expect(response).to have_gitlab_http_status(400)
expect(json_response['message']['base'].first).to eq 'Missing .gitlab-ci.yml file'
expect(json_response['message']['base'].first).to eq 'Missing CI config file'
expect(json_response).not_to be_an Array
end
end
......
......@@ -155,17 +155,21 @@ describe 'project routing' do
# DELETE /:project_id/wikis/:id(.:format) projects/wikis#destroy
describe Projects::WikisController, 'routing' do
it 'to #pages' do
expect(get('/gitlab/gitlabhq/wikis/pages')).to route_to('projects/wikis#pages', namespace_id: 'gitlab', project_id: 'gitlabhq')
expect(get('/gitlab/gitlabhq/-/wikis/pages')).to route_to('projects/wikis#pages', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #history' do
expect(get('/gitlab/gitlabhq/wikis/1/history')).to route_to('projects/wikis#history', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
expect(get('/gitlab/gitlabhq/-/wikis/1/history')).to route_to('projects/wikis#history', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it_behaves_like 'RESTful project resources' do
let(:actions) { [:create, :edit, :show, :destroy] }
let(:controller) { 'wikis' }
let(:controller_path) { '/-/wikis' }
end
it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/wikis", "/gitlab/gitlabhq/-/wikis"
it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/wikis/home/edit", "/gitlab/gitlabhq/-/wikis/home/edit"
end
# branches_project_repository GET /:project_id/repository/branches(.:format) projects/repositories#branches
......
# frozen_string_literal: true
require 'spec_helper'
describe MergeRequestPollCachedWidgetEntity do
include ProjectForksHelper
let(:project) { create :project, :repository }
let(:resource) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
subject do
described_class.new(resource, request: request).as_json
end
it 'has the latest sha of the target branch' do
is_expected.to include(:target_branch_sha)
end
describe 'diverged_commits_count' do
context 'when MR is open and diverging' do
it 'returns diverged commits count' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true,
diverged_commits_count: 10)
expect(subject[:diverged_commits_count]).to eq(10)
end
end
context 'when MR is not open' do
it 'returns 0' do
allow(resource).to receive_messages(open?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
context 'when MR is not diverging' do
it 'returns 0' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
end
describe 'diff_head_sha' do
before do
allow(resource).to receive(:diff_head_sha) { 'sha' }
end
context 'when diff head commit is empty' do
it 'returns nil' do
allow(resource).to receive(:diff_head_sha) { '' }
expect(subject[:diff_head_sha]).to be_nil
end
end
context 'when diff head commit present' do
it 'returns diff head commit short id' do
expect(subject[:diff_head_sha]).to eq('sha')
end
end
end
describe 'metrics' do
context 'when metrics record exists with merged data' do
before do
resource.mark_as_merged!
resource.metrics.update!(merged_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(resource.metrics.merged_by_id)
end
end
context 'when metrics record exists with closed data' do
before do
resource.close!
resource.metrics.update!(latest_closed_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(resource.metrics.latest_closed_by_id)
end
end
context 'when metrics record does not exist' do
before do
resource.mark_as_merged!
resource.metrics.destroy!
resource.reload
end
context 'when events exist' do
let!(:closed_event) { create(:event, :closed, project: project, target: resource) }
let!(:merge_event) { create(:event, :merged, project: project, target: resource) }
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from events record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(merge_event.author_id)
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(closed_event.author_id)
expect(subject.dig(:metrics, :merged_at).to_s)
.to eq(merge_event.updated_at.to_s)
expect(subject.dig(:metrics, :closed_at).to_s)
.to eq(closed_event.updated_at.to_s)
end
end
context 'when events do not exist' do
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
end
end
end
describe 'commits_without_merge_commits' do
def find_matching_commit(short_id)
resource.commits.find { |c| c.short_id == short_id }
end
it 'does not include merge commits' do
commits_in_widget = subject[:commits_without_merge_commits]
expect(commits_in_widget.length).to be < resource.commits.length
expect(commits_in_widget.length).to eq(resource.commits.without_merge_commits.length)
commits_in_widget.each do |c|
expect(find_matching_commit(c[:short_id]).merge_commit?).to eq(false)
end
end
end
describe 'auto merge' do
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_truthy
end
end
context 'when auto merge is not enabled' do
let(:resource) { create(:merge_request) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_falsy
end
end
end
describe 'attributes for squash commit message' do
context 'when merge request is mergeable' do
before do
stub_const('MergeRequestDiff::COMMITS_SAFE_SIZE', 20)
end
it 'has default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message])
.to eq(resource.default_squash_commit_message)
expect(subject[:commits_without_merge_commits].size).to eq(12)
end
end
context 'when merge request is not mergeable' do
before do
allow(resource).to receive(:mergeable?).and_return(false)
end
it 'does not have default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message]).to eq(nil)
expect(subject[:commits_without_merge_commits]).to eq(nil)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe MergeRequestPollWidgetEntity do
include ProjectForksHelper
let(:project) { create :project, :repository }
let(:resource) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
subject do
described_class.new(resource, request: request).as_json
end
it 'has default_merge_commit_message_with_description' do
expect(subject[:default_merge_commit_message_with_description])
.to eq(resource.default_merge_commit_message(include_description: true))
end
describe 'merge_pipeline' do
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
context 'when it is merged' do
let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
before do
project.add_maintainer(user)
end
it 'returns merge_pipeline' do
pipeline.reload
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: request)
.as_json
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
end
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
end
end
end
describe 'new_blob_path' do
context 'when user can push to project' do
it 'returns path' do
project.add_developer(user)
expect(subject[:new_blob_path])
.to eq("/#{resource.project.full_path}/new/#{resource.source_branch}")
end
end
context 'when user cannot push to project' do
it 'returns nil' do
expect(subject[:new_blob_path]).to be_nil
end
end
end
describe 'exposed_artifacts_path' do
context 'when merge request has exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(true)
end
it 'sets the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_present
end
end
context 'when merge request has no exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(false)
end
it 'does not set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_nil
end
end
end
describe 'auto merge' do
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_strategy]).to eq('merge_when_pipeline_succeeds')
end
end
context 'when auto merge is not enabled' do
let(:resource) { create(:merge_request) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_strategy]).to be_nil
end
end
context 'when head pipeline is running' do
before do
create(:ci_pipeline, :running, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to eq(%w[merge_when_pipeline_succeeds])
end
end
context 'when head pipeline is finished' do
before do
create(:ci_pipeline, :success, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to be_empty
end
end
end
describe 'pipeline' do
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
before do
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(result)
end
context 'when user has access to pipelines' do
let(:result) { true }
context 'when it is up to date' do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: req)
.as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
end
context 'when it is not up to date' do
it 'returns nil' do
pipeline.update(sha: "not up to date")
expect(subject[:pipeline]).to eq(nil)
end
end
end
context 'when user does not have access to pipelines' do
let(:result) { false }
it 'does not have pipeline' do
expect(subject[:pipeline]).to eq(nil)
end
end
end
end
......@@ -15,10 +15,6 @@ describe MergeRequestWidgetEntity do
described_class.new(resource, request: request).as_json
end
it 'has the latest sha of the target branch' do
is_expected.to include(:target_branch_sha)
end
describe 'source_project_full_path' do
it 'includes the full path of the source project' do
expect(subject[:source_project_full_path]).to be_present
......@@ -47,156 +43,6 @@ describe MergeRequestWidgetEntity do
end
end
describe 'pipeline' do
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
before do
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(result)
end
context 'when user has access to pipelines' do
let(:result) { true }
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: req)
.as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
end
context 'when is not up to date' do
it 'returns nil' do
pipeline.update(sha: "not up to date")
expect(subject[:pipeline]).to eq(nil)
end
end
end
context 'when user does not have access to pipelines' do
let(:result) { false }
it 'does not have pipeline' do
expect(subject[:pipeline]).to eq(nil)
end
end
end
describe 'merge_pipeline' do
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
context 'when is merged' do
let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
before do
project.add_maintainer(user)
end
it 'returns merge_pipeline' do
pipeline.reload
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: request)
.as_json
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
end
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
end
end
end
describe 'metrics' do
context 'when metrics record exists with merged data' do
before do
resource.mark_as_merged!
resource.metrics.update!(merged_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(resource.metrics.merged_by_id)
end
end
context 'when metrics record exists with closed data' do
before do
resource.close!
resource.metrics.update!(latest_closed_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(resource.metrics.latest_closed_by_id)
end
end
context 'when metrics does not exists' do
before do
resource.mark_as_merged!
resource.metrics.destroy!
resource.reload
end
context 'when events exists' do
let!(:closed_event) { create(:event, :closed, project: project, target: resource) }
let!(:merge_event) { create(:event, :merged, project: project, target: resource) }
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from events record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(merge_event.author_id)
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(closed_event.author_id)
expect(subject.dig(:metrics, :merged_at).to_s)
.to eq(merge_event.updated_at.to_s)
expect(subject.dig(:metrics, :closed_at).to_s)
.to eq(closed_event.updated_at.to_s)
end
end
context 'when events does not exists' do
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
end
end
end
it 'has email_patches_path' do
expect(subject[:email_patches_path])
.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.patch")
......@@ -207,100 +53,6 @@ describe MergeRequestWidgetEntity do
.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.diff")
end
it 'has default_merge_commit_message_with_description' do
expect(subject[:default_merge_commit_message_with_description])
.to eq(resource.default_merge_commit_message(include_description: true))
end
describe 'attributes for squash commit message' do
context 'when merge request is mergeable' do
before do
stub_const('MergeRequestDiff::COMMITS_SAFE_SIZE', 20)
end
it 'has default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message])
.to eq(resource.default_squash_commit_message)
expect(subject[:commits_without_merge_commits].size).to eq(12)
end
end
context 'when merge request is not mergeable' do
before do
allow(resource).to receive(:mergeable?).and_return(false)
end
it 'does not have default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message]).to eq(nil)
expect(subject[:commits_without_merge_commits]).to eq(nil)
end
end
end
describe 'new_blob_path' do
context 'when user can push to project' do
it 'returns path' do
project.add_developer(user)
expect(subject[:new_blob_path])
.to eq("/#{resource.project.full_path}/new/#{resource.source_branch}")
end
end
context 'when user cannot push to project' do
it 'returns nil' do
expect(subject[:new_blob_path]).to be_nil
end
end
end
describe 'diff_head_sha' do
before do
allow(resource).to receive(:diff_head_sha) { 'sha' }
end
context 'when diff head commit is empty' do
it 'returns nil' do
allow(resource).to receive(:diff_head_sha) { '' }
expect(subject[:diff_head_sha]).to be_nil
end
end
context 'when diff head commit present' do
it 'returns diff head commit short id' do
expect(subject[:diff_head_sha]).to eq('sha')
end
end
end
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true,
diverged_commits_count: 10)
expect(subject[:diverged_commits_count]).to eq(10)
end
end
context 'when MR is not open' do
it 'returns 0' do
allow(resource).to receive_messages(open?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
context 'when MR is not diverging' do
it 'returns 0' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
end
describe 'when source project is deleted' do
let(:project) { create(:project, :repository) }
let(:forked_project) { fork_project(project) }
......@@ -316,88 +68,4 @@ describe MergeRequestWidgetEntity do
expect(entity[:rebase_path]).to be_nil
end
end
describe 'commits_without_merge_commits' do
def find_matching_commit(short_id)
resource.commits.find { |c| c.short_id == short_id }
end
it 'does not include merge commits' do
commits_in_widget = subject[:commits_without_merge_commits]
expect(commits_in_widget.length).to be < resource.commits.length
expect(commits_in_widget.length).to eq(resource.commits.without_merge_commits.length)
commits_in_widget.each do |c|
expect(find_matching_commit(c[:short_id]).merge_commit?).to eq(false)
end
end
end
describe 'auto merge' do
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_truthy
expect(subject[:auto_merge_strategy]).to eq('merge_when_pipeline_succeeds')
end
end
context 'when auto merge is not enabled' do
let(:resource) { create(:merge_request) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_falsy
expect(subject[:auto_merge_strategy]).to be_nil
end
end
context 'when head pipeline is running' do
before do
create(:ci_pipeline, :running, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to eq(%w[merge_when_pipeline_succeeds])
end
end
context 'when head pipeline is finished' do
before do
create(:ci_pipeline, :success, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to be_empty
end
end
end
describe 'exposed_artifacts_path' do
context 'when merge request has exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(true)
end
it 'set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_present
end
end
context 'when merge request has no exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(false)
end
it 'set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_nil
end
end
end
end
......@@ -10,7 +10,7 @@ describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/master' }
before do
stub_repository_ci_yaml_file(sha: anything)
stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
end
describe '#execute' do
......@@ -510,7 +510,7 @@ describe Ci::CreatePipelineService do
it 'attaches errors to the pipeline' do
pipeline = execute_service
expect(pipeline.errors.full_messages).to eq ['Missing .gitlab-ci.yml file']
expect(pipeline.errors.full_messages).to eq ['Missing CI config file']
expect(pipeline).not_to be_persisted
end
end
......
......@@ -19,24 +19,28 @@ module StubGitlabCalls
end
def stub_ci_pipeline_yaml_file(ci_yaml_content)
allow_any_instance_of(Repository).to receive(:gitlab_ci_yml_for).and_return(ci_yaml_content)
allow_any_instance_of(Repository)
.to receive(:gitlab_ci_yml_for)
.and_return(ci_yaml_content)
# Ensure we don't hit auto-devops when config not found in repository
unless ci_yaml_content
allow_any_instance_of(Project).to receive(:auto_devops_enabled?).and_return(false)
end
# Stub the first call to `include:[local: .gitlab-ci.yml]` when
# evaluating the CI root config content.
if Feature.enabled?(:ci_root_config_content, default_enabled: true)
allow_any_instance_of(Gitlab::Ci::Config::External::File::Local)
.to receive(:content)
.and_return(ci_yaml_content)
end
end
def stub_pipeline_modified_paths(pipeline, modified_paths)
allow(pipeline).to receive(:modified_paths).and_return(modified_paths)
end
def stub_repository_ci_yaml_file(sha:, path: '.gitlab-ci.yml')
allow_any_instance_of(Repository)
.to receive(:gitlab_ci_yml_for).with(sha, path)
.and_return(gitlab_ci_yaml)
end
def stub_ci_builds_disabled
allow_any_instance_of(Project).to receive(:builds_enabled?).and_return(false)
end
......
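The comment in the updated helper explains why the extra stub is needed: with the ci_root_config_content flag enabled, the generated root config `include`s the local `.gitlab-ci.yml`, so the external-file resolution must return the same stubbed content. A rough usage sketch (hypothetical spec body; `gitlab_ci_yaml` is the default YAML helper already used in the hunks above):

  # Illustration only.
  stub_ci_pipeline_yaml_file(gitlab_ci_yaml)  # repository appears to contain a CI config
  stub_ci_pipeline_yaml_file(nil)             # no CI config; Auto DevOps is also stubbed off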
......@@ -16,7 +16,7 @@ describe 'search/_results' do
it 'displays the page size' do
render
expect(rendered).to have_content('Showing 1 - 2 of 3 issues for "foo"')
expect(rendered).to have_content('Showing 1 - 2 of 3 issues for foo')
end
context 'when search results do not have a count' do
......