Commit 8b1228b0 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 96b0c124
...@@ -61,8 +61,10 @@ export const getFileData = ( ...@@ -61,8 +61,10 @@ export const getFileData = (
{ path, makeFileActive = true, openFile = makeFileActive }, { path, makeFileActive = true, openFile = makeFileActive },
) => { ) => {
const file = state.entries[path]; const file = state.entries[path];
const fileDeletedAndReadded = getters.isFileDeletedAndReadded(path);
if (file.raw || (file.tempFile && !file.prevPath)) return Promise.resolve(); if (file.raw || (file.tempFile && !file.prevPath && !fileDeletedAndReadded))
return Promise.resolve();
commit(types.TOGGLE_LOADING, { entry: file }); commit(types.TOGGLE_LOADING, { entry: file });
...@@ -102,11 +104,16 @@ export const setFileMrChange = ({ commit }, { file, mrChange }) => { ...@@ -102,11 +104,16 @@ export const setFileMrChange = ({ commit }, { file, mrChange }) => {
export const getRawFileData = ({ state, commit, dispatch, getters }, { path }) => { export const getRawFileData = ({ state, commit, dispatch, getters }, { path }) => {
const file = state.entries[path]; const file = state.entries[path];
const stagedFile = state.stagedFiles.find(f => f.path === path);
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const fileDeletedAndReadded = getters.isFileDeletedAndReadded(path);
service service
.getRawFileData(file) .getRawFileData(fileDeletedAndReadded ? stagedFile : file)
.then(raw => { .then(raw => {
if (!(file.tempFile && !file.prevPath)) commit(types.SET_FILE_RAW_DATA, { file, raw }); if (!(file.tempFile && !file.prevPath && !fileDeletedAndReadded))
commit(types.SET_FILE_RAW_DATA, { file, raw, fileDeletedAndReadded });
if (file.mrChange && file.mrChange.new_file === false) { if (file.mrChange && file.mrChange.new_file === false) {
const baseSha = const baseSha =
(getters.currentMergeRequest && getters.currentMergeRequest.baseCommitSha) || ''; (getters.currentMergeRequest && getters.currentMergeRequest.baseCommitSha) || '';
...@@ -151,7 +158,7 @@ export const changeFileContent = ({ commit, dispatch, state }, { path, content } ...@@ -151,7 +158,7 @@ export const changeFileContent = ({ commit, dispatch, state }, { path, content }
if (file.changed && indexOfChangedFile === -1) { if (file.changed && indexOfChangedFile === -1) {
commit(types.ADD_FILE_TO_CHANGED, path); commit(types.ADD_FILE_TO_CHANGED, path);
} else if (!file.changed && indexOfChangedFile !== -1) { } else if (!file.changed && !file.tempFile && indexOfChangedFile !== -1) {
commit(types.REMOVE_FILE_FROM_CHANGED, path); commit(types.REMOVE_FILE_FROM_CHANGED, path);
} }
......
...@@ -54,27 +54,29 @@ export default { ...@@ -54,27 +54,29 @@ export default {
} }
}); });
}, },
[types.SET_FILE_RAW_DATA](state, { file, raw }) { [types.SET_FILE_RAW_DATA](state, { file, raw, fileDeletedAndReadded = false }) {
const openPendingFile = state.openFiles.find( const openPendingFile = state.openFiles.find(
f => f.path === file.path && f.pending && !(f.tempFile && !f.prevPath), f =>
f.path === file.path && f.pending && !(f.tempFile && !f.prevPath && !fileDeletedAndReadded),
); );
const stagedFile = state.stagedFiles.find(f => f.path === file.path);
if (file.tempFile && file.content === '') { if (file.tempFile && file.content === '' && !fileDeletedAndReadded) {
Object.assign(state.entries[file.path], { Object.assign(state.entries[file.path], { content: raw });
content: raw, } else if (fileDeletedAndReadded) {
}); Object.assign(stagedFile, { raw });
} else { } else {
Object.assign(state.entries[file.path], { Object.assign(state.entries[file.path], { raw });
raw,
});
} }
if (!openPendingFile) return; if (!openPendingFile) return;
if (!openPendingFile.tempFile) { if (!openPendingFile.tempFile) {
openPendingFile.raw = raw; openPendingFile.raw = raw;
} else if (openPendingFile.tempFile) { } else if (openPendingFile.tempFile && !fileDeletedAndReadded) {
openPendingFile.content = raw; openPendingFile.content = raw;
} else if (fileDeletedAndReadded) {
Object.assign(stagedFile, { raw });
} }
}, },
[types.SET_FILE_BASE_RAW_DATA](state, { file, baseRaw }) { [types.SET_FILE_BASE_RAW_DATA](state, { file, baseRaw }) {
......
const MARKDOWN_EXTENSIONS = ['mdown', 'mkd', 'mkdn', 'md', 'markdown']; const FILENAMES = ['index', 'readme'];
const ASCIIDOC_EXTENSIONS = ['adoc', 'ad', 'asciidoc'];
const OTHER_EXTENSIONS = ['textile', 'rdoc', 'org', 'creole', 'wiki', 'mediawiki', 'rst'];
const EXTENSIONS = [...MARKDOWN_EXTENSIONS, ...ASCIIDOC_EXTENSIONS, ...OTHER_EXTENSIONS];
const PLAIN_FILENAMES = ['readme', 'index'];
const FILE_REGEXP = new RegExp(
`^(${PLAIN_FILENAMES.join('|')})(.(${EXTENSIONS.join('|')}))?$`,
'i',
);
const PLAIN_FILE_REGEXP = new RegExp(`^(${PLAIN_FILENAMES.join('|')})`, 'i');
const EXTENSIONS_REGEXP = new RegExp(`.(${EXTENSIONS.join('|')})$`, 'i');
// eslint-disable-next-line import/prefer-default-export const MARKUP_EXTENSIONS = [
export const readmeFile = blobs => { 'ad',
const readMeFiles = blobs.filter(f => f.name.search(FILE_REGEXP) !== -1); 'adoc',
'asciidoc',
'creole',
'markdown',
'md',
'mdown',
'mediawiki',
'mkd',
'mkdn',
'org',
'rdoc',
'rst',
'textile',
'wiki',
];
const previewableReadme = readMeFiles.find(f => f.name.search(EXTENSIONS_REGEXP) !== -1); const isRichReadme = file => {
const plainReadme = readMeFiles.find(f => f.name.search(PLAIN_FILE_REGEXP) !== -1); const re = new RegExp(`^(${FILENAMES.join('|')})\\.(${MARKUP_EXTENSIONS.join('|')})$`, 'i');
return re.test(file.name);
};
return previewableReadme || plainReadme; const isPlainReadme = file => {
const re = new RegExp(`^(${FILENAMES.join('|')})$`, 'i');
return re.test(file.name);
}; };
// eslint-disable-next-line import/prefer-default-export
export const readmeFile = blobs => blobs.find(isRichReadme) || blobs.find(isPlainReadme);
...@@ -4,7 +4,6 @@ class ProjectCiCdSetting < ApplicationRecord ...@@ -4,7 +4,6 @@ class ProjectCiCdSetting < ApplicationRecord
include IgnorableColumns include IgnorableColumns
# https://gitlab.com/gitlab-org/gitlab/issues/36651 # https://gitlab.com/gitlab-org/gitlab/issues/36651
ignore_column :merge_trains_enabled, remove_with: '12.7', remove_after: '2019-12-22' ignore_column :merge_trains_enabled, remove_with: '12.7', remove_after: '2019-12-22'
belongs_to :project, inverse_of: :ci_cd_settings belongs_to :project, inverse_of: :ci_cd_settings
# The version of the schema that first introduced this model/table. # The version of the schema that first introduced this model/table.
......
# frozen_string_literal: true # frozen_string_literal: true
# SpamCheckService # SpamCheckMethods
# #
# Provide helper methods for checking if a given spammable object has # Provide helper methods for checking if a given spammable object has
# potential spam data. # potential spam data.
# #
# Dependencies: # Dependencies:
# - params with :request # - params with :request
#
module SpamCheckService module SpamCheckMethods
# rubocop:disable Gitlab/ModuleWithInstanceVariables # rubocop:disable Gitlab/ModuleWithInstanceVariables
def filter_spam_check_params def filter_spam_check_params
@request = params.delete(:request) @request = params.delete(:request)
......
# frozen_string_literal: true # frozen_string_literal: true
class CreateSnippetService < BaseService class CreateSnippetService < BaseService
include SpamCheckService include SpamCheckMethods
def execute def execute
filter_spam_check_params filter_spam_check_params
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
module Issues module Issues
class CreateService < Issues::BaseService class CreateService < Issues::BaseService
include SpamCheckService include SpamCheckMethods
include ResolveDiscussions include ResolveDiscussions
def execute def execute
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
module Issues module Issues
class UpdateService < Issues::BaseService class UpdateService < Issues::BaseService
include SpamCheckService include SpamCheckMethods
def execute(issue) def execute(issue)
handle_move_between_ids(issue) handle_move_between_ids(issue)
......
# frozen_string_literal: true # frozen_string_literal: true
class UpdateSnippetService < BaseService class UpdateSnippetService < BaseService
include SpamCheckService include SpamCheckMethods
attr_accessor :snippet attr_accessor :snippet
......
---
title: "Web IDE: Fix Incorrect diff of deletion and addition of the same file"
merge_request: 21680
author:
type: fixed
# frozen_string_literal: true
class DropProjectCiCdSettingsMergeTrainsEnabled < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_column :project_ci_cd_settings, :merge_trains_enabled
end
def down
add_column_with_default :project_ci_cd_settings, :merge_trains_enabled, :boolean, default: false, allow_null: true
end
end
...@@ -3098,7 +3098,6 @@ ActiveRecord::Schema.define(version: 2020_01_08_155731) do ...@@ -3098,7 +3098,6 @@ ActiveRecord::Schema.define(version: 2020_01_08_155731) do
t.integer "project_id", null: false t.integer "project_id", null: false
t.boolean "group_runners_enabled", default: true, null: false t.boolean "group_runners_enabled", default: true, null: false
t.boolean "merge_pipelines_enabled" t.boolean "merge_pipelines_enabled"
t.boolean "merge_trains_enabled", default: false, null: false
t.integer "default_git_depth" t.integer "default_git_depth"
t.index ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true t.index ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true
end end
......
...@@ -32,11 +32,11 @@ In the case of [custom domains](#custom-domains) (but not ...@@ -32,11 +32,11 @@ In the case of [custom domains](#custom-domains) (but not
ports `80` and/or `443`. For that reason, there is some flexibility in the way ports `80` and/or `443`. For that reason, there is some flexibility in the way
which you can set it up: which you can set it up:
1. Run the Pages daemon in the same server as GitLab, listening on a secondary IP. - Run the Pages daemon in the same server as GitLab, listening on a **secondary IP**.
1. Run the Pages daemon in a separate server. In that case, the - Run the Pages daemon in a [separate server](#running-gitlab-pages-on-a-separate-server). In that case, the
[Pages path](#change-storage-path) must also be present in the server that [Pages path](#change-storage-path) must also be present in the server that
the Pages daemon is installed, so you will have to share it via network. the Pages daemon is installed, so you will have to share it via network.
1. Run the Pages daemon in the same server as GitLab, listening on the same IP - Run the Pages daemon in the same server as GitLab, listening on the same IP
but on different ports. In that case, you will have to proxy the traffic with but on different ports. In that case, you will have to proxy the traffic with
a loadbalancer. If you choose that route note that you should use TCP load a loadbalancer. If you choose that route note that you should use TCP load
balancing for HTTPS. If you use TLS-termination (HTTPS-load balancing) the balancing for HTTPS. If you use TLS-termination (HTTPS-load balancing) the
...@@ -182,7 +182,7 @@ The [GitLab Pages README](https://gitlab.com/gitlab-org/gitlab-pages#caveats) ha ...@@ -182,7 +182,7 @@ The [GitLab Pages README](https://gitlab.com/gitlab-org/gitlab-pages#caveats) ha
In addition to the wildcard domains, you can also have the option to configure In addition to the wildcard domains, you can also have the option to configure
GitLab Pages to work with custom domains. Again, there are two options here: GitLab Pages to work with custom domains. Again, there are two options here:
support custom domains with and without TLS certificates. The easiest setup is support custom domains with and without TLS certificates. The easiest setup is
that without TLS certificates. In either case, you'll need a secondary IP. If that without TLS certificates. In either case, you'll need a **secondary IP**. If
you have IPv6 as well as IPv4 addresses, you can use them both. you have IPv6 as well as IPv4 addresses, you can use them both.
### Custom domains ### Custom domains
......
...@@ -426,7 +426,7 @@ Status: 200 OK ...@@ -426,7 +426,7 @@ Status: 200 OK
The link to the next page contains an additional filter `id_after=42` which excludes records we have retrieved already. The link to the next page contains an additional filter `id_after=42` which excludes records we have retrieved already.
Note the type of filter depends on the `order_by` option used and we may have more than one additional filter. Note the type of filter depends on the `order_by` option used and we may have more than one additional filter.
The `Link` header is absent when the end of the collection has been reached and there are no additional records to retrieve. When the end of the collection has been reached and there are no additional records to retrieve, the `Link` header is absent and the resulting array is empty.
We recommend using only the given link to retrieve the next page instead of building your own URL. Apart from the headers shown, We recommend using only the given link to retrieve the next page instead of building your own URL. Apart from the headers shown,
we don't expose additional pagination headers. we don't expose additional pagination headers.
......
...@@ -4,6 +4,7 @@ module API ...@@ -4,6 +4,7 @@ module API
module Helpers module Helpers
include Gitlab::Utils include Gitlab::Utils
include Helpers::Pagination include Helpers::Pagination
include Helpers::PaginationStrategies
SUDO_HEADER = "HTTP_SUDO" SUDO_HEADER = "HTTP_SUDO"
GITLAB_SHARED_SECRET_HEADER = "Gitlab-Shared-Secret" GITLAB_SHARED_SECRET_HEADER = "Gitlab-Shared-Secret"
......
...@@ -3,34 +3,9 @@ ...@@ -3,34 +3,9 @@
module API module API
module Helpers module Helpers
module Pagination module Pagination
# This returns an ActiveRecord relation
def paginate(relation) def paginate(relation)
Gitlab::Pagination::OffsetPagination.new(self).paginate(relation) Gitlab::Pagination::OffsetPagination.new(self).paginate(relation)
end end
# This applies pagination and executes the query
# It always returns an array instead of an ActiveRecord relation
def paginate_and_retrieve!(relation)
offset_or_keyset_pagination(relation).to_a
end
private
def offset_or_keyset_pagination(relation)
return paginate(relation) unless keyset_pagination_enabled?
request_context = Gitlab::Pagination::Keyset::RequestContext.new(self)
unless Gitlab::Pagination::Keyset.available?(request_context, relation)
return error!('Keyset pagination is not yet available for this type of request', 405)
end
Gitlab::Pagination::Keyset.paginate(request_context, relation)
end
def keyset_pagination_enabled?
params[:pagination] == 'keyset' && Feature.enabled?(:api_keyset_pagination, default_enabled: true)
end
end end
end end
end end
# frozen_string_literal: true
module API
module Helpers
module PaginationStrategies
def paginate_with_strategies(relation)
paginator = paginator(relation)
yield(paginator.paginate(relation)).tap do |records, _|
paginator.finalize(records)
end
end
def paginator(relation)
return Gitlab::Pagination::OffsetPagination.new(self) unless keyset_pagination_enabled?
request_context = Gitlab::Pagination::Keyset::RequestContext.new(self)
unless Gitlab::Pagination::Keyset.available?(request_context, relation)
return error!('Keyset pagination is not yet available for this type of request', 405)
end
Gitlab::Pagination::Keyset::Pager.new(request_context)
end
private
def keyset_pagination_enabled?
params[:pagination] == 'keyset' && Feature.enabled?(:api_keyset_pagination, default_enabled: true)
end
end
end
end
...@@ -90,7 +90,8 @@ module API ...@@ -90,7 +90,8 @@ module API
def present_projects(projects, options = {}) def present_projects(projects, options = {})
projects = reorder_projects(projects) projects = reorder_projects(projects)
projects = apply_filters(projects) projects = apply_filters(projects)
projects = paginate(projects)
records, options = paginate_with_strategies(projects) do |projects|
projects, options = with_custom_attributes(projects, options) projects, options = with_custom_attributes(projects, options)
options = options.reverse_merge( options = options.reverse_merge(
...@@ -101,7 +102,10 @@ module API ...@@ -101,7 +102,10 @@ module API
) )
options[:with] = Entities::BasicProjectDetails if params[:simple] options[:with] = Entities::BasicProjectDetails if params[:simple]
present options[:with].prepare_relation(projects, options), options [options[:with].prepare_relation(projects, options), options]
end
present records, options
end end
def translate_params_for_compatibility(params) def translate_params_for_compatibility(params)
......
...@@ -17,7 +17,7 @@ module Gitlab ...@@ -17,7 +17,7 @@ module Gitlab
allow_failure type stage when start_in artifacts cache allow_failure type stage when start_in artifacts cache
dependencies before_script needs after_script variables dependencies before_script needs after_script variables
environment coverage retry parallel extends interruptible timeout environment coverage retry parallel extends interruptible timeout
resource_group].freeze resource_group release].freeze
REQUIRED_BY_NEEDS = %i[stage].freeze REQUIRED_BY_NEEDS = %i[stage].freeze
...@@ -151,14 +151,18 @@ module Gitlab ...@@ -151,14 +151,18 @@ module Gitlab
description: 'Coverage configuration for this job.', description: 'Coverage configuration for this job.',
inherit: false inherit: false
entry :release, Entry::Release,
description: 'This job will produce a release.',
inherit: false
helpers :before_script, :script, :stage, :type, :after_script, helpers :before_script, :script, :stage, :type, :after_script,
:cache, :image, :services, :only, :except, :variables, :cache, :image, :services, :only, :except, :variables,
:artifacts, :environment, :coverage, :retry, :rules, :artifacts, :environment, :coverage, :retry, :rules,
:parallel, :needs, :interruptible :parallel, :needs, :interruptible, :release
attributes :script, :tags, :allow_failure, :when, :dependencies, attributes :script, :tags, :allow_failure, :when, :dependencies,
:needs, :retry, :parallel, :extends, :start_in, :rules, :needs, :retry, :parallel, :extends, :start_in, :rules,
:interruptible, :timeout, :resource_group :interruptible, :timeout, :resource_group, :release
def self.matching?(name, config) def self.matching?(name, config)
!name.to_s.start_with?('.') && !name.to_s.start_with?('.') &&
...@@ -243,6 +247,7 @@ module Gitlab ...@@ -243,6 +247,7 @@ module Gitlab
interruptible: interruptible_defined? ? interruptible_value : nil, interruptible: interruptible_defined? ? interruptible_value : nil,
timeout: has_timeout? ? ChronicDuration.parse(timeout.to_s) : nil, timeout: has_timeout? ? ChronicDuration.parse(timeout.to_s) : nil,
artifacts: artifacts_value, artifacts: artifacts_value,
release: release_value,
after_script: after_script_value, after_script: after_script_value,
ignore: ignored?, ignore: ignored?,
needs: needs_defined? ? needs_value : nil, needs: needs_defined? ? needs_value : nil,
......
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a release configuration.
#
class Release < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[tag_name name description assets].freeze
attributes %i[tag_name name assets].freeze
# Attributable description conflicts with
# ::Gitlab::Config::Entry::Node.description
def has_description?
true
end
def description
config[:description]
end
entry :assets, Entry::Release::Assets, description: 'Release assets.'
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :tag_name, presence: true
validates :description, type: String, presence: true
end
helpers :assets
def value
@config[:assets] = assets_value if @config.key?(:assets)
@config
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of release assets.
#
class Release
class Assets < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[links].freeze
attributes ALLOWED_KEYS
entry :links, Entry::Release::Assets::Links, description: 'Release assets:links.'
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :links, array_of_hashes: true, presence: true
end
helpers :links
def value
@config[:links] = links_value if @config.key?(:links)
@config
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of release:assets:links.
#
class Release
class Assets
class Link < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[name url].freeze
attributes ALLOWED_KEYS
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :name, type: String, presence: true
validates :url, presence: true, addressable_url: true
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of release:assets:links.
#
class Release
class Assets
class Links < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
entry :link, Entry::Release::Assets::Link, description: 'Release assets:links:link.'
validations do
validates :config, type: Array, presence: true
end
def skip_config_hash_validation?
true
end
end
end
end
end
end
end
end
...@@ -81,10 +81,15 @@ module Gitlab ...@@ -81,10 +81,15 @@ module Gitlab
instance: job[:instance], instance: job[:instance],
start_in: job[:start_in], start_in: job[:start_in],
trigger: job[:trigger], trigger: job[:trigger],
bridge_needs: job.dig(:needs, :bridge)&.first bridge_needs: job.dig(:needs, :bridge)&.first,
release: release(job)
}.compact }.compact }.compact }.compact
end end
def release(job)
job[:release] if Feature.enabled?(:ci_release_generation, default_enabled: false)
end
def stage_builds_attributes(stage) def stage_builds_attributes(stage)
@jobs.values @jobs.values
.select { |job| job[:stage] == stage } .select { |job| job[:stage] == stage }
...@@ -133,7 +138,6 @@ module Gitlab ...@@ -133,7 +138,6 @@ module Gitlab
@jobs.each do |name, job| @jobs.each do |name, job|
# logical validation for job # logical validation for job
validate_job_stage!(name, job) validate_job_stage!(name, job)
validate_job_dependencies!(name, job) validate_job_dependencies!(name, job)
validate_job_needs!(name, job) validate_job_needs!(name, job)
......
...@@ -10,7 +10,7 @@ module Gitlab ...@@ -10,7 +10,7 @@ module Gitlab
def attributes(*attributes) def attributes(*attributes)
attributes.flatten.each do |attribute| attributes.flatten.each do |attribute|
if method_defined?(attribute) if method_defined?(attribute)
raise ArgumentError, 'Method already defined!' raise ArgumentError, "Method already defined: #{attribute}"
end end
define_method(attribute) do define_method(attribute) do
......
...@@ -5,7 +5,7 @@ module Gitlab ...@@ -5,7 +5,7 @@ module Gitlab
module Entry module Entry
## ##
# This mixin is responsible for adding DSL, which purpose is to # This mixin is responsible for adding DSL, which purpose is to
# simplifly process of adding child nodes. # simplify the process of adding child nodes.
# #
# This can be used only if parent node is a configuration entry that # This can be used only if parent node is a configuration entry that
# holds a hash as a configuration value, for example: # holds a hash as a configuration value, for example:
......
...@@ -8,9 +8,17 @@ module Gitlab ...@@ -8,9 +8,17 @@ module Gitlab
feature = Feature.get(feature_key) feature = Feature.get(feature_key)
return feature.enabled? if Feature.persisted?(feature) return feature.enabled? if Feature.persisted?(feature)
# Disable Rugged auto-detect(can_use_disk?) when Puma threads>1
# https://gitlab.com/gitlab-org/gitlab/issues/119326
return false if running_puma_with_multiple_threads?
Gitlab::GitalyClient.can_use_disk?(repo.storage) Gitlab::GitalyClient.can_use_disk?(repo.storage)
end end
def running_puma_with_multiple_threads?
Gitlab::Runtime.puma? && ::Puma.cli_config.options[:max_threads] > 1
end
def execute_rugged_call(method_name, *args) def execute_rugged_call(method_name, *args)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do Gitlab::GitalyClient::StorageSettings.allow_disk_access do
start = Gitlab::Metrics::System.monotonic_time start = Gitlab::Metrics::System.monotonic_time
......
...@@ -3,6 +3,14 @@ ...@@ -3,6 +3,14 @@
module Gitlab module Gitlab
module Pagination module Pagination
class Base class Base
def paginate(relation)
raise NotImplementedError
end
def finalize(records)
# Optional: Called with the actual set of records
end
private private
def per_page def per_page
......
...@@ -3,10 +3,6 @@ ...@@ -3,10 +3,6 @@
module Gitlab module Gitlab
module Pagination module Pagination
module Keyset module Keyset
def self.paginate(request_context, relation)
Gitlab::Pagination::Keyset::Pager.new(request_context).paginate(relation)
end
def self.available?(request_context, relation) def self.available?(request_context, relation)
order_by = request_context.page.order_by order_by = request_context.page.order_by
......
...@@ -11,14 +11,13 @@ module Gitlab ...@@ -11,14 +11,13 @@ module Gitlab
# Maximum number of records for a page # Maximum number of records for a page
MAXIMUM_PAGE_SIZE = 100 MAXIMUM_PAGE_SIZE = 100
attr_accessor :lower_bounds, :end_reached attr_accessor :lower_bounds
attr_reader :order_by attr_reader :order_by
def initialize(order_by: {}, lower_bounds: nil, per_page: DEFAULT_PAGE_SIZE, end_reached: false) def initialize(order_by: {}, lower_bounds: nil, per_page: DEFAULT_PAGE_SIZE)
@order_by = order_by.symbolize_keys @order_by = order_by.symbolize_keys
@lower_bounds = lower_bounds&.symbolize_keys @lower_bounds = lower_bounds&.symbolize_keys
@per_page = per_page @per_page = per_page
@end_reached = end_reached
end end
# Number of records to return per page # Number of records to return per page
...@@ -28,17 +27,11 @@ module Gitlab ...@@ -28,17 +27,11 @@ module Gitlab
[@per_page, MAXIMUM_PAGE_SIZE].min [@per_page, MAXIMUM_PAGE_SIZE].min
end end
# Determine whether this page indicates the end of the collection
def end_reached?
@end_reached
end
# Construct a Page for the next page # Construct a Page for the next page
# Uses identical order_by/per_page information for the next page # Uses identical order_by/per_page information for the next page
def next(lower_bounds, end_reached) def next(lower_bounds)
dup.tap do |next_page| dup.tap do |next_page|
next_page.lower_bounds = lower_bounds&.symbolize_keys next_page.lower_bounds = lower_bounds&.symbolize_keys
next_page.end_reached = end_reached
end end
end end
end end
......
...@@ -14,27 +14,20 @@ module Gitlab ...@@ -14,27 +14,20 @@ module Gitlab
# Validate assumption: The last two columns must match the page order_by # Validate assumption: The last two columns must match the page order_by
validate_order!(relation) validate_order!(relation)
# This performs the database query and retrieves records relation.limit(page.per_page) # rubocop: disable CodeReuse/ActiveRecord
# We retrieve one record more to check if we have data beyond this page end
all_records = relation.limit(page.per_page + 1).to_a # rubocop: disable CodeReuse/ActiveRecord
records_for_page = all_records.first(page.per_page)
# If we retrieved more records than belong on this page,
# we know there's a next page
there_is_more = all_records.size > records_for_page.size
apply_headers(records_for_page.last, there_is_more)
records_for_page def finalize(records)
apply_headers(records.last)
end end
private private
def apply_headers(last_record_in_page, there_is_more) def apply_headers(last_record_in_page)
end_reached = last_record_in_page.nil? || !there_is_more return unless last_record_in_page
lower_bounds = last_record_in_page&.slice(page.order_by.keys)
next_page = page.next(lower_bounds, end_reached) lower_bounds = last_record_in_page&.slice(page.order_by.keys)
next_page = page.next(lower_bounds)
request.apply_headers(next_page) request.apply_headers(next_page)
end end
......
...@@ -68,8 +68,6 @@ module Gitlab ...@@ -68,8 +68,6 @@ module Gitlab
end end
def pagination_links(next_page) def pagination_links(next_page)
return if next_page.end_reached?
%(<#{page_href(next_page)}>; rel="next") %(<#{page_href(next_page)}>; rel="next")
end end
......
...@@ -21,6 +21,8 @@ module QA ...@@ -21,6 +21,8 @@ module QA
delete delete_project_request.url delete delete_project_request.url
expect_status(202) expect_status(202)
Page::Main::Menu.perform(&:sign_out_if_signed_in)
end end
it 'user imports a GitHub repo' do it 'user imports a GitHub repo' do
......
...@@ -202,6 +202,53 @@ describe('IDE store file actions', () => { ...@@ -202,6 +202,53 @@ describe('IDE store file actions', () => {
}; };
}); });
describe('call to service', () => {
const callExpectation = serviceCalled => {
store.dispatch('getFileData', { path: localFile.path });
if (serviceCalled) {
expect(service.getFileData).toHaveBeenCalled();
} else {
expect(service.getFileData).not.toHaveBeenCalled();
}
};
beforeEach(() => {
service.getFileData.mockImplementation(() => new Promise(() => {}));
});
it("isn't called if file.raw exists", () => {
localFile.raw = 'raw data';
callExpectation(false);
});
it("isn't called if file is a tempFile", () => {
localFile.raw = '';
localFile.tempFile = true;
callExpectation(false);
});
it('is called if file is a tempFile but also renamed', () => {
localFile.raw = '';
localFile.tempFile = true;
localFile.prevPath = 'old_path';
callExpectation(true);
});
it('is called if tempFile but file was deleted and readded', () => {
localFile.raw = '';
localFile.tempFile = true;
localFile.prevPath = 'old_path';
store.state.stagedFiles = [{ ...localFile, deleted: true }];
callExpectation(true);
});
});
describe('success', () => { describe('success', () => {
beforeEach(() => { beforeEach(() => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce( mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce(
...@@ -332,10 +379,10 @@ describe('IDE store file actions', () => { ...@@ -332,10 +379,10 @@ describe('IDE store file actions', () => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError(); mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError();
}); });
it('dispatches error action', done => { it('dispatches error action', () => {
const dispatch = jest.fn(); const dispatch = jest.fn();
actions return actions
.getFileData( .getFileData(
{ state: store.state, commit() {}, dispatch, getters: store.getters }, { state: store.state, commit() {}, dispatch, getters: store.getters },
{ path: localFile.path }, { path: localFile.path },
...@@ -350,10 +397,7 @@ describe('IDE store file actions', () => { ...@@ -350,10 +397,7 @@ describe('IDE store file actions', () => {
makeFileActive: true, makeFileActive: true,
}, },
}); });
});
done();
})
.catch(done.fail);
}); });
}); });
}); });
...@@ -446,12 +490,14 @@ describe('IDE store file actions', () => { ...@@ -446,12 +490,14 @@ describe('IDE store file actions', () => {
mock.onGet(/(.*)/).networkError(); mock.onGet(/(.*)/).networkError();
}); });
it('dispatches error action', done => { it('dispatches error action', () => {
const dispatch = jest.fn(); const dispatch = jest.fn();
actions return actions
.getRawFileData({ state: store.state, commit() {}, dispatch }, { path: tmpFile.path }) .getRawFileData(
.then(done.fail) { state: store.state, commit() {}, dispatch, getters: store.getters },
{ path: tmpFile.path },
)
.catch(() => { .catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', { expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
text: 'An error occurred whilst loading the file content.', text: 'An error occurred whilst loading the file content.',
...@@ -461,8 +507,6 @@ describe('IDE store file actions', () => { ...@@ -461,8 +507,6 @@ describe('IDE store file actions', () => {
path: tmpFile.path, path: tmpFile.path,
}, },
}); });
done();
}); });
}); });
}); });
......
...@@ -11,7 +11,7 @@ describe('IDE store file mutations', () => { ...@@ -11,7 +11,7 @@ describe('IDE store file mutations', () => {
beforeEach(() => { beforeEach(() => {
localStore = createStore(); localStore = createStore();
localState = localStore.state; localState = localStore.state;
localFile = { ...file(), type: 'blob' }; localFile = { ...file('file'), type: 'blob', content: 'original' };
localState.entries[localFile.path] = localFile; localState.entries[localFile.path] = localFile;
}); });
...@@ -139,35 +139,68 @@ describe('IDE store file mutations', () => { ...@@ -139,35 +139,68 @@ describe('IDE store file mutations', () => {
}); });
describe('SET_FILE_RAW_DATA', () => { describe('SET_FILE_RAW_DATA', () => {
it('sets raw data', () => { const callMutationForFile = f => {
mutations.SET_FILE_RAW_DATA(localState, { mutations.SET_FILE_RAW_DATA(localState, {
file: localFile, file: f,
raw: 'testing', raw: 'testing',
fileDeletedAndReadded: localStore.getters.isFileDeletedAndReadded(localFile.path),
}); });
};
it('sets raw data', () => {
callMutationForFile(localFile);
expect(localFile.raw).toBe('testing'); expect(localFile.raw).toBe('testing');
}); });
it('sets raw data to stagedFile if file was deleted and readded', () => {
localState.stagedFiles = [{ ...localFile, deleted: true }];
localFile.tempFile = true;
callMutationForFile(localFile);
expect(localFile.raw).toBeFalsy();
expect(localState.stagedFiles[0].raw).toBe('testing');
});
it("sets raw data to a file's content if tempFile is empty", () => {
localFile.tempFile = true;
localFile.content = '';
callMutationForFile(localFile);
expect(localFile.raw).toBeFalsy();
expect(localFile.content).toBe('testing');
});
it('adds raw data to open pending file', () => { it('adds raw data to open pending file', () => {
localState.openFiles.push({ ...localFile, pending: true }); localState.openFiles.push({ ...localFile, pending: true });
mutations.SET_FILE_RAW_DATA(localState, { callMutationForFile(localFile);
file: localFile,
raw: 'testing',
});
expect(localState.openFiles[0].raw).toBe('testing'); expect(localState.openFiles[0].raw).toBe('testing');
}); });
it('does not add raw data to open pending tempFile file', () => { it('sets raw to content of a renamed tempFile', () => {
localState.openFiles.push({ ...localFile, pending: true, tempFile: true }); localFile.tempFile = true;
localFile.prevPath = 'old_path';
localState.openFiles.push({ ...localFile, pending: true });
mutations.SET_FILE_RAW_DATA(localState, { callMutationForFile(localFile);
file: localFile,
raw: 'testing',
});
expect(localState.openFiles[0].raw).not.toBe('testing'); expect(localState.openFiles[0].raw).not.toBe('testing');
expect(localState.openFiles[0].content).toBe('testing');
});
it('adds raw data to a staged deleted file if unstaged change has a tempFile of the same name', () => {
localFile.tempFile = true;
localState.openFiles.push({ ...localFile, pending: true });
localState.stagedFiles = [{ ...localFile, deleted: true }];
callMutationForFile(localFile);
expect(localFile.raw).toBeFalsy();
expect(localState.stagedFiles[0].raw).toBe('testing');
}); });
}); });
......
import { readmeFile } from '~/repository/utils/readme'; import { readmeFile } from '~/repository/utils/readme';
describe('readmeFile', () => { describe('readmeFile', () => {
describe('markdown files', () => { it('prefers README with markup over plain text README', () => {
it('returns markdown file', () => {
expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({ expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
name: 'README.md', name: 'README.md',
}); });
expect(readmeFile([{ name: 'README' }, { name: 'index.md' }])).toEqual({
name: 'index.md',
}); });
it('is case insensitive', () => {
expect(readmeFile([{ name: 'README' }, { name: 'readme.rdoc' }])).toEqual({
name: 'readme.rdoc',
}); });
}); });
describe('plain files', () => { it('returns the first README found', () => {
it('returns plain file', () => { expect(readmeFile([{ name: 'INDEX.adoc' }, { name: 'README.md' }])).toEqual({
expect(readmeFile([{ name: 'README' }, { name: 'TEST.md' }])).toEqual({ name: 'INDEX.adoc',
name: 'README',
}); });
expect(readmeFile([{ name: 'readme' }, { name: 'TEST.md' }])).toEqual({
name: 'readme',
}); });
it('expects extension to be separated by dot', () => {
expect(readmeFile([{ name: 'readmeXorg' }, { name: 'index.org' }])).toEqual({
name: 'index.org',
}); });
}); });
describe('non-previewable file', () => { it('returns plain text README when there is no README with markup', () => {
it('returns undefined', () => { expect(readmeFile([{ name: 'README' }, { name: 'NOT_README.md' }])).toEqual({
expect(readmeFile([{ name: 'index.js' }, { name: 'TEST.md' }])).toBe(undefined); name: 'README',
}); });
}); });
it('returns undefined when there are no appropriate files', () => {
expect(readmeFile([{ name: 'index.js' }, { name: 'md.README' }])).toBe(undefined);
expect(readmeFile([])).toBe(undefined);
});
}); });
...@@ -5,70 +5,14 @@ require 'spec_helper' ...@@ -5,70 +5,14 @@ require 'spec_helper'
describe API::Helpers::Pagination do describe API::Helpers::Pagination do
subject { Class.new.include(described_class).new } subject { Class.new.include(described_class).new }
let(:expected_result) { double("result", to_a: double) } let(:paginator) { double('paginator') }
let(:relation) { double("relation") } let(:relation) { double('relation') }
let(:params) { {} } let(:expected_result) { double('expected result') }
before do
allow(subject).to receive(:params).and_return(params)
end
describe '#paginate' do
let(:offset_pagination) { double("offset pagination") }
it 'delegates to OffsetPagination' do it 'delegates to OffsetPagination' do
expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination) expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result) expect(paginator).to receive(:paginate).with(relation).and_return(expected_result)
result = subject.paginate(relation)
expect(result).to eq(expected_result)
end
end
describe '#paginate_and_retrieve!' do
context 'for offset pagination' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(false)
end
it 'delegates to paginate' do
expect(subject).to receive(:paginate).with(relation).and_return(expected_result)
result = subject.paginate_and_retrieve!(relation)
expect(result).to eq(expected_result.to_a)
end
end
context 'for keyset pagination' do
let(:params) { { pagination: 'keyset' } }
let(:request_context) { double('request context') }
before do
allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
end
context 'when keyset pagination is available' do expect(subject.paginate(relation)).to eq(expected_result)
it 'delegates to KeysetPagination' do
expect(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
expect(Gitlab::Pagination::Keyset).to receive(:paginate).with(request_context, relation).and_return(expected_result)
result = subject.paginate_and_retrieve!(relation)
expect(result).to eq(expected_result.to_a)
end
end
context 'when keyset pagination is not available' do
it 'renders a 501 error if keyset pagination isnt available yet' do
expect(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
expect(Gitlab::Pagination::Keyset).not_to receive(:paginate)
expect(subject).to receive(:error!).with(/not yet available/, 405)
subject.paginate_and_retrieve!(relation)
end
end
end
end end
end end
# frozen_string_literal: true
require 'spec_helper'
describe API::Helpers::PaginationStrategies do
subject { Class.new.include(described_class).new }
let(:expected_result) { double("result") }
let(:relation) { double("relation") }
let(:params) { {} }
before do
allow(subject).to receive(:params).and_return(params)
end
describe '#paginate_with_strategies' do
let(:paginator) { double("paginator", paginate: expected_result, finalize: nil) }
before do
allow(subject).to receive(:paginator).with(relation).and_return(paginator)
end
it 'yields paginated relation' do
expect { |b| subject.paginate_with_strategies(relation, &b) }.to yield_with_args(expected_result)
end
it 'calls #finalize with first value returned from block' do
return_value = double
expect(paginator).to receive(:finalize).with(return_value)
subject.paginate_with_strategies(relation) do |records|
some_options = {}
[return_value, some_options]
end
end
it 'returns whatever the block returns' do
return_value = [double, double]
result = subject.paginate_with_strategies(relation) do |records|
return_value
end
expect(result).to eq(return_value)
end
end
describe '#paginator' do
context 'offset pagination' do
let(:paginator) { double("paginator") }
before do
allow(subject).to receive(:keyset_pagination_enabled?).and_return(false)
end
it 'delegates to OffsetPagination' do
expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
expect(subject.paginator(relation)).to eq(paginator)
end
end
context 'for keyset pagination' do
let(:params) { { pagination: 'keyset' } }
let(:request_context) { double('request context') }
let(:pager) { double('pager') }
before do
allow(subject).to receive(:keyset_pagination_enabled?).and_return(true)
allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
end
context 'when keyset pagination is available' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
allow(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
end
it 'delegates to Pager' do
expect(subject.paginator(relation)).to eq(pager)
end
end
context 'when keyset pagination is not available' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
end
it 'renders a 501 error' do
expect(subject).to receive(:error!).with(/not yet available/, 405)
subject.paginator(relation)
end
end
end
end
end
...@@ -24,7 +24,7 @@ describe Gitlab::Ci::Config::Entry::Job do ...@@ -24,7 +24,7 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:result) do let(:result) do
%i[before_script script stage type after_script cache %i[before_script script stage type after_script cache
image services only except rules needs variables artifacts image services only except rules needs variables artifacts
environment coverage retry interruptible timeout tags] environment coverage retry interruptible timeout release tags]
end end
it { is_expected.to match_array result } it { is_expected.to match_array result }
...@@ -122,6 +122,21 @@ describe Gitlab::Ci::Config::Entry::Job do ...@@ -122,6 +122,21 @@ describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid } it { expect(entry).to be_valid }
end end
context 'when it is a release' do
let(:config) do
{
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "v0.06",
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt"
}
}
end
it { expect(entry).to be_valid }
end
end end
end end
...@@ -443,6 +458,25 @@ describe Gitlab::Ci::Config::Entry::Job do ...@@ -443,6 +458,25 @@ describe Gitlab::Ci::Config::Entry::Job do
expect(entry.timeout).to eq('1m 1s') expect(entry.timeout).to eq('1m 1s')
end end
end end
context 'when it is a release' do
context 'when `release:description` is missing' do
let(:config) do
{
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "v0.06",
name: "Release $CI_TAG_NAME"
}
}
end
it "returns error" do
expect(entry).not_to be_valid
expect(entry.errors).to include "release description can't be blank"
end
end
end
end end
end end
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release::Assets::Link do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) do
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
}
end
describe '#value' do
it 'returns link configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when name is not a string' do
let(:config) { { name: 123, url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }
it 'reports error' do
expect(entry.errors)
.to include 'link name should be a string'
end
end
context 'when name is not present' do
let(:config) { { url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }
it 'reports error' do
expect(entry.errors)
.to include "link name can't be blank"
end
end
context 'when url is not addressable' do
let(:config) { { name: "cool-app.zip", url: "xyz" } }
it 'reports error' do
expect(entry.errors)
.to include "link url is blocked: only allowed schemes are http, https"
end
end
context 'when url is not present' do
let(:config) { { name: "cool-app.zip" } }
it 'reports error' do
expect(entry.errors)
.to include "link url can't be blank"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'link config contains unknown keys: test'
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release::Assets::Links do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) do
[
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
name: "cool-app.exe",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
end
describe '#value' do
it 'returns links configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of link is invalid' do
let(:config) { { link: 'xyz' } }
it 'reports error' do
expect(entry.errors)
.to include 'links config should be a array'
end
end
context 'when value of links link is empty' do
let(:config) { { link: [] } }
it 'reports error' do
expect(entry.errors)
.to include "links config should be a array"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'links config should be a array'
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release::Assets do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) do
{
links: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
name: "cool-app.exe",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
}
end
describe '#value' do
it 'returns assets configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of assets is invalid' do
let(:config) { { links: 'xyz' } }
it 'reports error' do
expect(entry.errors)
.to include 'assets links should be an array of hashes'
end
end
context 'when value of assets:links is empty' do
let(:config) { { links: [] } }
it 'reports error' do
expect(entry.errors)
.to include "assets links can't be blank"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'assets config contains unknown keys: test'
end
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) { { tag_name: 'v0.06', description: "./release_changelog.txt" } }
describe '#value' do
it 'returns release configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context "when value includes 'assets' keyword" do
let(:config) do
{
tag_name: 'v0.06',
description: "./release_changelog.txt",
assets: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
}
]
}
end
describe '#value' do
it 'returns release configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context "when value includes 'name' keyword" do
let(:config) do
{
tag_name: 'v0.06',
description: "./release_changelog.txt",
name: "Release $CI_TAG_NAME"
}
end
describe '#value' do
it 'returns release configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of attribute is invalid' do
let(:config) { { description: 10 } }
it 'reports error' do
expect(entry.errors)
.to include 'release description should be a string'
end
end
context 'when release description is missing' do
let(:config) { { tag_name: 'v0.06' } }
it 'reports error' do
expect(entry.errors)
.to include "release description can't be blank"
end
end
context 'when release tag_name is missing' do
let(:config) { { description: "./release_changelog.txt" } }
it 'reports error' do
expect(entry.errors)
.to include "release tag name can't be blank"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'release config contains unknown keys: test'
end
end
end
end
end
end
...@@ -27,16 +27,29 @@ describe Gitlab::Ci::Config::Entry::Root do ...@@ -27,16 +27,29 @@ describe Gitlab::Ci::Config::Entry::Root do
context 'when configuration is valid' do context 'when configuration is valid' do
context 'when top-level entries are defined' do context 'when top-level entries are defined' do
let(:hash) do let(:hash) do
{ before_script: %w(ls pwd), {
before_script: %w(ls pwd),
image: 'ruby:2.2', image: 'ruby:2.2',
default: {}, default: {},
services: ['postgres:9.1', 'mysql:5.5'], services: ['postgres:9.1', 'mysql:5.5'],
variables: { VAR: 'value' }, variables: { VAR: 'value' },
after_script: ['make clean'], after_script: ['make clean'],
stages: %w(build pages), stages: %w(build pages release),
cache: { key: 'k', untracked: true, paths: ['public/'] }, cache: { key: 'k', untracked: true, paths: ['public/'] },
rspec: { script: %w[rspec ls] }, rspec: { script: %w[rspec ls] },
spinach: { before_script: [], variables: {}, script: 'spinach' } } spinach: { before_script: [], variables: {}, script: 'spinach' },
release: {
stage: 'release',
before_script: [],
after_script: [],
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: 'v0.06',
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt"
}
}
}
end end
describe '#compose!' do describe '#compose!' do
...@@ -87,7 +100,7 @@ describe Gitlab::Ci::Config::Entry::Root do ...@@ -87,7 +100,7 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#stages_value' do describe '#stages_value' do
context 'when stages key defined' do context 'when stages key defined' do
it 'returns array of stages' do it 'returns array of stages' do
expect(root.stages_value).to eq %w[build pages] expect(root.stages_value).to eq %w[build pages release]
end end
end end
...@@ -105,8 +118,9 @@ describe Gitlab::Ci::Config::Entry::Root do ...@@ -105,8 +118,9 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#jobs_value' do describe '#jobs_value' do
it 'returns jobs configuration' do it 'returns jobs configuration' do
expect(root.jobs_value).to eq( expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
rspec: { name: :rspec, expect(root.jobs_value[:rspec]).to eq(
{ name: :rspec,
script: %w[rspec ls], script: %w[rspec ls],
before_script: %w(ls pwd), before_script: %w(ls pwd),
image: { name: 'ruby:2.2' }, image: { name: 'ruby:2.2' },
...@@ -116,8 +130,10 @@ describe Gitlab::Ci::Config::Entry::Root do ...@@ -116,8 +130,10 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: {}, variables: {},
ignore: false, ignore: false,
after_script: ['make clean'], after_script: ['make clean'],
only: { refs: %w[branches tags] } }, only: { refs: %w[branches tags] } }
spinach: { name: :spinach, )
expect(root.jobs_value[:spinach]).to eq(
{ name: :spinach,
before_script: [], before_script: [],
script: %w[spinach], script: %w[spinach],
image: { name: 'ruby:2.2' }, image: { name: 'ruby:2.2' },
...@@ -129,6 +145,20 @@ describe Gitlab::Ci::Config::Entry::Root do ...@@ -129,6 +145,20 @@ describe Gitlab::Ci::Config::Entry::Root do
after_script: ['make clean'], after_script: ['make clean'],
only: { refs: %w[branches tags] } } only: { refs: %w[branches tags] } }
) )
expect(root.jobs_value[:release]).to eq(
{ name: :release,
stage: 'release',
before_script: [],
script: ["make changelog | tee release_changelog.txt"],
release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
image: { name: "ruby:2.2" },
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push" },
only: { refs: %w(branches tags) },
variables: {},
after_script: [],
ignore: false }
)
end end
end end
end end
...@@ -261,7 +291,7 @@ describe Gitlab::Ci::Config::Entry::Root do ...@@ -261,7 +291,7 @@ describe Gitlab::Ci::Config::Entry::Root do
# despite the fact, that key is present. See issue #18775 for more # despite the fact, that key is present. See issue #18775 for more
# details. # details.
# #
context 'when entires specified but not defined' do context 'when entries are specified but not defined' do
before do before do
root.compose! root.compose!
end end
......
...@@ -1285,6 +1285,59 @@ module Gitlab ...@@ -1285,6 +1285,59 @@ module Gitlab
end end
end end
describe "release" do
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
let(:config) do
{
stages: ["build", "test", "release"], # rubocop:disable Style/WordArray
release: {
stage: "release",
only: ["tags"],
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "$CI_COMMIT_TAG",
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt",
assets: {
links: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
name: "cool-app.exe",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
}
}
}
}
end
context 'with feature flag active' do
before do
stub_feature_flags(ci_release_generation: true)
end
it "returns release info" do
expect(processor.stage_builds_attributes('release').first[:options])
.to eq(config[:release].except(:stage, :only))
end
end
context 'with feature flag inactive' do
before do
stub_feature_flags(ci_release_generation: false)
end
it "returns release info" do
expect(processor.stage_builds_attributes('release').first[:options].include?(config[:release]))
.to be false
end
end
end
describe '#environment' do describe '#environment' do
let(:config) do let(:config) do
{ {
......
...@@ -59,7 +59,7 @@ describe Gitlab::Config::Entry::Attributable do ...@@ -59,7 +59,7 @@ describe Gitlab::Config::Entry::Attributable do
end end
end end
expectation.to raise_error(ArgumentError, 'Method already defined!') expectation.to raise_error(ArgumentError, 'Method already defined: length')
end end
end end
end end
...@@ -53,6 +53,21 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do ...@@ -53,6 +53,21 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
allow(Feature).to receive(:persisted?).with(feature_flag).and_return(false) allow(Feature).to receive(:persisted?).with(feature_flag).and_return(false)
end end
context 'when running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
end
it 'returns false' do
expect(subject.use_rugged?(repository, feature_flag_name)).to be false
end
end
context 'when not running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
end
it 'returns true when gitaly matches disk' do it 'returns true when gitaly matches disk' do
expect(subject.use_rugged?(repository, feature_flag_name)).to be true expect(subject.use_rugged?(repository, feature_flag_name)).to be true
end end
...@@ -79,6 +94,7 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do ...@@ -79,6 +94,7 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
subject.use_rugged?(repository, feature_flag_name) subject.use_rugged?(repository, feature_flag_name)
end end
end end
end
context 'when feature flag is persisted' do context 'when feature flag is persisted' do
before do before do
...@@ -99,6 +115,37 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do ...@@ -99,6 +115,37 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end end
end end
describe '#running_puma_with_multiple_threads?' do
context 'when using Puma' do
before do
stub_const('::Puma', class_double('Puma'))
allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
end
it 'returns false for single thread Puma' do
allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 1)
expect(subject.running_puma_with_multiple_threads?).to be false
end
it 'returns true for multi-threaded Puma' do
allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
expect(subject.running_puma_with_multiple_threads?).to be true
end
end
context 'when not using Puma' do
before do
allow(Gitlab::Runtime).to receive(:puma?).and_return(false)
end
it 'returns false' do
expect(subject.running_puma_with_multiple_threads?).to be false
end
end
end
def create_temporary_gitaly_metadata_file def create_temporary_gitaly_metadata_file
tmp = Tempfile.new('.gitaly-metadata') tmp = Tempfile.new('.gitaly-metadata')
gitaly_metadata = { gitaly_metadata = {
......
...@@ -30,16 +30,14 @@ describe Gitlab::Pagination::Keyset::Page do ...@@ -30,16 +30,14 @@ describe Gitlab::Pagination::Keyset::Page do
end end
describe '#next' do describe '#next' do
let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page, end_reached: end_reached) } let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page) }
subject { page.next(new_lower_bounds, new_end_reached) } subject { page.next(new_lower_bounds) }
let(:order_by) { { id: :desc } } let(:order_by) { { id: :desc } }
let(:lower_bounds) { { id: 42 } } let(:lower_bounds) { { id: 42 } }
let(:per_page) { 10 } let(:per_page) { 10 }
let(:end_reached) { false }
let(:new_lower_bounds) { { id: 21 } } let(:new_lower_bounds) { { id: 21 } }
let(:new_end_reached) { true }
it 'copies over order_by' do it 'copies over order_by' do
expect(subject.order_by).to eq(page.order_by) expect(subject.order_by).to eq(page.order_by)
...@@ -57,10 +55,5 @@ describe Gitlab::Pagination::Keyset::Page do ...@@ -57,10 +55,5 @@ describe Gitlab::Pagination::Keyset::Page do
expect(subject.lower_bounds).to eq(new_lower_bounds) expect(subject.lower_bounds).to eq(new_lower_bounds)
expect(page.lower_bounds).to eq(lower_bounds) expect(page.lower_bounds).to eq(lower_bounds)
end end
it 'sets end_reached only on new instance' do
expect(subject.end_reached?).to eq(new_end_reached)
expect(page.end_reached?).to eq(end_reached)
end
end end
end end
...@@ -15,15 +15,37 @@ describe Gitlab::Pagination::Keyset::Pager do ...@@ -15,15 +15,37 @@ describe Gitlab::Pagination::Keyset::Pager do
describe '#paginate' do describe '#paginate' do
subject { described_class.new(request).paginate(relation) } subject { described_class.new(request).paginate(relation) }
it 'loads the result relation only once' do it 'does not execute a query' do
expect do expect do
subject subject
end.not_to exceed_query_limit(1) end.not_to exceed_query_limit(0)
end end
it 'applies a LIMIT' do
expect(subject.limit_value).to eq(page.per_page)
end
it 'returns the limited relation' do
expect(subject).to eq(relation.limit(page.per_page))
end
context 'validating the order clause' do
let(:page) { Gitlab::Pagination::Keyset::Page.new(order_by: { created_at: :asc }, per_page: 3) }
it 'raises an error if has a different order clause than the page' do
expect { subject }.to raise_error(ArgumentError, /order_by does not match/)
end
end
end
describe '#finalize' do
let(:records) { relation.limit(page.per_page).load }
subject { described_class.new(request).finalize(records) }
it 'passes information about next page to request' do it 'passes information about next page to request' do
lower_bounds = relation.limit(page.per_page).last.slice(:id) lower_bounds = records.last.slice(:id)
expect(page).to receive(:next).with(lower_bounds, false).and_return(next_page) expect(page).to receive(:next).with(lower_bounds).and_return(next_page)
expect(request).to receive(:apply_headers).with(next_page) expect(request).to receive(:apply_headers).with(next_page)
subject subject
...@@ -32,10 +54,10 @@ describe Gitlab::Pagination::Keyset::Pager do ...@@ -32,10 +54,10 @@ describe Gitlab::Pagination::Keyset::Pager do
context 'when retrieving the last page' do context 'when retrieving the last page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) - page.per_page).order(id: :asc) } let(:relation) { Project.where('id > ?', Project.maximum(:id) - page.per_page).order(id: :asc) }
it 'indicates this is the last page' do it 'indicates there is another (likely empty) page' do
expect(request).to receive(:apply_headers) do |next_page| lower_bounds = records.last.slice(:id)
expect(next_page.end_reached?).to be_truthy expect(page).to receive(:next).with(lower_bounds).and_return(next_page)
end expect(request).to receive(:apply_headers).with(next_page)
subject subject
end end
...@@ -45,24 +67,10 @@ describe Gitlab::Pagination::Keyset::Pager do ...@@ -45,24 +67,10 @@ describe Gitlab::Pagination::Keyset::Pager do
let(:relation) { Project.where('id > ?', Project.maximum(:id) + 1).order(id: :asc) } let(:relation) { Project.where('id > ?', Project.maximum(:id) + 1).order(id: :asc) }
it 'indicates this is the last page' do it 'indicates this is the last page' do
expect(request).to receive(:apply_headers) do |next_page| expect(request).not_to receive(:apply_headers)
expect(next_page.end_reached?).to be_truthy
end
subject subject
end end
end end
it 'returns an array with the loaded records' do
expect(subject).to eq(relation.limit(page.per_page).to_a)
end
context 'validating the order clause' do
let(:page) { Gitlab::Pagination::Keyset::Page.new(order_by: { created_at: :asc }, per_page: 3) }
it 'raises an error if has a different order clause than the page' do
expect { subject }.to raise_error(ArgumentError, /order_by does not match/)
end
end
end end
end end
...@@ -53,7 +53,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do ...@@ -53,7 +53,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
let(:request) { double('request', url: "http://#{Gitlab.config.gitlab.host}/api/v4/projects?foo=bar") } let(:request) { double('request', url: "http://#{Gitlab.config.gitlab.host}/api/v4/projects?foo=bar") }
let(:params) { { foo: 'bar' } } let(:params) { { foo: 'bar' } }
let(:request_context) { double('request context', params: params, request: request) } let(:request_context) { double('request context', params: params, request: request) }
let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }, end_reached?: false) } let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }) }
subject { described_class.new(request_context).apply_headers(next_page) } subject { described_class.new(request_context).apply_headers(next_page) }
...@@ -92,7 +92,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do ...@@ -92,7 +92,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
end end
context 'with descending order' do context 'with descending order' do
let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }, end_reached?: false) } let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }) }
it 'sets Links header with a link to the next page' do it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url) orig_uri = URI.parse(request_context.request.url)
......
...@@ -3,22 +3,6 @@ ...@@ -3,22 +3,6 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::Pagination::Keyset do describe Gitlab::Pagination::Keyset do
describe '.paginate' do
subject { described_class.paginate(request_context, relation) }
let(:request_context) { double }
let(:relation) { double }
let(:pager) { double }
let(:result) { double }
it 'uses Pager to paginate the relation' do
expect(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
expect(pager).to receive(:paginate).with(relation).and_return(result)
expect(subject).to eq(result)
end
end
describe '.available?' do describe '.available?' do
subject { described_class } subject { described_class }
......
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191128162854_drop_project_ci_cd_settings_merge_trains_enabled.rb')
# Verifies the post-migration that drops the unused
# `merge_trains_enabled` column from `project_ci_cd_settings`.
describe DropProjectCiCdSettingsMergeTrainsEnabled, :migration do
  # Raw table handle (not the app model) so the spec is insulated from
  # model-level changes while exercising the migration.
  let!(:project_ci_cd_setting) { table(:project_ci_cd_settings) }

  it 'correctly migrates up and down' do
    reversible_migration do |migration|
      # Before the migration runs, the column must still exist.
      migration.before -> {
        expect(project_ci_cd_setting.column_names).to include("merge_trains_enabled")
      }

      # After migrating up, the column is gone. The column cache must be
      # reset first, or `column_names` would report stale schema data.
      migration.after -> {
        project_ci_cd_setting.reset_column_information
        expect(project_ci_cd_setting.column_names).not_to include("merge_trains_enabled")
      }
    end
  end
end
...@@ -570,6 +570,102 @@ describe API::Projects do ...@@ -570,6 +570,102 @@ describe API::Projects do
let(:projects) { Project.all } let(:projects) { Project.all }
end end
end end
# Exercises keyset (cursor-based) pagination on GET /projects: the `Links`
# response header carries the next-page cursor (`id_after` / `id_before`).
context 'with keyset pagination' do
  let(:current_user) { user }
  let(:projects) { [public_project, project, project2, project3] }

  context 'headers and records' do
    let(:params) { { pagination: 'keyset', order_by: :id, sort: :asc, per_page: 1 } }

    it 'includes a pagination header with link to the next page' do
      get api('/projects', current_user), params: params

      expect(response.header).to include('Links')
      expect(response.header['Links']).to include('pagination=keyset')
      expect(response.header['Links']).to include("id_after=#{public_project.id}")
    end

    it 'contains only the first project with per_page = 1' do
      get api('/projects', current_user), params: params

      expect(response).to have_gitlab_http_status(200)
      expect(json_response).to be_an Array
      expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
    end

    it 'still includes a link if the end has reached and there is no more data after this page' do
      get api('/projects', current_user), params: params.merge(id_after: project2.id)

      expect(response.header).to include('Links')
      expect(response.header['Links']).to include('pagination=keyset')
      expect(response.header['Links']).to include("id_after=#{project3.id}")
    end

    it 'does not include a next link when the page does not have any records' do
      get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))

      expect(response.header).not_to include('Links')
    end

    it 'returns an empty array when the page does not have any records' do
      get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))

      expect(response).to have_gitlab_http_status(200)
      expect(json_response).to eq([])
    end

    # The assertion checks 405 Method Not Allowed; the description
    # previously said "501", contradicting the expected status.
    it 'responds with 405 if order_by is different from id' do
      get api('/projects', current_user), params: params.merge(order_by: :created_at)

      expect(response).to have_gitlab_http_status(405)
    end
  end

  context 'with descending sorting' do
    let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 1 } }

    it 'includes a pagination header with link to the next page' do
      get api('/projects', current_user), params: params

      expect(response.header).to include('Links')
      expect(response.header['Links']).to include('pagination=keyset')
      expect(response.header['Links']).to include("id_before=#{project3.id}")
    end

    it 'contains only the last project with per_page = 1' do
      get api('/projects', current_user), params: params

      expect(response).to have_gitlab_http_status(200)
      expect(json_response).to be_an Array
      expect(json_response.map { |p| p['id'] }).to contain_exactly(project3.id)
    end
  end

  context 'retrieving the full relation' do
    let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 2 } }

    it 'returns all projects' do
      url = '/projects'
      requests = 0
      ids = []

      # Follow the `rel="next"` link from each Links header until the API
      # stops emitting one; the request cap is a circuit breaker against
      # an endless chain of next links.
      while url && requests <= 5 # circuit breaker
        requests += 1
        get api(url, current_user), params: params

        links = response.header['Links']
        url = links&.match(/<[^>]+(\/projects\?[^>]+)>; rel="next"/) do |match|
          match[1]
        end

        ids += JSON.parse(response.body).map { |p| p['id'] }
      end

      expect(ids).to contain_exactly(*projects.map(&:id))
    end
  end
end
end end
describe 'POST /projects' do describe 'POST /projects' do
......
...@@ -34,7 +34,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do ...@@ -34,7 +34,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
it { is_expected.to be_truthy } it { is_expected.to be_truthy }
context 'when the head piipeline succeeded' do context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success } let(:pipeline_status) { :success }
it { is_expected.to be_falsy } it { is_expected.to be_falsy }
......
...@@ -940,7 +940,7 @@ describe Ci::CreatePipelineService do ...@@ -940,7 +940,7 @@ describe Ci::CreatePipelineService do
expect(resource_group.resources.first.build).to eq(nil) expect(resource_group.resources.first.build).to eq(nil)
end end
context 'when resourc group key includes predefined variables' do context 'when resource group key includes predefined variables' do
let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' } let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
it 'interpolates the variables into the key correctly' do it 'interpolates the variables into the key correctly' do
...@@ -969,6 +969,70 @@ describe Ci::CreatePipelineService do ...@@ -969,6 +969,70 @@ describe Ci::CreatePipelineService do
end end
end end
context 'with release' do
  # Shared assertions: with the `ci_release_generation` feature flag on,
  # a pipeline built from `config` must be valid and persisted, and the
  # release definition must be copied into the build's options.
  shared_examples_for 'a successful release pipeline' do
    before do
      stub_feature_flags(ci_release_generation: true)
      stub_ci_pipeline_yaml_file(YAML.dump(config))
    end
    it 'is valid config' do
      pipeline = execute_service
      build = pipeline.builds.first
      expect(pipeline).to be_kind_of(Ci::Pipeline)
      expect(pipeline).to be_valid
      expect(pipeline.yaml_errors).not_to be_present
      expect(pipeline).to be_persisted
      expect(build).to be_kind_of(Ci::Build)
      # `:stage` and `:only` are job-placement keys, not build options,
      # so they are excluded from the comparison.
      expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
    end
  end
  # Minimal release job: only a tag name and a description file.
  context 'simple example' do
    it_behaves_like 'a successful release pipeline' do
      let(:config) do
        {
          release: {
            script: ["make changelog | tee release_changelog.txt"],
            release: {
              tag_name: "v0.06",
              description: "./release_changelog.txt"
            }
          }
        }
      end
    end
  end
  # Full release job: name, tag, description plus asset links (with and
  # without an explicit link name).
  context 'example with all release metadata' do
    it_behaves_like 'a successful release pipeline' do
      let(:config) do
        {
          release: {
            script: ["make changelog | tee release_changelog.txt"],
            release: {
              name: "Release $CI_TAG_NAME",
              tag_name: "v0.06",
              description: "./release_changelog.txt",
              assets: {
                links: [
                  {
                    name: "cool-app.zip",
                    url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
                  },
                  {
                    url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
                  }
                ]
              }
            }
          }
        }
      end
    end
  end
end
shared_examples 'when ref is protected' do shared_examples 'when ref is protected' do
let(:user) { create(:user) } let(:user) { create(:user) }
......
...@@ -45,7 +45,7 @@ describe SpamService do ...@@ -45,7 +45,7 @@ describe SpamService do
context 'when indicated as spam by akismet' do context 'when indicated as spam by akismet' do
shared_examples 'akismet spam' do shared_examples 'akismet spam' do
it 'doesnt check as spam when request is missing' do it "doesn't check as spam when request is missing" do
check_spam(issue, nil, false) check_spam(issue, nil, false)
expect(issue).not_to be_spam expect(issue).not_to be_spam
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment