Commit f78257cb authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent f500600a
......@@ -4,7 +4,6 @@ entry.
## 12.8.4
- No changes.
### Fixed (8 changes)
- Fix Group Import API file upload when object storage is disabled. !25715
......
......@@ -398,7 +398,7 @@ export default {
}"
>
<commit-widget v-if="commit" :commit="commit" />
<div v-if="isBatchLoading" class="loading"><gl-loading-icon /></div>
<div v-if="isBatchLoading" class="loading"><gl-loading-icon size="lg" /></div>
<template v-else-if="renderDiffFiles">
<diff-file
v-for="file in diffFiles"
......
......@@ -89,6 +89,10 @@ module Ci
end
end
def parent_pipeline
pipeline if triggers_child_pipeline?
end
def triggers_child_pipeline?
yaml_for_downstream.present?
end
......
......@@ -787,6 +787,10 @@ module Ci
.fabricate!
end
def find_job_with_archive_artifacts(name)
builds.latest.with_artifacts_archive.find_by_name(name)
end
def latest_builds_with_artifacts
# We purposely cast the builds to an Array here. Because we always use the
# rows if there are more than 0, this prevents us from having to run two
......
......@@ -197,6 +197,8 @@ class Snippet < ApplicationRecord
end
def blobs
return [] unless repository_exists?
repository.ls_files(repository.root_ref).map { |file| Blob.lazy(self, repository.root_ref, file) }
end
......
......@@ -4,6 +4,7 @@ module Projects
class UpdateRepositoryStorageService < BaseService
include Gitlab::ShellAdapter
Error = Class.new(StandardError)
RepositoryAlreadyMoved = Class.new(StandardError)
def initialize(project)
......@@ -17,37 +18,45 @@ module Projects
# exception.
raise RepositoryAlreadyMoved if project.repository_storage == new_repository_storage_key
if mirror_repositories(new_repository_storage_key)
mark_old_paths_for_archive
mirror_repositories(new_repository_storage_key)
project.update(repository_storage: new_repository_storage_key, repository_read_only: false)
project.leave_pool_repository
project.track_project_repository
mark_old_paths_for_archive
enqueue_housekeeping
else
project.update(repository_read_only: false)
end
project.update(repository_storage: new_repository_storage_key, repository_read_only: false)
project.leave_pool_repository
project.track_project_repository
enqueue_housekeeping
success
rescue Error => e
project.update(repository_read_only: false)
Gitlab::ErrorTracking.track_exception(e, project_path: project.full_path)
error(s_("UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}") % { project_full_path: project.full_path, message: e.message })
end
private
def mirror_repositories(new_repository_storage_key)
result = mirror_repository(new_repository_storage_key)
mirror_repository(new_repository_storage_key)
if project.wiki.repository_exists?
result &&= mirror_repository(new_repository_storage_key, type: Gitlab::GlRepository::WIKI)
mirror_repository(new_repository_storage_key, type: Gitlab::GlRepository::WIKI)
end
result
end
def mirror_repository(new_storage_key, type: Gitlab::GlRepository::PROJECT)
return false unless wait_for_pushes(type)
unless wait_for_pushes(type)
raise Error, s_('UpdateRepositoryStorage|Timeout waiting for %{type} repository pushes') % { type: type.name }
end
repository = type.repository_for(project)
full_path = repository.full_path
raw_repository = repository.raw
checksum = repository.checksum
# Initialize a git repository on the target path
gitlab_shell.create_repository(new_storage_key, raw_repository.relative_path, full_path)
......@@ -56,7 +65,15 @@ module Projects
raw_repository.gl_repository,
full_path)
new_repository.fetch_repository_as_mirror(raw_repository)
unless new_repository.fetch_repository_as_mirror(raw_repository)
raise Error, s_('UpdateRepositoryStorage|Failed to fetch %{type} repository as mirror') % { type: type.name }
end
new_checksum = new_repository.checksum
if checksum != new_checksum
raise Error, s_('UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}') % { type: type.name, old: checksum, new: new_checksum }
end
end
def mark_old_paths_for_archive
......
......@@ -77,6 +77,11 @@ class PostReceive # rubocop:disable Scalability/IdempotentWorker
return false unless user
# We can remove this once we implement multi-file snippets
# https://gitlab.com/gitlab-org/gitlab/-/issues/39269
blob = snippet.blobs.first
snippet.update(file_name: blob.path, content: blob.data) if blob
# At the moment, we only expire the repository caches.
# In the future we might need to call ProjectCacheWorker
# (or the custom class we create) to update the snippet
......
---
title: Prevent editing weight from scrolling to the top.
merge_request: 26613
author: Gilang Gumilar
type: fixed
---
title: Add vars to allow air-gapped usage of Retire.js (Dependency Scanning)
merge_request: 26463
author:
type: added
---
title: 'Create child pipelines dynamically using content from artifact as CI configuration'
merge_request: 23790
author:
type: fixed
---
title: Allow selecting all queues with sidekiq-cluster
merge_request: 26594
author:
type: added
---
title: Allow disabling inheritance of default job settings
merge_request: 25690
author:
type: added
---
title: Sync snippet after Git action
merge_request: 26565
author:
type: changed
---
title: Remove kubernetes workaround in container scanning
merge_request: 21188
author:
type: changed
---
title: Ensure checksums match when updating repository storage
merge_request: 26334
author:
type: changed
......@@ -50,4 +50,14 @@ Rails.application.configure do
# BetterErrors live shell (REPL) on every stack frame
BetterErrors::Middleware.allow_ip!("127.0.0.1/0")
# Reassign some performance-related settings when we profile the app
if Gitlab::Utils.to_boolean(ENV['RAILS_PROFILE'].to_s)
warn "Hot-reloading is disabled as you are running with RAILS_PROFILE enabled"
config.cache_classes = true
config.eager_load = true
config.active_record.migration_error = false
config.active_record.verbose_query_logs = false
config.action_view.cache_template_loading = true
end
end
......@@ -17,7 +17,7 @@ members to the group in order to give them maintainer access to the project.
This project will be used for self monitoring your GitLab instance.
## Activating the self monitoring project
## Creating the self monitoring project
1. Navigate to **Admin Area > Settings > Metrics and profiling**, and expand the **Self monitoring** section.
1. Toggle the **Create Project** button on.
......@@ -26,10 +26,11 @@ created, GitLab displays a message with a link to the project. The project
will also be linked in the help text above the **Create Project** button. You can also
find the project under **Projects > Your projects**.
## Deactivating the self monitoring project
## Deleting the self monitoring project
CAUTION: **Warning:**
If you deactivate the self monitoring project, it will be permanently deleted.
If you delete the self monitoring project, you will lose any changes made to the
project. If you create the project again, it will be created in its default state.
1. Navigate to **Admin Area > Settings > Metrics and profiling**, and expand the **Self monitoring** section.
1. Toggle the **Create Project** button off.
......
......@@ -53,6 +53,20 @@ To start extra Sidekiq processes, you must enable `sidekiq-cluster`:
]
```
[In GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26594) and
later, the special queue name `*` means all queues. This starts two
processes, each handling all queues:
```ruby
sidekiq_cluster['queue_groups'] = [
"*",
"*"
]
```
`*` cannot be combined with concrete queue names: `*, mailers` will
just handle the `mailers` queue.
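For example, a group like the following would effectively run only the `mailers` queue, not all queues plus `mailers`:
```ruby
# This group is NOT "all queues plus mailers"; it behaves the
# same as listing only the `mailers` queue.
sidekiq_cluster['queue_groups'] = [
  "*, mailers"
]
```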
1. Save the file and reconfigure GitLab for the changes to take effect:
```shell
......@@ -154,6 +168,10 @@ from highest to lowest precedence:
The operator precedence for this syntax is fixed: it's not possible to make AND
have higher precedence than OR.
[In GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26594) and
later, as with the standard queue group syntax above, a single `*` as the
entire queue group selects all queues.
### Example queries
In `/etc/gitlab/gitlab.rb`:
......@@ -163,9 +181,11 @@ sidekiq_cluster['enable'] = true
sidekiq_cluster['experimental_queue_selector'] = true
sidekiq_cluster['queue_groups'] = [
# Run all non-CPU-bound queues that are high urgency
'resource_boundary!=cpu&urgency=high,
'resource_boundary!=cpu&urgency=high',
# Run all continuous integration and pages queues that are not high urgency
'feature_category=continuous_integration,pages&urgency!=high
'feature_category=continuous_integration,pages&urgency!=high',
# Run all queues
'*'
]
```
......
......@@ -18,6 +18,26 @@ troubleshooting steps that will help you diagnose the bottleneck.
> may be using all available CPU, or have a Ruby Global Interpreter Lock,
> preventing other threads from continuing.
## Log arguments to Sidekiq jobs
If you want to see what arguments are being passed to Sidekiq jobs, you can set
the `SIDEKIQ_LOG_ARGUMENTS` [environment variable](https://docs.gitlab.com/omnibus/settings/environment-variables.html) to `1` (true).
Example:
```ruby
gitlab_rails['env'] = {"SIDEKIQ_LOG_ARGUMENTS" => "1"}
```
Please note: it is not recommended to enable this setting in production because some
Sidekiq jobs (such as sending a password reset email) take secret arguments (for
example, the password reset token).
When using [Sidekiq JSON logging](../logs.md#sidekiqlog),
argument logs are limited to a maximum size of 10 kilobytes of text;
any arguments after this limit will be discarded and replaced with a
single argument containing the string `"..."`.
## Thread dump
Send the Sidekiq process ID the `TTIN` signal and it will output thread
......
......@@ -158,6 +158,42 @@ rspec 2.6:
script: bundle exec rspec
```
### `inherit`
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/207484) in GitLab 12.9.
You can disable inheritance of globally defined defaults
and variables with the `inherit:` parameter.
In the example below:
- `rubocop` **will** inherit both the `before_script` and the variable `DOMAIN`.
- `rspec` **will not** inherit the `before_script` or the variable `DOMAIN`.
- `capybara` **will** inherit the `before_script`, but **will not** inherit the variable `DOMAIN`.
```yaml
default:
before_script:
- echo Hello World
variables:
DOMAIN: example.com
rubocop:
script: bundle exec rubocop
rspec:
inherit:
default: false
variables: false
script: bundle exec rspec
capybara:
inherit:
variables: false
script: bundle exec capybara
```
## Parameter details
The following are detailed explanations for parameters used to configure CI/CD pipelines.
......
......@@ -120,3 +120,16 @@ Bullet will log query problems to both the Rails log and the Chrome
console.
As a follow-up to finding `N+1` queries with Bullet, consider writing a [QueryRecorder test](query_recorder.md) to prevent a regression.
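A minimal sketch of such a test, assuming the `ActiveRecord::QueryRecorder` helper and the `exceed_query_limit` matcher from GitLab's spec suite; the `visit_issues_page` helper and factory names here are illustrative:
```ruby
it 'does not increase the query count when more issues exist' do
  # Capture a baseline of the queries issued for the initial state.
  control = ActiveRecord::QueryRecorder.new { visit_issues_page }

  create_list(:issue, 5, project: project)

  # An N+1 regression would make the second run exceed the baseline.
  expect { visit_issues_page }.not_to exceed_query_limit(control)
end
```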
## Settings that impact performance
1. The `development` environment works with hot-reloading enabled by default. This makes Rails check for file changes on every request, and can create a contention lock, as hot reloading is single-threaded.
1. The `development` environment can load code lazily once a request is fired, which results in the first request always being slow.
To disable these features for profiling/benchmarking, set the `RAILS_PROFILE` environment variable to `true` before starting GitLab. For example, when using GDK:
- Create a file [`env.runit`](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/runit.md#modifying-environment-configuration-for-services) in the root GDK directory
- Add `export RAILS_PROFILE=true` to your `env.runit` file
- Restart GDK with `gdk restart`
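After these steps, the `env.runit` file in the GDK root contains just:
```shell
# env.runit in the GDK root directory
export RAILS_PROFILE=true
```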
*This environment variable only applies in development mode.*
......@@ -2,20 +2,5 @@
## Log arguments to Sidekiq jobs
If you want to see what arguments are being passed to Sidekiq jobs, you can set
the `SIDEKIQ_LOG_ARGUMENTS` [environment variable](https://docs.gitlab.com/omnibus/settings/environment-variables.html) to `1` (true).
Example:
```ruby
gitlab_rails['env'] = {"SIDEKIQ_LOG_ARGUMENTS" => "1"}
```
Please note: it is not recommended to enable this setting in production because some
Sidekiq jobs (such as sending a password reset email) take secret arguments (for
example, the password reset token).
When using [Sidekiq JSON logging](../administration/logs.md#sidekiqlog),
argument logs are limited to a maximum size of 10 kilobytes of text;
any arguments after this limit will be discarded and replaced with a
single argument containing the string `"..."`.
This content has been moved to the
[Troubleshooting Sidekiq docs](../administration/troubleshooting/sidekiq.md).
# frozen_string_literal: true
# This class takes as input a Ci::Build object and an artifact path to read.
# It downloads and extracts the artifacts archive, then returns the content
# of the artifact, if found.
module Gitlab
module Ci
class ArtifactFileReader
Error = Class.new(StandardError)
MAX_ARCHIVE_SIZE = 5.megabytes
def initialize(job)
@job = job
raise ArgumentError, 'Job does not have artifacts' unless @job.artifacts?
validate!
end
def read(path)
return unless job.artifacts_metadata
metadata_entry = job.artifacts_metadata_entry(path)
if metadata_entry.total_size > MAX_ARCHIVE_SIZE
raise Error, "Artifacts archive for job `#{job.name}` is too large: max #{max_archive_size_in_mb}"
end
read_zip_file!(path)
end
private
attr_reader :job
def validate!
if job.job_artifacts_archive.size > MAX_ARCHIVE_SIZE
raise Error, "Artifacts archive for job `#{job.name}` is too large: max #{max_archive_size_in_mb}"
end
unless job.artifacts_metadata?
raise Error, "Job `#{job.name}` has missing artifacts metadata and cannot be extracted!"
end
end
def read_zip_file!(file_path)
job.artifacts_file.use_file do |archive_path|
Zip::File.open(archive_path) do |zip_file|
entry = zip_file.find_entry(file_path)
unless entry
raise Error, "Path `#{file_path}` does not exist inside the `#{job.name}` artifacts archive!"
end
if entry.name_is_directory?
raise Error, "Path `#{file_path}` was expected to be a file but it was a directory!"
end
zip_file.get_input_stream(entry) do |is|
is.read
end
end
end
end
def max_archive_size_in_mb
ActiveSupport::NumberHelper.number_to_human_size(MAX_ARCHIVE_SIZE)
end
end
end
end
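A usage sketch, mirroring how the reader is exercised in the specs further below; it assumes `job` is a `Ci::Build` with an artifacts archive and metadata:
```ruby
# Reads a single file out of the job's artifacts archive;
# raises Gitlab::Ci::ArtifactFileReader::Error if the archive is
# too large, metadata is missing, or the path is absent or a directory.
content = Gitlab::Ci::ArtifactFileReader.new(job).read('generated.yml')
YAML.safe_load(content) if content
```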
......@@ -18,12 +18,9 @@ module Gitlab
attr_reader :root
def initialize(config, project: nil, sha: nil, user: nil)
@context = build_context(project: project, sha: sha, user: user)
if Feature.enabled?(:ci_limit_yaml_expansion, project, default_enabled: true)
@context.set_deadline(TIMEOUT_SECONDS)
end
def initialize(config, project: nil, sha: nil, user: nil, parent_pipeline: nil)
@context = build_context(project: project, sha: sha, user: user, parent_pipeline: parent_pipeline)
@context.set_deadline(TIMEOUT_SECONDS)
@config = expand_config(config)
......@@ -87,11 +84,12 @@ module Gitlab
initial_config
end
def build_context(project:, sha:, user:)
def build_context(project:, sha:, user:, parent_pipeline:)
Config::External::Context.new(
project: project,
sha: sha || project&.repository&.root_ref_sha,
user: user)
user: user,
parent_pipeline: parent_pipeline)
end
def track_and_raise_for_dev_exception(error)
......
......@@ -11,7 +11,7 @@ module Gitlab
class Bridge < ::Gitlab::Config::Entry::Node
include ::Gitlab::Ci::Config::Entry::Processable
ALLOWED_KEYS = %i[trigger allow_failure when variables needs].freeze
ALLOWED_KEYS = %i[trigger allow_failure when needs].freeze
validations do
validates :config, allowed_keys: ALLOWED_KEYS + PROCESSABLE_ALLOWED_KEYS
......@@ -45,10 +45,6 @@ module Gitlab
inherit: false,
metadata: { allowed_needs: %i[job bridge] }
entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
description: 'Environment variables available for this job.',
inherit: false
attributes :when, :allow_failure
def self.matching?(name, config)
......@@ -67,7 +63,6 @@ module Gitlab
needs: (needs_value if needs_defined?),
ignore: !!allow_failure,
when: self.when,
variables: (variables_value if variables_defined?),
scheduling_type: needs_defined? && !bridge_needs ? :dag : :stage
).compact
end
......
......@@ -10,7 +10,7 @@ module Gitlab
class Include < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
ALLOWED_KEYS = %i[local file remote template].freeze
ALLOWED_KEYS = %i[local file remote template artifact job].freeze
validations do
validates :config, hash_or_string: true
......
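For context, the two new keys allow includes of this shape when triggering a child pipeline (the same syntax exercised in the specs below):
```yaml
# Child pipeline configuration: pull `generated.yml` from the
# archive artifacts of the `generator` job in the parent pipeline.
include:
  - artifact: generated.yml
    job: generator
```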
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# This class represents an inherit entry
#
class Inherit < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
ALLOWED_KEYS = %i[default variables].freeze
validations do
validates :config, allowed_keys: ALLOWED_KEYS
end
entry :default, ::Gitlab::Config::Entry::Boolean,
description: 'Indicates whether to inherit `default:`.',
default: true
entry :variables, ::Gitlab::Config::Entry::Boolean,
description: 'Indicates whether to inherit `variables:`.',
default: true
end
end
end
end
end
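As a reference for the shape this entry validates, the `inherit:` docs above use configuration like:
```yaml
rspec:
  inherit:
    default: false   # do not inherit `default:` settings
    variables: false # do not inherit root `variables:`
  script: bundle exec rspec
```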
......@@ -13,7 +13,7 @@ module Gitlab
ALLOWED_WHEN = %w[on_success on_failure always manual delayed].freeze
ALLOWED_KEYS = %i[tags script type image services
allow_failure type when start_in artifacts cache
dependencies before_script needs after_script variables
dependencies before_script needs after_script
environment coverage retry parallel interruptible timeout
resource_group release].freeze
......@@ -112,10 +112,6 @@ module Gitlab
metadata: { allowed_needs: %i[job cross_dependency] },
inherit: false
entry :variables, Entry::Variables,
description: 'Environment variables available for this job.',
inherit: false
entry :environment, Entry::Environment,
description: 'Environment configuration for this job.',
inherit: false
......@@ -174,7 +170,6 @@ module Gitlab
when: self.when,
start_in: self.start_in,
dependencies: dependencies,
variables: variables_defined? ? variables_value : {},
environment: environment_defined? ? environment_value : nil,
environment_name: environment_defined? ? environment_value[:name] : nil,
coverage: coverage_defined? ? coverage_value : nil,
......
......@@ -14,7 +14,7 @@ module Gitlab
include ::Gitlab::Config::Entry::Attributable
include ::Gitlab::Config::Entry::Inheritable
PROCESSABLE_ALLOWED_KEYS = %i[extends stage only except rules].freeze
PROCESSABLE_ALLOWED_KEYS = %i[extends stage only except rules variables inherit].freeze
included do
validations do
......@@ -54,12 +54,21 @@ module Gitlab
allowed_when: %w[on_success on_failure always never manual delayed].freeze
}
entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
description: 'Environment variables available for this job.',
inherit: false
entry :inherit, ::Gitlab::Ci::Config::Entry::Inherit,
description: 'Indicates whether to inherit defaults or not.',
inherit: false,
default: {}
attributes :extends, :rules
end
def compose!(deps = nil)
super do
has_workflow_rules = deps&.workflow&.has_rules?
has_workflow_rules = deps&.workflow_entry&.has_rules?
# If workflow:rules: or rules: are used
# they are considered not compatible
......@@ -73,6 +82,9 @@ module Gitlab
@entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
# inherit root variables
@root_variables_value = deps&.variables_value # rubocop:disable Gitlab/ModuleWithInstanceVariables
yield if block_given?
end
end
......@@ -82,7 +94,10 @@ module Gitlab
end
def overwrite_entry(deps, key, current_entry)
deps.default[key] unless current_entry.specified?
return unless inherit_entry&.default_value
return unless deps.default_entry
deps.default_entry[key] unless current_entry.specified?
end
def value
......@@ -90,9 +105,18 @@ module Gitlab
stage: stage_value,
extends: extends,
rules: rules_value,
variables: root_and_job_variables_value,
only: only_value,
except: except_value }.compact
end
def root_and_job_variables_value
if inherit_entry&.variables_value
@root_variables_value.to_h.merge(variables_value.to_h) # rubocop:disable Gitlab/ModuleWithInstanceVariables
else
variables_value.to_h
end
end
end
end
end
......
......@@ -65,7 +65,8 @@ module Gitlab
reserved: true
entry :workflow, Entry::Workflow,
description: 'List of evaluable rules to determine Pipeline status'
description: 'List of evaluable rules to determine Pipeline status',
default: {}
dynamic_helpers :jobs
......@@ -73,7 +74,7 @@ module Gitlab
:image_value,
:services_value,
:after_script_value,
:cache_value, to: :default
:cache_value, to: :default_entry
attr_reader :jobs_config
......@@ -102,14 +103,6 @@ module Gitlab
end
end
def default
self[:default]
end
def workflow
self[:workflow] if workflow_defined?
end
private
# rubocop: disable CodeReuse/ActiveRecord
......
......@@ -12,7 +12,6 @@ module Gitlab
validations do
validates :config, type: Hash
validates :config, allowed_keys: ALLOWED_KEYS
validates :config, presence: true
end
entry :rules, Entry::Rules,
......
......@@ -7,13 +7,14 @@ module Gitlab
class Context
TimeoutError = Class.new(StandardError)
attr_reader :project, :sha, :user
attr_reader :project, :sha, :user, :parent_pipeline
attr_reader :expandset, :execution_deadline
def initialize(project: nil, sha: nil, user: nil)
def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil)
@project = project
@sha = sha
@user = user
@parent_pipeline = parent_pipeline
@expandset = Set.new
@execution_deadline = 0
......
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module External
module File
class Artifact < Base
extend ::Gitlab::Utils::Override
include Gitlab::Utils::StrongMemoize
attr_reader :job_name
def initialize(params, context)
@location = params[:artifact]
@job_name = params[:job]
super
end
def content
strong_memoize(:content) do
next unless artifact_job
Gitlab::Ci::ArtifactFileReader.new(artifact_job).read(location)
rescue Gitlab::Ci::ArtifactFileReader::Error => error
errors.push(error.message)
end
end
def matching?
super &&
Feature.enabled?(:ci_dynamic_child_pipeline, project)
end
private
def project
context&.parent_pipeline&.project
end
def validate_content!
return unless ensure_preconditions_satisfied!
errors.push("File `#{location}` is empty!") unless content.present?
end
def ensure_preconditions_satisfied!
unless creating_child_pipeline?
errors.push('Including configs from artifacts is only allowed when triggering child pipelines')
return false
end
unless job_name.present?
errors.push("Job must be provided when including configs from artifacts")
return false
end
unless artifact_job.present?
errors.push("Job `#{job_name}` not found in parent pipeline or does not have artifacts!")
return false
end
true
end
def artifact_job
strong_memoize(:artifact_job) do
next unless creating_child_pipeline?
context.parent_pipeline.find_job_with_archive_artifacts(job_name)
end
end
def creating_child_pipeline?
context.parent_pipeline.present?
end
override :expand_context_attrs
def expand_context_attrs
{
project: context.project,
sha: context.sha,
user: context.user,
parent_pipeline: context.parent_pipeline
}
end
end
end
end
end
end
end
......@@ -40,7 +40,8 @@ module Gitlab
{
project: context.project,
sha: context.sha,
user: context.user
user: context.user,
parent_pipeline: context.parent_pipeline
}
end
end
......
......@@ -71,7 +71,8 @@ module Gitlab
{
project: project,
sha: sha,
user: context.user
user: context.user,
parent_pipeline: context.parent_pipeline
}
end
end
......
......@@ -13,7 +13,8 @@ module Gitlab
External::File::Remote,
External::File::Template,
External::File::Local,
External::File::Project
External::File::Project,
External::File::Artifact
].freeze
Error = Class.new(StandardError)
......
......@@ -7,7 +7,7 @@ module Gitlab
class Base
attr_reader :pipeline, :command, :config
delegate :project, :current_user, to: :command
delegate :project, :current_user, :parent_pipeline, to: :command
def initialize(pipeline, command)
@pipeline = pipeline
......
......@@ -72,6 +72,10 @@ module Gitlab
project.repository.ambiguous_ref?(origin_ref)
end
end
def parent_pipeline
bridge&.parent_pipeline
end
end
end
end
......
......@@ -15,7 +15,8 @@ module Gitlab
@command.config_content, {
project: project,
sha: @pipeline.sha,
user: current_user
user: current_user,
parent_pipeline: parent_pipeline
}
)
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
......
......@@ -5,9 +5,7 @@ variables:
container_scanning:
stage: test
image:
name: registry.gitlab.com/gitlab-org/security-products/analyzers/klar:$CS_MAJOR_VERSION
entrypoint: []
image: registry.gitlab.com/gitlab-org/security-products/analyzers/klar:$CS_MAJOR_VERSION
variables:
# By default, use the latest Clair vulnerabilities database; however, allow it to be overridden here with a specific image
# to enable container scanning to run offline, or to provide a consistent list of vulnerabilities for integration testing purposes
......@@ -22,10 +20,7 @@ container_scanning:
- name: $CLAIR_DB_IMAGE
alias: clair-vulnerabilities-db
script:
# the kubernetes executor currently ignores the Docker image entrypoint value, so the start.sh script must
# be explicitly executed here in order for this to work with both the kubernetes and docker executors
# see this issue for more details https://gitlab.com/gitlab-org/gitlab-runner/issues/4125
- /container-scanner/start.sh
- /analyzer run
artifacts:
reports:
container_scanning: gl-container-scanning-report.json
......
......@@ -59,6 +59,8 @@ dependency_scanning:
BUNDLER_AUDIT_UPDATE_DISABLED \
BUNDLER_AUDIT_ADVISORY_DB_URL \
BUNDLER_AUDIT_ADVISORY_DB_REF_NAME \
RETIREJS_JS_ADVISORY_DB \
RETIREJS_NODE_ADVISORY_DB \
) \
--volume "$PWD:/code" \
--volume /var/run/docker.sock:/var/run/docker.sock \
......
......@@ -57,7 +57,7 @@ module Gitlab
when: job[:when] || 'on_success',
environment: job[:environment_name],
coverage_regex: job[:coverage],
yaml_variables: transform_to_yaml_variables(job_variables(name)),
yaml_variables: transform_to_yaml_variables(job[:variables]),
needs_attributes: job.dig(:needs, :job),
interruptible: job[:interruptible],
only: job[:only],
......@@ -146,13 +146,6 @@ module Gitlab
end
end
def job_variables(name)
job_variables = @jobs.dig(name.to_sym, :variables)
@variables.to_h
.merge(job_variables.to_h)
end
def transform_to_yaml_variables(variables)
variables.to_h.map do |key, value|
{ key: key.to_s, value: value, public: true }
......
......@@ -10,7 +10,7 @@ module Gitlab
def attributes(*attributes)
attributes.flatten.each do |attribute|
if method_defined?(attribute)
raise ArgumentError, "Method already defined: #{attribute}"
raise ArgumentError, "Method '#{attribute}' already defined in '#{name}'"
end
define_method(attribute) do
......
......@@ -76,7 +76,7 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def entry(key, entry, description: nil, default: nil, inherit: nil, reserved: nil, metadata: {})
entry_name = key.to_sym
raise ArgumentError, "Entry #{key} already defined" if @nodes.to_h[entry_name]
raise ArgumentError, "Entry '#{key}' already defined in '#{name}'" if @nodes.to_h[entry_name]
factory = ::Gitlab::Config::Entry::Factory.new(entry)
.with(description: description)
......@@ -98,8 +98,8 @@ module Gitlab
def helpers(*nodes, dynamic: false)
nodes.each do |symbol|
if method_defined?("#{symbol}_defined?") || method_defined?("#{symbol}_value")
raise ArgumentError, "Method #{symbol}_defined? or #{symbol}_value already defined"
if method_defined?("#{symbol}_defined?") || method_defined?("#{symbol}_entry") || method_defined?("#{symbol}_value")
raise ArgumentError, "Method '#{symbol}_defined?', '#{symbol}_entry' or '#{symbol}_value' already defined in '#{name}'"
end
unless @nodes.to_h[symbol]
......@@ -110,10 +110,13 @@ module Gitlab
entries[symbol]&.specified?
end
define_method("#{symbol}_value") do
return unless entries[symbol] && entries[symbol].valid?
define_method("#{symbol}_entry") do
entries[symbol]
end
entries[symbol].value
define_method("#{symbol}_value") do
entry = entries[symbol]
entry.value if entry&.valid?
end
end
end
......
......@@ -21221,6 +21221,18 @@ msgstr ""
msgid "UpdateProject|Project could not be updated!"
msgstr ""
msgid "UpdateRepositoryStorage|Error moving repository storage for %{project_full_path} - %{message}"
msgstr ""
msgid "UpdateRepositoryStorage|Failed to fetch %{type} repository as mirror"
msgstr ""
msgid "UpdateRepositoryStorage|Failed to verify %{type} repository checksum from %{old} to %{new}"
msgstr ""
msgid "UpdateRepositoryStorage|Timeout waiting for %{type} repository pushes"
msgstr ""
msgid "Updated"
msgstr ""
......
......@@ -176,11 +176,9 @@ module Trigger
edition = Trigger.ee? ? 'EE' : 'CE'
{
# Back-compatibility until https://gitlab.com/gitlab-org/build/CNG/merge_requests/189 is merged
"GITLAB_#{edition}_VERSION" => ENV['CI_COMMIT_REF_NAME'],
"GITLAB_VERSION" => ENV['CI_COMMIT_REF_NAME'],
"GITLAB_VERSION" => ENV['CI_COMMIT_SHA'],
"GITLAB_TAG" => ENV['CI_COMMIT_TAG'],
"GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_REF_SLUG'],
"GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_SHA'],
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
"#{edition}_PIPELINE" => 'true'
}
......
......@@ -319,18 +319,21 @@ describe Backup::Manager do
context 'when there is a non-tarred backup in the directory' do
before do
allow(Dir).to receieve(:glob).and_return(
allow(Dir).to receive(:glob).and_return(
[
'backup_information.yml'
]
)
allow(File).to receive(:exist?).and_return(true)
end
it 'selects the non-tarred backup to restore from' do
expect(Kernel).not_to receive(:system)
it 'selects the non-tarred backup to restore from' do
expect { subject.unpack }.to output.to_stdout
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
expect(Kernel).not_to receive(:system)
end
subject.unpack
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
end
end
end
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::ArtifactFileReader do
let(:job) { create(:ci_build) }
let(:path) { 'generated.yml' } # included in the ci_build_artifacts.zip
describe '#read' do
subject { described_class.new(job).read(path) }
context 'when job has artifacts and metadata' do
let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) }
let!(:metadata) { create(:ci_job_artifact, :metadata, job: job) }
it 'returns the content at the path' do
is_expected.to be_present
expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
end
context 'when path does not exist' do
let(:path) { 'file/does/not/exist.txt' }
let(:expected_error) do
"Path `#{path}` does not exist inside the `#{job.name}` artifacts archive!"
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when path points to a directory' do
let(:path) { 'other_artifacts_0.1.2' }
let(:expected_error) do
"Path `#{path}` was expected to be a file but it was a directory!"
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when path is nested' do
# path exists in ci_build_artifacts.zip
let(:path) { 'other_artifacts_0.1.2/doc_sample.txt' }
it 'returns the content at the nested path' do
is_expected.to be_present
end
end
context 'when artifact archive size is greater than the limit' do
let(:expected_error) do
"Artifacts archive for job `#{job.name}` is too large: max 1 KB"
end
before do
stub_const("#{described_class}::MAX_ARCHIVE_SIZE", 1.kilobyte)
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when metadata entry shows size greater than the limit' do
let(:expected_error) do
"Artifacts archive for job `#{job.name}` is too large: max 5 MB"
end
before do
expect_next_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) do |entry|
expect(entry).to receive(:total_size).and_return(10.megabytes)
end
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
end
context 'when job does not have metadata artifacts' do
let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) }
let(:expected_error) do
"Job `#{job.name}` has missing artifacts metadata and cannot be extracted!"
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when job does not have artifacts' do
it 'raises ArgumentError' do
expect { subject }.to raise_error(ArgumentError, 'Job does not have artifacts')
end
end
end
end
......@@ -106,6 +106,7 @@ describe Gitlab::Ci::Config::Entry::Bridge do
ignore: false,
stage: 'test',
only: { refs: %w[branches tags] },
variables: {},
scheduling_type: :stage)
end
end
......@@ -128,6 +129,7 @@ describe Gitlab::Ci::Config::Entry::Bridge do
ignore: false,
stage: 'test',
only: { refs: %w[branches tags] },
variables: {},
scheduling_type: :stage)
end
end
......
......@@ -6,6 +6,7 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:entry) { described_class.new(config, name: :rspec) }
it_behaves_like 'with inheritable CI config' do
let(:config) { { script: 'echo' } }
let(:inheritable_key) { 'default' }
let(:inheritable_class) { Gitlab::Ci::Config::Entry::Default }
......@@ -15,6 +16,10 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:ignored_inheritable_columns) do
%i[]
end
before do
allow(entry).to receive_message_chain(:inherit_entry, :default_value).and_return(true)
end
end
describe '.nodes' do
......@@ -24,7 +29,8 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:result) do
%i[before_script script stage type after_script cache
image services only except rules needs variables artifacts
environment coverage retry interruptible timeout release tags]
environment coverage retry interruptible timeout release tags
inherit]
end
it { is_expected.to match_array result }
......@@ -500,7 +506,13 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:unspecified) { double('unspecified', 'specified?' => false) }
let(:default) { double('default', '[]' => unspecified) }
let(:workflow) { double('workflow', 'has_rules?' => false) }
let(:deps) { double('deps', 'default' => default, '[]' => unspecified, 'workflow' => workflow) }
let(:deps) do
double('deps',
'default_entry' => default,
'workflow_entry' => workflow,
'variables_value' => nil)
end
context 'when job config overrides default config' do
before do
......
......@@ -99,6 +99,7 @@ describe Gitlab::Ci::Config::Entry::Jobs do
only: { refs: %w[branches tags] },
stage: 'test',
trigger: { project: 'my/project' },
variables: {},
scheduling_type: :stage
},
regular_job: {
......
......@@ -7,6 +7,10 @@ describe Gitlab::Ci::Config::Entry::Processable do
Class.new(::Gitlab::Config::Entry::Node) do
include Gitlab::Ci::Config::Entry::Processable
entry :tags, ::Gitlab::Config::Entry::ArrayOfStrings,
description: 'Set the default tags.',
inherit: true
def self.name
'job'
end
......@@ -189,14 +193,17 @@ describe Gitlab::Ci::Config::Entry::Processable do
end
describe '#compose!' do
let(:specified) do
double('specified', 'specified?' => true, value: 'specified')
end
let(:unspecified) { double('unspecified', 'specified?' => false) }
let(:default) { double('default', '[]' => unspecified) }
let(:workflow) { double('workflow', 'has_rules?' => false) }
let(:deps) { double('deps', 'default' => default, '[]' => unspecified, 'workflow' => workflow) }
let(:variables) { }
let(:deps) do
double('deps',
default_entry: default,
workflow_entry: workflow,
variables_value: variables)
end
context 'with workflow rules' do
using RSpec::Parameterized::TableSyntax
......@@ -240,6 +247,84 @@ describe Gitlab::Ci::Config::Entry::Processable do
end
end
end
context 'with inheritance' do
context 'of variables' do
let(:config) do
{ variables: { A: 'job', B: 'job' } }
end
before do
entry.compose!(deps)
end
context 'with only job variables' do
it 'does return defined variables' do
expect(entry.value).to include(
variables: { 'A' => 'job', 'B' => 'job' }
)
end
end
context 'when root yaml variables are used' do
let(:variables) do
Gitlab::Ci::Config::Entry::Variables.new(
A: 'root', C: 'root'
).value
end
it 'does return all variables and overwrite them' do
expect(entry.value).to include(
variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root' }
)
end
context 'when inherit of defaults is disabled' do
let(:config) do
{
variables: { A: 'job', B: 'job' },
inherit: { variables: false }
}
end
it 'does return only job variables' do
expect(entry.value).to include(
variables: { 'A' => 'job', 'B' => 'job' }
)
end
end
end
end
context 'of default:tags' do
using RSpec::Parameterized::TableSyntax
where(:default_tags, :tags, :inherit_default, :result) do
nil | %w[a b] | nil | %w[a b]
nil | %w[a b] | true | %w[a b]
nil | %w[a b] | false | %w[a b]
%w[b c] | %w[a b] | nil | %w[a b]
%w[b c] | %w[a b] | true | %w[a b]
%w[b c] | %w[a b] | false | %w[a b]
%w[b c] | nil | nil | %w[b c]
%w[b c] | nil | true | %w[b c]
%w[b c] | nil | false | nil
end
with_them do
let(:config) { { tags: tags, inherit: { default: inherit_default } } }
let(:default_specified_tags) { double('tags', 'specified?' => true, 'valid?' => true, 'value' => default_tags) }
before do
allow(default).to receive('[]').with(:tags).and_return(default_specified_tags)
entry.compose!(deps)
end
it { expect(entry.tags_value).to eq(result) }
end
end
end
end
context 'when composed' do
......@@ -254,10 +339,12 @@ describe Gitlab::Ci::Config::Entry::Processable do
end
it 'returns correct value' do
expect(entry.value)
.to eq(name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] })
expect(entry.value).to eq(
name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] },
variables: {}
)
end
end
end
......
......@@ -32,7 +32,7 @@ describe Gitlab::Ci::Config::Entry::Root do
image: 'ruby:2.2',
default: {},
services: ['postgres:9.1', 'mysql:5.5'],
variables: { VAR: 'value' },
variables: { VAR: 'root' },
after_script: ['make clean'],
stages: %w(build pages release),
cache: { key: 'k', untracked: true, paths: ['public/'] },
......@@ -42,6 +42,7 @@ describe Gitlab::Ci::Config::Entry::Root do
stage: 'release',
before_script: [],
after_script: [],
variables: { 'VAR' => 'job' },
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: 'v0.06',
......@@ -127,7 +128,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
variables: {},
variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
......@@ -141,7 +142,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
variables: {},
variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
......@@ -157,7 +158,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push" },
only: { refs: %w(branches tags) },
variables: {},
variables: { 'VAR' => 'job' },
after_script: [],
ignore: false,
scheduling_type: :stage }
......@@ -175,11 +176,11 @@ describe Gitlab::Ci::Config::Entry::Root do
image: 'ruby:2.1',
services: ['postgres:9.1', 'mysql:5.5']
},
variables: { VAR: 'value' },
variables: { VAR: 'root' },
stages: %w(build pages),
cache: { key: 'k', untracked: true, paths: ['public/'] },
rspec: { script: %w[rspec ls] },
spinach: { before_script: [], variables: { VAR: 'AA' }, script: 'spinach' } }
spinach: { before_script: [], variables: { VAR: 'job' }, script: 'spinach' } }
end
context 'when composed' do
......@@ -203,7 +204,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: "pull-push" },
variables: {},
variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
......@@ -215,7 +216,7 @@ describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: "pull-push" },
variables: { 'VAR' => 'AA' },
variables: { 'VAR' => 'job' },
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Artifact do
let(:parent_pipeline) { create(:ci_pipeline) }
let(:context) do
Gitlab::Ci::Config::External::Context.new(parent_pipeline: parent_pipeline)
end
let(:external_file) { described_class.new(params, context) }
describe '#matching?' do
context 'when params contain artifact location' do
let(:params) { { artifact: 'generated.yml' } }
it 'returns true' do
expect(external_file).to be_matching
end
end
context 'when params does not contain artifact location' do
let(:params) { {} }
it 'returns false' do
expect(external_file).not_to be_matching
end
end
end
describe '#valid?' do
shared_examples 'is invalid' do
it 'is not valid' do
expect(external_file).not_to be_valid
end
it 'sets the expected error' do
expect(external_file.errors)
.to contain_exactly(expected_error)
end
end
describe 'when used in non child pipeline context' do
let(:parent_pipeline) { nil }
let(:params) { { artifact: 'generated.yml' } }
let(:expected_error) do
'Including configs from artifacts is only allowed when triggering child pipelines'
end
it_behaves_like 'is invalid'
end
context 'when used in child pipeline context' do
let(:parent_pipeline) { create(:ci_pipeline) }
context 'when job is not provided' do
let(:params) { { artifact: 'generated.yml' } }
let(:expected_error) do
'Job must be provided when including configs from artifacts'
end
it_behaves_like 'is invalid'
end
context 'when job is provided' do
let(:params) { { artifact: 'generated.yml', job: 'generator' } }
context 'when job does not exist in the parent pipeline' do
let(:expected_error) do
'Job `generator` not found in parent pipeline or does not have artifacts!'
end
it_behaves_like 'is invalid'
end
context 'when job exists in the parent pipeline' do
let!(:generator_job) { create(:ci_build, name: 'generator', pipeline: parent_pipeline) }
context 'when job does not have artifacts' do
let(:expected_error) do
'Job `generator` not found in parent pipeline or does not have artifacts!'
end
it_behaves_like 'is invalid'
end
context 'when job has archive artifacts' do
let!(:artifacts) do
create(:ci_job_artifact, :archive,
job: generator_job,
file: fixture_file_upload(Rails.root.join('spec/fixtures/pages.zip'), 'application/zip'))
end
let(:expected_error) do
'Job `generator` has missing artifacts metadata and cannot be extracted!'
end
it_behaves_like 'is invalid'
context 'when job has artifacts exceeding the max allowed size' do
let(:expected_error) do
"Artifacts archive for job `generator` is too large: max 1 KB"
end
before do
stub_const("#{Gitlab::Ci::ArtifactFileReader}::MAX_ARCHIVE_SIZE", 1.kilobyte)
end
it_behaves_like 'is invalid'
end
context 'when job has artifacts metadata' do
let!(:metadata) do
create(:ci_job_artifact, :metadata, job: generator_job)
end
let(:expected_error) do
'Path `generated.yml` does not exist inside the `generator` artifacts archive!'
end
it_behaves_like 'is invalid'
context 'when file is found in metadata' do
let!(:artifacts) { create(:ci_job_artifact, :archive, job: generator_job) }
let!(:metadata) { create(:ci_job_artifact, :metadata, job: generator_job) }
context 'when file is empty' do
before do
allow_next_instance_of(Gitlab::Ci::ArtifactFileReader) do |reader|
allow(reader).to receive(:read).and_return('')
end
end
let(:expected_error) do
'File `generated.yml` is empty!'
end
it_behaves_like 'is invalid'
end
context 'when file is not empty' do
it 'is valid' do
expect(external_file).to be_valid
expect(external_file.content).to be_present
end
it 'propagates parent_pipeline to nested includes' do
expected_attrs = {
parent_pipeline: parent_pipeline,
project: anything,
sha: anything,
user: anything
}
expect(context).to receive(:mutate).with(expected_attrs).and_call_original
external_file.content
end
end
end
end
end
end
end
end
end
end
......@@ -6,10 +6,19 @@ describe Gitlab::Ci::Config::External::File::Local do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { local: location } }
let(:local_file) { described_class.new(params, context) }
let(:parent_pipeline) { double(:parent_pipeline) }
let(:context_params) do
{
project: project,
sha: sha,
user: user,
parent_pipeline: parent_pipeline
}
end
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
......@@ -117,7 +126,11 @@ describe Gitlab::Ci::Config::External::File::Local do
subject { local_file.send(:expand_context_attrs) }
it 'inherits project, user and sha' do
is_expected.to include(user: user, project: project, sha: sha)
is_expected.to include(
user: user,
project: project,
sha: sha,
parent_pipeline: parent_pipeline)
end
end
......
......@@ -7,10 +7,19 @@ describe Gitlab::Ci::Config::External::File::Project do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:context_user) { user }
let(:context_params) { { project: context_project, sha: '12345', user: context_user } }
let(:parent_pipeline) { double(:parent_pipeline) }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:project_file) { described_class.new(params, context) }
let(:context_params) do
{
project: context_project,
sha: '12345',
user: context_user,
parent_pipeline: parent_pipeline
}
end
before do
project.add_developer(user)
......@@ -152,7 +161,11 @@ describe Gitlab::Ci::Config::External::File::Project do
subject { project_file.send(:expand_context_attrs) }
it 'inherits user, and target project and sha' do
is_expected.to include(user: user, project: project, sha: project.commit('master').id)
is_expected.to include(
user: user,
project: project,
sha: project.commit('master').id,
parent_pipeline: parent_pipeline)
end
end
......
......@@ -376,23 +376,6 @@ describe Gitlab::Ci::Config do
end
end
context 'when context expansion timeout is disabled' do
before do
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!).and_call_original
end
allow(Feature)
.to receive(:enabled?)
.with(:ci_limit_yaml_expansion, project, default_enabled: true)
.and_return(false)
end
it 'does not raises errors' do
expect { config }.not_to raise_error
end
end
describe 'external file version' do
context 'when external local file SHA is defined' do
it 'is using a defined value' do
......@@ -541,5 +524,76 @@ describe Gitlab::Ci::Config do
end
end
end
context 'when including file from artifact' do
let(:config) do
described_class.new(
gitlab_ci_yml,
project: nil,
sha: nil,
user: nil,
parent_pipeline: parent_pipeline)
end
let(:gitlab_ci_yml) do
<<~HEREDOC
include:
- artifact: generated.yml
job: rspec
HEREDOC
end
let(:parent_pipeline) { nil }
context 'when used in the context of a child pipeline' do
# This job has a ci_build_artifacts.zip artifact archive, which
# contains generated.yml
let!(:job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: parent_pipeline) }
let(:parent_pipeline) { create(:ci_pipeline) }
it 'returns valid config' do
expect(config).to be_valid
end
context 'when job key is missing' do
let(:gitlab_ci_yml) do
<<~HEREDOC
include:
- artifact: generated.yml
HEREDOC
end
it 'raises an error' do
expect { config }.to raise_error(
described_class::ConfigError,
'Job must be provided when including configs from artifacts'
)
end
end
context 'when artifact key is missing' do
let(:gitlab_ci_yml) do
<<~HEREDOC
include:
- job: rspec
HEREDOC
end
it 'raises an error' do
expect { config }.to raise_error(
described_class::ConfigError,
/needs to match exactly one accessor!/
)
end
end
end
it 'disallows the use in parent pipelines' do
expect { config }.to raise_error(
described_class::ConfigError,
'Including configs from artifacts is only allowed when triggering child pipelines'
)
end
end
end
end
......@@ -509,28 +509,44 @@ module Gitlab
describe "before_script" do
context "in global context" do
let(:config) do
{
before_script: ["global script"],
test: { script: ["script"] }
}
using RSpec::Parameterized::TableSyntax
where(:inherit, :result) do
nil | ["global script"]
{ default: false } | nil
{ default: true } | ["global script"]
end
it "return commands with scripts concatenated" do
expect(subject[:options][:before_script]).to eq(["global script"])
with_them do
let(:config) do
{
before_script: ["global script"],
test: { script: ["script"], inherit: inherit }
}
end
it { expect(subject[:options][:before_script]).to eq(result) }
end
end
context "in default context" do
let(:config) do
{
default: { before_script: ["global script"] },
test: { script: ["script"] }
}
using RSpec::Parameterized::TableSyntax
where(:inherit, :result) do
nil | ["global script"]
{ default: false } | nil
{ default: true } | ["global script"]
end
it "return commands with scripts concatenated" do
expect(subject[:options][:before_script]).to eq(["global script"])
with_them do
let(:config) do
{
default: { before_script: ["global script"] },
test: { script: ["script"], inherit: inherit }
}
end
it { expect(subject[:options][:before_script]).to eq(result) }
end
end
......@@ -793,7 +809,7 @@ module Gitlab
context 'when job and global variables are defined' do
let(:global_variables) do
{ 'VAR1' => 'global1', 'VAR3' => 'global3' }
{ 'VAR1' => 'global1', 'VAR3' => 'global3', 'VAR4' => 'global4' }
end
let(:job_variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
......@@ -802,16 +818,32 @@ module Gitlab
{
before_script: ['pwd'],
variables: global_variables,
rspec: { script: 'rspec', variables: job_variables }
rspec: { script: 'rspec', variables: job_variables, inherit: inherit }
}
end
it 'returns all unique variables' do
expect(subject).to contain_exactly(
{ key: 'VAR3', value: 'global3', public: true },
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
context 'when no inheritance is specified' do
let(:inherit) { }
it 'returns all unique variables' do
expect(subject).to contain_exactly(
{ key: 'VAR4', value: 'global4', public: true },
{ key: 'VAR3', value: 'global3', public: true },
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
end
end
context 'when inheritance is disabled' do
let(:inherit) { { variables: false } }
it 'does not inherit variables' do
expect(subject).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
end
end
end
......
......@@ -59,7 +59,7 @@ describe Gitlab::Config::Entry::Attributable do
end
end
expectation.to raise_error(ArgumentError, 'Method already defined: length')
expectation.to raise_error(ArgumentError, /Method 'length' already defined in/)
end
end
end
......@@ -25,7 +25,7 @@ describe Ci::JobArtifact do
end
it_behaves_like 'UpdateProjectStatistics' do
subject { build(:ci_job_artifact, :archive, size: 106365) }
subject { build(:ci_job_artifact, :archive, size: 107464) }
end
end
......@@ -35,7 +35,7 @@ describe Ci::JobArtifact do
end
it_behaves_like 'UpdateProjectStatistics' do
subject { build(:ci_job_artifact, :archive, size: 106365) }
subject { build(:ci_job_artifact, :archive, size: 107464) }
end
end
......@@ -173,7 +173,7 @@ describe Ci::JobArtifact do
let(:artifact) { create(:ci_job_artifact, :archive, project: project) }
it 'sets the size from the file size' do
expect(artifact.size).to eq(106365)
expect(artifact.size).to eq(107464)
end
end
......
......@@ -2553,6 +2553,19 @@ describe Ci::Pipeline, :mailer do
end
end
describe '#find_job_with_archive_artifacts' do
let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
let!(:job_without_artifacts) { create(:ci_build, name: 'rspec', pipeline: pipeline) }
let!(:expected_job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: pipeline ) }
let!(:different_job) { create(:ci_build, name: 'deploy', pipeline: pipeline) }
subject { pipeline.find_job_with_archive_artifacts('rspec') }
it 'finds the expected job' do
expect(subject).to eq(expected_job)
end
end
describe '#latest_builds_with_artifacts' do
let!(:fresh_build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let!(:stale_build) { create(:ci_build, :success, :expired, :artifacts, pipeline: pipeline) }
......
......@@ -511,6 +511,32 @@ describe Snippet do
end
end
describe '#blobs' do
let(:snippet) { create(:snippet) }
context 'when repository does not exist' do
it 'returns empty array' do
expect(snippet.blobs).to be_empty
end
end
context 'when repository exists' do
let(:snippet) { create(:snippet, :repository) }
it 'returns array of blobs' do
expect(snippet.blobs).to all(be_a(Blob))
end
end
it 'returns a blob representing the snippet data' do
blob = snippet.blob
expect(blob).to be_a(Blob)
expect(blob.path).to eq(snippet.file_name)
expect(blob.data).to eq(snippet.content)
end
end
describe '#to_json' do
let(:snippet) { build(:snippet) }
......
......@@ -756,7 +756,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies']).to include(
{ 'id' => job.id, 'name' => job.name, 'token' => job.token,
'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 106365 } })
'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
end
end
......
......@@ -4,30 +4,77 @@ require 'spec_helper'
describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:admin) }
let(:upstream_pipeline) { create(:ci_pipeline) }
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
let(:upstream_pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) { create(:ci_bridge, pipeline: upstream_pipeline) }
subject { service.execute(:push, bridge: bridge) }
context 'custom config content' do
let(:bridge) do
create(:ci_bridge, status: 'running', pipeline: upstream_pipeline, project: upstream_pipeline.project).tap do |bridge|
allow(bridge).to receive(:yaml_for_downstream).and_return(
<<~YML
rspec:
script: rspec
custom:
script: custom
YML
)
allow(bridge).to receive(:yaml_for_downstream).and_return(config_from_bridge)
end
end
subject { service.execute(:push, bridge: bridge) }
let(:config_from_bridge) do
<<~YML
rspec:
script: rspec
custom:
script: custom
YML
end
before do
allow(bridge).to receive(:yaml_for_downstream).and_return config_from_bridge
end
it 'creates a pipeline using the content passed in as param' do
expect(subject).to be_persisted
expect(subject.builds.map(&:name)).to eq %w[rspec custom]
expect(subject.config_source).to eq 'bridge_source'
end
context 'when bridge includes yaml from artifact' do
# The generated.yml is available inside the ci_build_artifacts.zip associated
# with the generator_job
let(:config_from_bridge) do
<<~YML
include:
- artifact: generated.yml
job: generator
YML
end
context 'when referenced job exists' do
let!(:generator_job) do
create(:ci_build, :artifacts,
project: project,
pipeline: upstream_pipeline,
name: 'generator')
end
it 'creates a pipeline using the content passed in as param and downloads the artifact' do
expect(subject).to be_persisted
expect(subject.builds.pluck(:name)).to eq %w[rspec time custom]
expect(subject.config_source).to eq 'bridge_source'
end
end
context 'when referenced job does not exist' do
it 'creates an empty pipeline' do
expect(subject).to be_persisted
expect(subject).to be_failed
expect(subject.errors.full_messages)
.to contain_exactly(
'Job `generator` not found in parent pipeline or does not have artifacts!')
expect(subject.builds.pluck(:name)).to be_empty
expect(subject.config_source).to eq 'bridge_source'
end
end
end
end
end
......@@ -315,6 +315,7 @@ describe Projects::ForkService do
# Stub everything required to move a project to a Gitaly shard that does not exist
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum).and_return(::Gitlab::Git::BLANK_SHA)
Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
fork_after_move = fork_project(project)
......
......@@ -16,6 +16,15 @@ describe Projects::UpdateRepositoryStorageService do
context 'without wiki and design repository' do
let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: false) }
let!(:checksum) { project.repository.checksum }
let(:project_repository_double) { double(:repository) }
before do
# Let every other repository be constructed for real...
allow(Gitlab::Git::Repository).to receive(:new).and_call_original
# ...but hand back a double for the repository on the target storage
allow(Gitlab::Git::Repository).to receive(:new)
.with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
.and_return(project_repository_double)
end
context 'when the move succeeds' do
it 'moves the repository to the new storage and unmarks the repository as read-only' do
......@@ -23,10 +32,14 @@ describe Projects::UpdateRepositoryStorageService do
project.repository.path_to_repo
end
expect_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror)
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
result = subject.execute('test_second_storage')
subject.execute('test_second_storage')
expect(result[:status]).to eq(:success)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
......@@ -44,16 +57,50 @@ describe Projects::UpdateRepositoryStorageService do
context 'when the move fails' do
it 'unmarks the repository as read-only without updating the repository storage' do
expect_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror)
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(false)
expect(GitlabShellWorker).not_to receive(:perform_async)
subject.execute('test_second_storage')
result = subject.execute('test_second_storage')
expect(result[:status]).to eq(:error)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
end
context 'when the checksum does not match' do
it 'unmarks the repository as read-only without updating the repository storage' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum')
expect(GitlabShellWorker).not_to receive(:perform_async)
result = subject.execute('test_second_storage')
expect(result[:status]).to eq(:error)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
end
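Together with the success cases above, this implies the service captures the source checksum before the move and verifies the mirrored repository against it afterwards; a minimal sketch of that check, where target_repository and the message text are assumed names, not the service's verbatim code:

# Sketch only: compare checksums after mirroring and raise the service's
# Error (rescued into an :error result) when they differ
new_checksum = target_repository.checksum
if checksum != new_checksum
  raise Error, "checksum mismatch for #{project.full_path}: expected #{checksum}, got #{new_checksum}"
end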
context 'when an object pool was joined' do
let!(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'leaves the pool' do
expect(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
result = subject.execute('test_second_storage')
expect(result[:status]).to eq(:success)
expect(project.repository_storage).to eq('test_second_storage')
expect(project.reload_pool_repository).to be_nil
end
end
end
context 'with wiki repository' do
......@@ -66,18 +113,5 @@ describe Projects::UpdateRepositoryStorageService do
end
end
end
context 'when an object pool was joined' do
let(:project) { create(:project, :repository, wiki_enabled: false, repository_read_only: true) }
let(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'leaves the pool' do
allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
subject.execute('test_second_storage')
expect(project.reload_pool_repository).to be_nil
end
end
end
end
......@@ -53,7 +53,7 @@ RSpec.shared_examples 'with inheritable CI config' do
let(:deps) do
if inheritable_key
double('deps', inheritable_key => inheritable, '[]' => unspecified)
double('deps', "#{inheritable_key}_entry" => inheritable, '[]' => unspecified)
else
inheritable
end
......@@ -68,7 +68,7 @@ RSpec.shared_examples 'with inheritable CI config' do
it 'does inherit value' do
expect(inheritable).to receive('[]').with(entry_key).and_return(specified)
entry.compose!(deps)
entry.send(:inherit!, deps)
expect(entry[entry_key]).to eq(specified)
end
......@@ -86,7 +86,7 @@ RSpec.shared_examples 'with inheritable CI config' do
expect do
# we ignore exceptions as `#overwrite_entry`
# can raise exception on duplicates
entry.compose!(deps) rescue described_class::InheritError
entry.send(:inherit!, deps) rescue described_class::InheritError
end.not_to change { entry[entry_key] }
end
end
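The move from entry.compose!(deps) to entry.send(:inherit!, deps) in these shared examples suggests inherit! is a non-public method that compose! drives internally; a minimal sketch of that assumed shape (class name and body are illustrative only):

class ExampleEntry
  def compose!(deps)
    inherit!(deps) # among other composition steps
  end

  private

  # copy inheritable values from deps unless this entry specified its own
  def inherit!(deps)
  end
end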
......@@ -94,7 +94,7 @@ RSpec.shared_examples 'with inheritable CI config' do
context 'when inheritable does not specify' do
it 'does not inherit value' do
entry.compose!(deps)
entry.send(:inherit!, deps)
expect(entry[entry_key]).to be_a(
Gitlab::Config::Entry::Undefined)
......
......@@ -2,7 +2,10 @@
RSpec.shared_examples 'moves repository to another storage' do |repository_type|
let(:project_repository_double) { double(:repository) }
let!(:project_repository_checksum) { project.repository.checksum }
let(:repository_double) { double(:repository) }
let(:repository_checksum) { repository.checksum }
before do
# Default stub for non-specified params
......@@ -19,15 +22,16 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context 'when the move succeeds', :clean_gitlab_redis_shared_state do
before do
allow(project_repository_double)
.to receive(:fetch_repository_as_mirror)
allow(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw)
.and_return(true)
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
allow(repository_double)
.to receive(:fetch_repository_as_mirror)
.with(repository.raw)
.and_return(true)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(true)
allow(repository_double).to receive(:checksum)
.and_return(repository_checksum)
end
it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read only" do
......@@ -37,8 +41,9 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
old_repository_path = repository.full_path
subject.execute('test_second_storage')
result = subject.execute('test_second_storage')
expect(result[:status]).to eq(:success)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
expect(gitlab_shell.repository_exists?('default', old_project_repository_path)).to be(false)
......@@ -87,13 +92,38 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
it 'unmarks the repository as read-only without updating the repository storage' do
allow(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(false)
expect(GitlabShellWorker).not_to receive(:perform_async)
subject.execute('test_second_storage')
result = subject.execute('test_second_storage')
expect(result[:status]).to eq(:error)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
end
context "when the checksum of the #{repository_type} repository does not match" do
it 'unmarks the repository as read-only without updating the repository storage' do
allow(project_repository_double).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
allow(project_repository_double).to receive(:checksum)
.and_return(project_repository_checksum)
allow(repository_double).to receive(:fetch_repository_as_mirror)
.with(repository.raw).and_return(true)
allow(repository_double).to receive(:checksum)
.and_return('not matching checksum')
expect(GitlabShellWorker).not_to receive(:perform_async)
result = subject.execute('test_second_storage')
expect(result[:status]).to eq(:error)
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
......
......@@ -421,17 +421,35 @@ describe PostReceive do
perform
end
end
it 'updates the snippet db information' do
blob = snippet.blobs.first
expect(snippet).to receive(:update).with(file_name: blob.path, content: blob.data)
perform
end
context 'when snippet does not have any blob' do
it 'does not update snippet db information' do
allow(snippet).to receive(:blobs).and_return([])
expect(snippet).not_to receive(:update)
perform
end
end
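A minimal sketch, assumed from the expectations above rather than taken from the worker itself, of the snippet-update step these examples exercise:

# Sketch only: mirror the repository's first blob into the snippet's DB columns
blob = snippet.blobs.first
snippet.update(file_name: blob.path, content: blob.data) if blob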
end
end
context 'with PersonalSnippet' do
let!(:snippet) { create(:personal_snippet, author: project.owner) }
let!(:snippet) { create(:personal_snippet, :repository, author: project.owner) }
it_behaves_like 'snippet changes actions'
end
context 'with ProjectSnippet' do
let!(:snippet) { create(:project_snippet, project: project, author: project.owner) }
let!(:snippet) { create(:project_snippet, :repository, project: project, author: project.owner) }
it_behaves_like 'snippet changes actions'
end
......