Commit b71a496c authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent c2041156
Please view this file on the master branch; on stable branches it's out of date.
## 12.9.3 (2020-04-14)
### Security (1 change)
- Fix filename bypass when uploading NuGet packages.
## 12.9.2 (2020-03-31)
### Fixed (4 changes)
......@@ -152,6 +159,13 @@ Please view this file on the master branch, on stable branches it's out of date.
- Allow users to be marked as service users. !202680
## 12.8.9 (2020-04-14)
### Security (1 change)
- Fix filename bypass when uploading NuGet packages.
## 12.8.7 (2020-03-16)
### Fixed (1 change)
......@@ -305,6 +319,13 @@ Please view this file on the master branch, on stable branches it's out of date.
- Prepare DB structure for GMA forking changes. !22002
## 12.7.9 (2020-04-14)
### Security (1 change)
- Fix filename bypass when uploading NuGet packages.
## 12.7.5
### Fixed (1 change)
......
......@@ -163,7 +163,7 @@ gem 'diffy', '~> 3.3'
gem 'diff_match_patch', '~> 0.1.0'
# Application server
gem 'rack', '~> 2.0.7'
gem 'rack', '~> 2.0.9'
group :unicorn do
gem 'unicorn', '~> 5.4.1'
......
......@@ -173,7 +173,7 @@ GEM
concord (0.1.5)
adamantium (~> 0.2.0)
equalizer (~> 0.0.9)
concurrent-ruby (1.1.5)
concurrent-ruby (1.1.6)
connection_pool (2.2.2)
contracts (0.11.0)
cork (0.3.0)
......@@ -788,7 +788,7 @@ GEM
public_suffix (4.0.3)
pyu-ruby-sasl (0.0.3.3)
raabro (1.1.6)
rack (2.0.7)
rack (2.0.9)
rack-accept (0.4.5)
rack (>= 0.4)
rack-attack (6.2.0)
......@@ -859,17 +859,17 @@ GEM
json
recursive-open-struct (1.1.0)
redis (4.1.3)
redis-actionpack (5.1.0)
actionpack (>= 4.0, < 7)
redis-rack (>= 1, < 3)
redis-actionpack (5.2.0)
actionpack (>= 5, < 7)
redis-rack (>= 2.1.0, < 3)
redis-store (>= 1.1.0, < 2)
redis-activesupport (5.2.0)
activesupport (>= 3, < 7)
redis-store (>= 1.3, < 2)
redis-namespace (1.6.0)
redis (>= 3.0.4)
redis-rack (2.0.6)
rack (>= 1.5, < 3)
redis-rack (2.1.2)
rack (>= 2.0.8, < 3)
redis-store (>= 1.2, < 2)
redis-rails (5.0.2)
redis-actionpack (>= 5.0, < 6)
......@@ -1331,7 +1331,7 @@ DEPENDENCIES
prometheus-client-mmap (~> 0.10.0)
pry-byebug (~> 3.5.1)
pry-rails (~> 0.3.9)
rack (~> 2.0.7)
rack (~> 2.0.9)
rack-attack (~> 6.2.0)
rack-cors (~> 1.0.6)
rack-oauth2 (~> 1.9.3)
......
......@@ -6,31 +6,32 @@ class ActiveSession
SESSION_BATCH_SIZE = 200
ALLOWED_NUMBER_OF_ACTIVE_SESSIONS = 100
attr_writer :session_id
attr_accessor :created_at, :updated_at,
:ip_address, :browser, :os,
:device_name, :device_type,
:is_impersonated
:is_impersonated, :session_id
def current?(session)
return false if session_id.nil? || session.id.nil?
session_id == session.id
# Rack v2.0.8+ added private_id, which uses the hash of the
# public_id to avoid timing attacks.
session_id.private_id == session.id.private_id
end
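# Illustration of the comparison above (assumes rack >= 2.0.8; the values
# are hypothetical):
#
#   sid = Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d')
#   sid.public_id  # => "6919a6f1bb119dd7396fadc38fd18d0d" (the cookie value)
#   sid.private_id # => "2::<sha256 of the public ID>" (used for storage)
#
# Comparing private IDs means both sides are constant-length digests, which
# avoids leaking timing information about the stored session ID.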
def human_device_type
device_type&.titleize
end
# This is not the same as Rack::Session::SessionId#public_id, but we
# need to preserve this for backwards compatibility.
def public_id
encrypted_id = Gitlab::CryptoHelper.aes256_gcm_encrypt(session_id)
CGI.escape(encrypted_id)
Gitlab::CryptoHelper.aes256_gcm_encrypt(session_id.public_id)
end
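# Round-trip sketch: AES-256-GCM is symmetric, so the value returned above
# can be resolved back to the Rack public ID, which is what
# destroy_with_public_id relies on (see decrypt_public_id below):
#
#   Gitlab::CryptoHelper.aes256_gcm_decrypt(active_session.public_id)
#   # => session_id.public_id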
def self.set(user, request)
Gitlab::Redis::SharedState.with do |redis|
session_id = request.session.id
session_id = request.session.id.public_id
client = DeviceDetector.new(request.user_agent)
timestamp = Time.current
......@@ -63,32 +64,35 @@ class ActiveSession
def self.list(user)
Gitlab::Redis::SharedState.with do |redis|
cleaned_up_lookup_entries(redis, user).map do |entry|
# rubocop:disable Security/MarshalLoad
Marshal.load(entry)
# rubocop:enable Security/MarshalLoad
cleaned_up_lookup_entries(redis, user).map do |raw_session|
load_raw_session(raw_session)
end
end
end
def self.destroy(user, session_id)
return unless session_id
Gitlab::Redis::SharedState.with do |redis|
destroy_sessions(redis, user, [session_id])
end
end
def self.destroy_with_public_id(user, public_id)
session_id = decrypt_public_id(public_id)
destroy(user, session_id) unless session_id.nil?
decrypted_id = decrypt_public_id(public_id)
return if decrypted_id.nil?
session_id = Rack::Session::SessionId.new(decrypted_id)
destroy(user, session_id)
end
def self.destroy_sessions(redis, user, session_ids)
key_names = session_ids.map {|session_id| key_name(user.id, session_id) }
session_names = session_ids.map {|session_id| "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}" }
key_names = session_ids.map { |session_id| key_name(user.id, session_id.public_id) }
redis.srem(lookup_key_name(user.id), session_ids)
redis.srem(lookup_key_name(user.id), session_ids.map(&:public_id))
redis.del(key_names)
redis.del(session_names)
redis.del(rack_session_keys(session_ids))
end
def self.cleanup(user)
......@@ -110,28 +114,65 @@ class ActiveSession
sessions_from_ids(session_ids_for_user(user.id))
end
# Lists the relevant session IDs for the user.
#
# Returns an array of Rack::Session::SessionId objects
def self.session_ids_for_user(user_id)
Gitlab::Redis::SharedState.with do |redis|
redis.smembers(lookup_key_name(user_id))
session_ids = redis.smembers(lookup_key_name(user_id))
session_ids.map { |id| Rack::Session::SessionId.new(id) }
end
end
# Lists the ActiveSession objects for the given session IDs.
#
# session_ids - An array of Rack::Session::SessionId objects
#
# Returns an array of ActiveSession objects
def self.sessions_from_ids(session_ids)
return [] if session_ids.empty?
Gitlab::Redis::SharedState.with do |redis|
session_keys = session_ids.map { |session_id| "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}" }
session_keys = rack_session_keys(session_ids)
session_keys.each_slice(SESSION_BATCH_SIZE).flat_map do |session_keys_batch|
redis.mget(session_keys_batch).compact.map do |raw_session|
# rubocop:disable Security/MarshalLoad
Marshal.load(raw_session)
# rubocop:enable Security/MarshalLoad
load_raw_session(raw_session)
end
end
end
end
# Deserializes an ActiveSession object from Redis.
#
# raw_session - Raw bytes from Redis
#
# Returns an ActiveSession object
def self.load_raw_session(raw_session)
# rubocop:disable Security/MarshalLoad
session = Marshal.load(raw_session)
# rubocop:enable Security/MarshalLoad
# Older ActiveSession models serialized `session_id` as a String. To
# avoid breaking those sessions, we keep backwards compatibility
# with older Redis keys and instantiate a Rack::Session::SessionId here.
session.session_id = Rack::Session::SessionId.new(session.session_id) if session.try(:session_id).is_a?(String)
session
end
def self.rack_session_keys(session_ids)
session_ids.each_with_object([]) do |session_id, arr|
# This is a redis-rack implementation detail
# (https://github.com/redis-store/redis-rack/blob/master/lib/rack/session/redis.rb#L88)
#
# We need to delete session keys based on the legacy public key name
# and the newer private ID keys, but there's no well-defined interface
# so we have to do it directly.
arr << "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id.public_id}"
arr << "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id.private_id}"
end
end
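# Hypothetical example: for a session whose public ID is "abc", the two
# keys produced above would be:
#
#   session:gitlab:abc           (legacy key, named after the public ID)
#   session:gitlab:2::<sha256>   (rack >= 2.0.8 key, named after the private ID)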
def self.raw_active_session_entries(redis, session_ids, user_id)
return [] if session_ids.empty?
......@@ -146,7 +187,7 @@ class ActiveSession
entry_keys = raw_active_session_entries(redis, session_ids, user_id)
entry_keys.compact.map do |raw_session|
Marshal.load(raw_session) # rubocop:disable Security/MarshalLoad
load_raw_session(raw_session)
end
end
......@@ -159,10 +200,13 @@ class ActiveSession
sessions = active_session_entries(session_ids, user.id, redis)
sessions.sort_by! {|session| session.updated_at }.reverse!
destroyable_sessions = sessions.drop(ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
destroyable_session_ids = destroyable_sessions.map { |session| session.send :session_id } # rubocop:disable GitlabSecurity/PublicSend
destroyable_session_ids = destroyable_sessions.map { |session| session.session_id }
destroy_sessions(redis, user, destroyable_session_ids) if destroyable_session_ids.any?
end
# Cleans up the lookup set by removing any session IDs that are no longer present.
#
# Returns an array of marshalled ActiveSession objects that are still active.
def self.cleaned_up_lookup_entries(redis, user)
session_ids = session_ids_for_user(user.id)
entries = raw_active_session_entries(redis, session_ids, user.id)
......@@ -181,13 +225,8 @@ class ActiveSession
end
private_class_method def self.decrypt_public_id(public_id)
decoded_id = CGI.unescape(public_id)
Gitlab::CryptoHelper.aes256_gcm_decrypt(decoded_id)
Gitlab::CryptoHelper.aes256_gcm_decrypt(public_id)
rescue
nil
end
private
attr_reader :session_id
end
......@@ -2,17 +2,19 @@
# rubocop:disable Rails/ActiveRecordAliases
class WikiPage
include Gitlab::Utils::StrongMemoize
PageChangedError = Class.new(StandardError)
PageRenameError = Class.new(StandardError)
MAX_TITLE_BYTES = 245
MAX_DIRECTORY_BYTES = 255
FrontMatterTooLong = Class.new(StandardError)
include ActiveModel::Validations
include ActiveModel::Conversion
include StaticModel
extend ActiveModel::Naming
delegate :content, :front_matter, to: :parsed_content
def self.primary_key
'slug'
end
......@@ -114,8 +116,7 @@ class WikiPage
@attributes[:title] = new_title
end
# The raw content of this page.
def content
def raw_content
@attributes[:content] ||= @page&.text_data
end
......@@ -238,7 +239,7 @@ class WikiPage
save do
wiki.update_page(
@page,
content: content,
content: raw_content,
format: format,
message: attrs[:message],
title: title
......@@ -281,8 +282,10 @@ class WikiPage
# Updates the current @attributes hash by merging a hash of params
def update_attributes(attrs)
attrs[:title] = process_title(attrs[:title]) if attrs[:title].present?
update_front_matter(attrs)
attrs.slice!(:content, :format, :message, :title)
clear_memoization(:parsed_content) if attrs.has_key?(:content)
@attributes.merge!(attrs)
end
......@@ -293,6 +296,28 @@ class WikiPage
private
def serialize_front_matter(hash)
return '' unless hash.present?
YAML.dump(hash.transform_keys(&:to_s)) + "---\n"
end
def update_front_matter(attrs)
return unless Gitlab::WikiPages::FrontMatterParser.enabled?(project)
return unless attrs.has_key?(:front_matter)
fm_yaml = serialize_front_matter(attrs[:front_matter])
raise FrontMatterTooLong if fm_yaml.size > Gitlab::WikiPages::FrontMatterParser::MAX_FRONT_MATTER_LENGTH
attrs[:content] = fm_yaml + (attrs[:content].presence || content)
end
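# Sketch of the rewrite above: given attrs = { front_matter: { slugs: ['a'] } }
# and an existing page body "Hello", attrs[:content] becomes:
#
#   ---
#   slugs:
#   - a
#   ---
#   Hello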
def parsed_content
strong_memoize(:parsed_content) do
Gitlab::WikiPages::FrontMatterParser.new(raw_content, project).parse
end
end
# Process and format the title based on the user input.
def process_title(title)
return if title.blank?
......@@ -339,14 +364,16 @@ class WikiPage
def validate_path_limits
*dirnames, title = @attributes[:title].split('/')
if title && title.bytesize > MAX_TITLE_BYTES
errors.add(:title, _("exceeds the limit of %{bytes} bytes") % { bytes: MAX_TITLE_BYTES })
if title && title.bytesize > Gitlab::WikiPages::MAX_TITLE_BYTES
errors.add(:title, _("exceeds the limit of %{bytes} bytes") % {
bytes: Gitlab::WikiPages::MAX_TITLE_BYTES
})
end
invalid_dirnames = dirnames.select { |d| d.bytesize > MAX_DIRECTORY_BYTES }
invalid_dirnames = dirnames.select { |d| d.bytesize > Gitlab::WikiPages::MAX_DIRECTORY_BYTES }
invalid_dirnames.each do |dirname|
errors.add(:title, _('exceeds the limit of %{bytes} bytes for directory name "%{dirname}"') % {
bytes: MAX_DIRECTORY_BYTES,
bytes: Gitlab::WikiPages::MAX_DIRECTORY_BYTES,
dirname: dirname
})
end
......
......@@ -29,7 +29,15 @@ module Groups
group.chat_team&.remove_mattermost_team(current_user)
user_ids_for_project_authorizations_refresh = group.user_ids_for_project_authorizations
group.destroy
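# The destroy above removes the rows that project authorizations are derived
# from, so the user IDs captured beforehand are refreshed synchronously
# (blocking: true) below, to avoid leaving stale access grants behind.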
UserProjectAccessChangedService
.new(user_ids_for_project_authorizations_refresh)
.execute(blocking: true)
group
end
# rubocop: enable CodeReuse/ActiveRecord
end
......
---
title: Read metadata from Wiki front-matter
merge_request: 27706
author:
type: added
# frozen_string_literal: true
class ScheduleRecalculateProjectAuthorizationsThirdRun < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'RecalculateProjectAuthorizationsWithMinMaxUserId'
BATCH_SIZE = 2_500
DELAY_INTERVAL = 2.minutes.to_i
disable_ddl_transaction!
class User < ActiveRecord::Base
include ::EachBatch
self.table_name = 'users'
end
def up
say "Scheduling #{MIGRATION} jobs"
queue_background_migration_jobs_by_range_at_intervals(User, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
end
end
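# Scheduling sketch (assumes the usual background-migration helper semantics;
# IDs and delays are illustrative): with BATCH_SIZE = 2_500 and
# DELAY_INTERVAL = 2 minutes, users with IDs 1..5_000 would be enqueued as:
#
#   BackgroundMigrationWorker.perform_in(2.minutes, MIGRATION, [1, 2_500])
#   BackgroundMigrationWorker.perform_in(4.minutes, MIGRATION, [2_501, 5_000])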
......@@ -12947,6 +12947,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200204070729
20200204113223
20200204113224
20200204113225
20200204131054
20200204131831
20200205143231
......
......@@ -2,6 +2,7 @@ Akismet
Alertmanager
Algolia
Ansible
Anthos
API
approvers
Artifactory
......@@ -118,6 +119,8 @@ hardcode
hardcoded
hardcodes
Helm
Heroku
Herokuish
HipChat
hostname
hostnames
......@@ -133,6 +136,7 @@ initializers
interdependencies
interdependency
Irker
Istio
jasmine-jquery
JavaScript
Jaeger
......@@ -180,6 +184,7 @@ misconfiguring
mitigations
mockup
mockups
ModSecurity
nameserver
nameservers
namespace
......@@ -326,6 +331,10 @@ unchecking
unchecks
uncomment
uncommented
unencode
unencoded
unencoder
unencodes
unencrypted
Unicorn
unindexed
......
......@@ -22,10 +22,9 @@ Pipelines comprise:
Jobs are executed by [Runners](../runners/README.md). Multiple jobs in the same stage are executed in parallel,
if there are enough concurrent runners.
If all the jobs in a stage:
If *all* jobs in a stage succeed, the pipeline moves on to the next stage.
- Succeed, the pipeline moves on to the next stage.
- Fail, the next stage is not (usually) executed and the pipeline ends early.
If *any* job in a stage fails, the next stage is not (usually) executed and the pipeline ends early.
In general, pipelines are executed automatically and require no intervention once created. However, there are
also times when you can manually interact with a pipeline.
......@@ -46,6 +45,10 @@ you may need to enable pipeline triggering in your project's
Pipelines can be configured in many different ways:
- [Basic pipelines](pipeline_architectures.md#basic-pipelines) run everything in each stage concurrently,
followed by the next stage.
- [Directed Acyclic Graph Pipeline (DAG) pipelines](../directed_acyclic_graph/index.md) are based on relationships
between jobs and can run more quickly than basic pipelines.
- [Multi-project pipelines](../multi_project_pipelines.md) combine pipelines for different projects together.
- [Parent-Child pipelines](../parent_child_pipelines.md) break down complex pipelines
into one parent pipeline that can trigger multiple child sub-pipelines, which all
......
......@@ -418,12 +418,23 @@ spec:
## Troubleshooting
- Auto Build and Auto Test may fail in detecting your language/framework. There
may be no buildpack for your application, or your application may be missing the
key files the buildpack is looking for. For example, for Ruby applications, you must
have a `Gemfile` to be properly detected, even though it is possible to write a
Ruby app without a `Gemfile`. Try specifying a [custom
buildpack](customize.md#custom-buildpacks).
- Auto Build and Auto Test may fail to detect your language or framework with the
following error:
```plaintext
Step 5/11 : RUN /bin/herokuish buildpack build
---> Running in eb468cd46085
-----> Unable to select a buildpack
The command '/bin/sh -c /bin/herokuish buildpack build' returned a non-zero code: 1
```
The following are possible reasons:
- Your application may be missing the key files the buildpack is looking for. For
example, for Ruby applications you must have a `Gemfile` to be properly detected,
even though it is possible to write a Ruby app without a `Gemfile`.
- There may be no buildpack for your application. Try specifying a
[custom buildpack](customize.md#custom-buildpacks).
- Auto Test may fail because of a mismatch between testing frameworks. In this
case, you may need to customize your `.gitlab-ci.yml` with your test commands.
- Auto Deploy will fail if GitLab cannot create a Kubernetes namespace and
......
......@@ -58,7 +58,7 @@ under which this application will be deployed.
1. On the project's landing page, click **Add Kubernetes cluster**
(note that this option is also available when you navigate to **Operations > Kubernetes**).
![Project landing page](../autodevops/img/guide_project_landing_page_v12_3.png)
![Project landing page](../autodevops/img/guide_project_landing_page_v12_10.png)
1. On the **Create new cluster on GKE** tab, click **Sign in with Google**.
......
......@@ -524,6 +524,12 @@ does not make any unsolicited requests including checking for updates.
The DAST job can emit various reports.
### List of URLs scanned
When DAST completes scanning, the merge request page states the number of URLs that were scanned. Click **View details** to view the web console output, which includes the list of scanned URLs.
![DAST Widget](img/dast_urls_scanned_v12_10.png)
### JSON
CAUTION: **Caution:**
......
......@@ -251,21 +251,14 @@ module API
end
params do
requires :id, type: Integer, desc: %q(Job's ID)
requires :file, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact file to store (generated by Multipart middleware))
optional :token, type: String, desc: %q(Job's authentication token)
optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
optional :artifact_type, type: String, desc: %q(The type of artifact),
default: 'archive', values: Ci::JobArtifact.file_types.keys
optional :artifact_format, type: String, desc: %q(The format of artifact),
default: 'zip', values: Ci::JobArtifact.file_formats.keys
optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'file.name', type: String, desc: %q(real filename as send in Content-Disposition (generated by Workhorse))
optional 'file.type', type: String, desc: %q(real content type as send in Content-Type (generated by Workhorse))
optional 'file.size', type: Integer, desc: %q(real size of file (generated by Workhorse))
optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file (generated by Workhorse))
optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
optional 'metadata.size', type: Integer, desc: %q(real size of metadata (generated by Workhorse))
optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of metadata (generated by Workhorse))
optional :metadata, type: ::API::Validations::Types::WorkhorseFile, desc: %(The artifact metadata to store (generated by Multipart middleware))
end
post '/:id/artifacts' do
not_allowed! unless Gitlab.config.artifacts.enabled
......@@ -274,10 +267,9 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
metadata = UploadedFile.from_params(params, :metadata, JobArtifactUploader.workhorse_local_upload_path)
artifacts = params[:file]
metadata = params[:metadata]
bad_request!('Missing artifacts file!') unless artifacts
file_too_large! unless artifacts.size < max_artifacts_size(job)
result = Ci::CreateJobArtifactsService.new(job.project).execute(job, artifacts, params, metadata_file: metadata)
......
......@@ -3,28 +3,11 @@
module Banzai
module Filter
class FrontMatterFilter < HTML::Pipeline::Filter
DELIM_LANG = {
'---' => 'yaml',
'+++' => 'toml',
';;;' => 'json'
}.freeze
DELIM = Regexp.union(DELIM_LANG.keys)
PATTERN = %r{
\A(?:[^\r\n]*coding:[^\r\n]*)? # optional encoding line
\s*
^(?<delim>#{DELIM})[ \t]*(?<lang>\S*) # opening front matter marker (optional language specifier)
\s*
^(?<front_matter>.*?) # front matter (not greedy)
\s*
^\k<delim> # closing front matter marker
\s*
}mx.freeze
def call
html.sub(PATTERN) do |_match|
lang = $~[:lang].presence || DELIM_LANG[$~[:delim]]
lang_mapping = Gitlab::FrontMatter::DELIM_LANG
html.sub(Gitlab::FrontMatter::PATTERN) do |_match|
lang = $~[:lang].presence || lang_mapping[$~[:delim]]
["```#{lang}", $~[:front_matter], "```", "\n"].join("\n")
end
......
# frozen_string_literal: true
module Gitlab
module FrontMatter
DELIM_LANG = {
'---' => 'yaml',
'+++' => 'toml',
';;;' => 'json'
}.freeze
DELIM = Regexp.union(DELIM_LANG.keys)
PATTERN = %r{
\A(?:[^\r\n]*coding:[^\r\n]*)? # optional encoding line
\s*
^(?<delim>#{DELIM})[ \t]*(?<lang>\S*) # opening front matter marker (optional language specifier)
\s*
^(?<front_matter>.*?) # front matter block content (not greedy)
\s*
^(\k<delim> | \.{3}) # closing front matter marker
\s*
}mx.freeze
end
end
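# Matching sketch for PATTERN (hypothetical input):
#
#   text = "---\ntitle: Home\n---\nThe body\n"
#   m = text.match(Gitlab::FrontMatter::PATTERN)
#   m[:delim]        # => "---"
#   m[:lang]         # => "" (callers fall back to DELIM_LANG["---"], "yaml")
#   m[:front_matter] # => "title: Home"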
......@@ -107,6 +107,7 @@ module Gitlab
[
::FileUploader.root,
Gitlab.config.uploads.storage_path,
JobArtifactUploader.workhorse_upload_path,
File.join(Rails.root, 'public/uploads/tmp')
]
end
......@@ -125,6 +126,8 @@ module Gitlab
Handler.new(env, message).with_open_files do
@app.call(env)
end
rescue UploadedFile::InvalidPathError => e
[400, { 'Content-Type' => 'text/plain' }, [e.message]]
end
end
end
......
# frozen_string_literal: true
module Gitlab
module WikiPages
# Many common file systems have a limit of 255 bytes for file and
# directory names, and while Git and GitLab both support paths exceeding
# those limits, the presence of such paths makes it impossible for users on
# those file systems to check out a wiki repository locally.
# To avoid this situation, we enforce these limits when editing pages
# through the GitLab web interface and API:
MAX_TITLE_BYTES = 245 # reserving 10 bytes for the file extension
MAX_DIRECTORY_BYTES = 255
end
end
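# Byte-size sketch: the limits apply to UTF-8 byte length, not character
# count, so multibyte titles reach the cap sooner:
#
#   'home'.bytesize                                        # => 4
#   ('é' * 130).bytesize                                   # => 260 (too long)
#   'home'.bytesize <= Gitlab::WikiPages::MAX_TITLE_BYTES  # => true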
# frozen_string_literal: true
module Gitlab
module WikiPages
class FrontMatterParser
FEATURE_FLAG = :wiki_front_matter
# We limit the maximum length of text we are prepared to parse as YAML, to
# avoid exploits that attempt to consume memory and CPU. We allow for:
# - a title line
# - a "slugs:" line
# - and up to 50 slugs
#
# This limit does not take comments into account.
MAX_SLUGS = 50
SLUG_LINE_LENGTH = (4 + Gitlab::WikiPages::MAX_DIRECTORY_BYTES + 1 + Gitlab::WikiPages::MAX_TITLE_BYTES)
MAX_FRONT_MATTER_LENGTH = (8 + Gitlab::WikiPages::MAX_TITLE_BYTES) + 7 + (SLUG_LINE_LENGTH * MAX_SLUGS)
ParseError = Class.new(StandardError)
class Result
attr_reader :front_matter, :content, :reason, :error
def initialize(content:, front_matter: {}, reason: nil, error: nil)
@content = content
@front_matter = front_matter.freeze
@reason = reason
@error = error
end
end
# @param [String] wiki_content
# @param [FeatureGate] feature_gate The scope for feature availability
def initialize(wiki_content, feature_gate)
@wiki_content = wiki_content
@feature_gate = feature_gate
end
def self.enabled?(gate = nil)
Feature.enabled?(FEATURE_FLAG, gate)
end
def parse
return empty_result unless enabled? && wiki_content.present?
return empty_result(block.error) unless block.valid?
Result.new(front_matter: block.data, content: strip_front_matter_block)
rescue ParseError => error
empty_result(:parse_error, error)
end
class Block
include Gitlab::Utils::StrongMemoize
def initialize(delim = nil, lang = '', text = nil)
@lang = lang.downcase.presence || Gitlab::FrontMatter::DELIM_LANG[delim]
@text = text
end
def data
@data ||= YAML.safe_load(text, symbolize_names: true)
rescue Psych::DisallowedClass, Psych::SyntaxError => error
raise ParseError, error.message
end
def valid?
error.nil?
end
def error
strong_memoize(:error) { no_match? || too_long? || not_yaml? || not_mapping? }
end
private
attr_reader :lang, :text
def no_match?
:no_match if text.nil?
end
def not_yaml?
:not_yaml if lang != 'yaml'
end
def too_long?
:too_long if text.size > MAX_FRONT_MATTER_LENGTH
end
def not_mapping?
:not_mapping unless data.is_a?(Hash)
end
end
private
attr_reader :wiki_content, :feature_gate
def empty_result(reason = nil, error = nil)
Result.new(content: wiki_content, reason: reason, error: error)
end
def enabled?
self.class.enabled?(feature_gate)
end
def block
@block ||= parse_front_matter_block
end
def parse_front_matter_block
wiki_content.match(Gitlab::FrontMatter::PATTERN) { |m| Block.new(*m.captures) } || Block.new
end
def strip_front_matter_block
wiki_content.gsub(Gitlab::FrontMatter::PATTERN, '')
end
end
end
end
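# Usage sketch (assumes a project acting as the feature gate):
#
#   result = Gitlab::WikiPages::FrontMatterParser.new(raw_content, project).parse
#   result.front_matter # => { title: "Foo" } when the block is valid YAML
#   result.reason       # => :not_yaml, :too_long or :not_mapping when rejected
#   result.content      # => the body with the front-matter block stripped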
......@@ -89,6 +89,17 @@ describe Gitlab::Middleware::Multipart do
end
end
it 'allows files in the job artifact upload path' do
with_tmp_dir('artifacts') do |dir, env|
expect(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(File.join(dir, 'artifacts'))
expect(app).to receive(:call) do |env|
expect(get_params(env)['file']).to be_a(::UploadedFile)
end
middleware.call(env)
end
end
it 'allows symlinks for uploads dir' do
Tempfile.open('two-levels') do |tempfile|
symlinked_dir = '/some/dir/uploads'
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::WikiPages::FrontMatterParser do
subject(:parser) { described_class.new(raw_content, gate) }
let(:content) { 'This is the content' }
let(:end_divider) { '---' }
let(:gate) { double('Gate') }
let(:with_front_matter) do
<<~MD
---
a: 1
b: 2
c:
- foo
- bar
date: I am safe. Not actually a date
#{end_divider}
#{content}
MD
end
def have_correct_front_matter
include(a: 1, b: 2, c: %w(foo bar))
end
describe '#parse' do
subject { parser.parse }
context 'there is front matter' do
let(:raw_content) { with_front_matter }
it do
is_expected.to have_attributes(
front_matter: have_correct_front_matter,
content: content + "\n",
error: be_nil
)
end
end
context 'there is no content' do
let(:raw_content) { '' }
it do
is_expected.to have_attributes(
front_matter: {},
content: raw_content,
error: be_nil
)
end
end
context 'there is no front_matter' do
let(:raw_content) { content }
it { is_expected.to have_attributes(front_matter: be_empty, content: raw_content) }
it { is_expected.to have_attributes(reason: :no_match) }
end
context 'the feature flag is disabled' do
let(:raw_content) { with_front_matter }
before do
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
end
it { is_expected.to have_attributes(front_matter: be_empty, content: raw_content) }
end
context 'the feature flag is enabled for the gated object' do
let(:raw_content) { with_front_matter }
before do
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => {
enabled: true,
thing: gate
})
end
it do
is_expected.to have_attributes(
front_matter: have_correct_front_matter,
content: content + "\n",
reason: be_nil
)
end
end
context 'the end divider is ...' do
let(:end_divider) { '...' }
let(:raw_content) { with_front_matter }
it { is_expected.to have_attributes(front_matter: have_correct_front_matter) }
end
context 'the front-matter is not a mapping' do
let(:raw_content) do
<<~MD
---
- thing one
- thing two
---
#{content}
MD
end
it { is_expected.to have_attributes(reason: :not_mapping) }
end
context 'there is nothing in the front-matter block' do
let(:raw_content) do
<<~MD
---
---
My content here
MD
end
it { is_expected.to have_attributes(reason: :not_mapping) }
end
context 'there is a string in the YAML block' do
let(:raw_content) do
<<~MD
---
This is a string
---
#{content}
MD
end
it { is_expected.to have_attributes(reason: :not_mapping) }
end
context 'there is dangerous YAML in the block' do
let(:raw_content) do
<<~MD
---
date: 2010-02-11 11:02:57
---
#{content}
MD
end
it { is_expected.to have_attributes(reason: :parse_error, error: be_present) }
end
context 'there is acceptably long YAML in the front-matter block' do
let(:raw_content) do
key = 'title: '
length = described_class::MAX_FRONT_MATTER_LENGTH - key.size
<<~MD
---
title: #{FFaker::Lorem.characters(length)}
---
#{content}
MD
end
it { is_expected.to have_attributes(front_matter: include(title: be_present)) }
end
context 'there is suspiciously long YAML in the front-matter block' do
let(:raw_content) do
<<~MD
---
title: #{FFaker::Lorem.characters(described_class::MAX_FRONT_MATTER_LENGTH)}
---
#{content}
MD
end
it { is_expected.to have_attributes(reason: :too_long) }
end
context 'TOML front matter' do
let(:raw_content) do
<<~MD
+++
title = "My title"
+++
#{content}
MD
end
it { is_expected.to have_attributes(reason: :not_yaml) }
end
context 'TOML style fences, advertised as YAML' do
let(:raw_content) do
<<~MD
+++ yaml
title: "My title"
+++
#{content}
MD
end
it { is_expected.to have_attributes(front_matter: include(title: 'My title')) }
end
context 'YAML, advertised as something else' do
let(:raw_content) do
<<~MD
--- toml
title: My title
---
#{content}
MD
end
it { is_expected.to have_attributes(reason: :not_yaml) }
end
context 'there is text content in the YAML block, in comments' do
let(:raw_content) do
<<~MD
---
# This is YAML
#
# It has comments though. Explaining things
foo: 1
## It has headings
headings:
- heading one
- heading two
# And lists
lists:
- and lists
- with things in them
---
#{content}
MD
end
it { is_expected.to have_attributes(front_matter: include(foo: 1)) }
end
context 'there is text content in the YAML block' do
let(:raw_content) do
<<~MD
---
# This is not YAML
In fact it looks like markdown
## It has headings
Paragraphs
- and lists
- with things in them
---
#{content}
MD
end
it { is_expected.to have_attributes(reason: :not_mapping) }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200204113225_schedule_recalculate_project_authorizations_third_run.rb')
describe ScheduleRecalculateProjectAuthorizationsThirdRun do
let(:users_table) { table(:users) }
before do
stub_const("#{described_class}::BATCH_SIZE", 2)
1.upto(4) do |i|
users_table.create!(id: i, name: "user#{i}", email: "user#{i}@example.com", projects_limit: 1)
end
end
it 'schedules background migration' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
expect(described_class::MIGRATION).to be_scheduled_migration(1, 2)
expect(described_class::MIGRATION).to be_scheduled_migration(3, 4)
end
end
end
end
......@@ -9,10 +9,8 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
end
let(:session) do
double(:session, { id: '6919a6f1bb119dd7396fadc38fd18d0d',
'[]': {} })
end
let(:rack_session) { Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') }
let(:session) { instance_double(ActionDispatch::Request::Session, id: rack_session, '[]': {}) }
let(:request) do
double(:request, {
......@@ -25,13 +23,13 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '#current?' do
it 'returns true if the active session matches the current session' do
active_session = ActiveSession.new(session_id: '6919a6f1bb119dd7396fadc38fd18d0d')
active_session = ActiveSession.new(session_id: rack_session)
expect(active_session.current?(session)).to be true
end
it 'returns false if the active session does not match the current session' do
active_session = ActiveSession.new(session_id: '59822c7d9fcdfa03725eff41782ad97d')
active_session = ActiveSession.new(session_id: Rack::Session::SessionId.new('59822c7d9fcdfa03725eff41782ad97d'))
expect(active_session.current?(session)).to be false
end
......@@ -46,14 +44,12 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '#public_id' do
it 'returns an encrypted, url-encoded session id' do
original_session_id = "!*'();:@&\n=+$,/?%abcd#123[4567]8"
original_session_id = Rack::Session::SessionId.new("!*'();:@&\n=+$,/?%abcd#123[4567]8")
active_session = ActiveSession.new(session_id: original_session_id)
encrypted_encoded_id = active_session.public_id
encrypted_id = CGI.unescape(encrypted_encoded_id)
encrypted_id = active_session.public_id
derived_session_id = Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted_id)
expect(original_session_id).to eq derived_session_id
expect(original_session_id.public_id).to eq derived_session_id
end
end
......@@ -104,7 +100,8 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.list_sessions' do
it 'uses the ActiveSession lookup to return original sessions' do
Gitlab::Redis::SharedState.with do |redis|
redis.set("session:gitlab:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ _csrf_token: 'abcd' }))
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' }))
redis.sadd(
"session:lookup:user:gitlab:#{user.id}",
......@@ -127,17 +124,18 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
redis.sadd("session:lookup:user:gitlab:#{user.id}", session_ids)
end
expect(ActiveSession.session_ids_for_user(user.id)).to eq(session_ids)
expect(ActiveSession.session_ids_for_user(user.id).map(&:to_s)).to eq(session_ids)
end
end
describe '.sessions_from_ids' do
it 'uses the ActiveSession lookup to return original sessions' do
Gitlab::Redis::SharedState.with do |redis|
redis.set("session:gitlab:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ _csrf_token: 'abcd' }))
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' }))
end
expect(ActiveSession.sessions_from_ids(['6919a6f1bb119dd7396fadc38fd18d0d'])).to eq [{ _csrf_token: 'abcd' }]
expect(ActiveSession.sessions_from_ids([rack_session])).to eq [{ _csrf_token: 'abcd' }]
end
it 'avoids a redis lookup for an empty array' do
......@@ -152,11 +150,12 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
redis = double(:redis)
expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
sessions = %w[session-a session-b]
sessions = %w[session-a session-b session-c session-d]
mget_responses = sessions.map { |session| [Marshal.dump(session)]}
expect(redis).to receive(:mget).twice.and_return(*mget_responses)
expect(redis).to receive(:mget).exactly(4).times.and_return(*mget_responses)
expect(ActiveSession.sessions_from_ids([1, 2])).to eql(sessions)
session_ids = [1, 2].map { |id| Rack::Session::SessionId.new(id.to_s) }
expect(ActiveSession.sessions_from_ids(session_ids).map(&:to_s)).to eql(sessions)
end
end
......@@ -212,6 +211,12 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
describe '.destroy' do
it 'gracefully handles a nil session ID' do
expect(described_class).not_to receive(:destroy_sessions)
ActiveSession.destroy(user, nil)
end
it 'removes the entry associated with the currently killed user session' do
Gitlab::Redis::SharedState.with do |redis|
redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", '')
......@@ -244,8 +249,9 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes the devise session' do
Gitlab::Redis::SharedState.with do |redis|
redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", '')
redis.set("session:gitlab:6919a6f1bb119dd7396fadc38fd18d0d", '')
redis.set("session:user:gitlab:#{user.id}:#{rack_session.public_id}", '')
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", '')
end
ActiveSession.destroy(user, request.session.id)
......@@ -322,7 +328,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
(1..max_number_of_sessions_plus_two).each do |number|
redis.set(
"session:user:gitlab:#{user.id}:#{number}",
Marshal.dump(ActiveSession.new(session_id: "#{number}", updated_at: number.days.ago))
Marshal.dump(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago))
)
redis.sadd(
"session:lookup:user:gitlab:#{user.id}",
......
......@@ -20,6 +20,17 @@ describe WikiPage do
subject { new_page }
def disable_front_matter
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
end
def enable_front_matter_for_project
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => {
thing: project,
enabled: true
})
end
describe '.group_by_directory' do
context 'when there are no pages' do
it 'returns an empty array' do
......@@ -101,6 +112,119 @@ describe WikiPage do
end
end
describe '#front_matter' do
let_it_be(:project) { create(:project) }
let(:wiki_page) { create(:wiki_page, project: project, content: content) }
shared_examples 'a page without front-matter' do
it { expect(wiki_page).to have_attributes(front_matter: {}, content: content) }
end
shared_examples 'a page with front-matter' do
let(:front_matter) { { title: 'Foo', slugs: %w[slug_a slug_b] } }
it { expect(wiki_page.front_matter).to eq(front_matter) }
end
context 'the wiki page has front matter' do
let(:content) do
<<~MD
---
title: Foo
slugs:
- slug_a
- slug_b
---
My actual content
MD
end
it_behaves_like 'a page with front-matter'
it 'strips the front matter from the content' do
expect(wiki_page.content.strip).to eq('My actual content')
end
context 'the feature flag is off' do
before do
disable_front_matter
end
it_behaves_like 'a page without front-matter'
context 'but enabled for the project' do
before do
enable_front_matter_for_project
end
it_behaves_like 'a page with front-matter'
end
end
end
context 'the wiki page does not have front matter' do
let(:content) { 'My actual content' }
it_behaves_like 'a page without front-matter'
end
context 'the wiki page has fenced blocks, but nothing in them' do
let(:content) do
<<~MD
---
---
My actual content
MD
end
it_behaves_like 'a page without front-matter'
end
context 'the wiki page has invalid YAML type in fenced blocks' do
let(:content) do
<<~MD
---
this isn't YAML
---
My actual content
MD
end
it_behaves_like 'a page without front-matter'
end
context 'the wiki page has a disallowed class in fenced block' do
let(:content) do
<<~MD
---
date: 2010-02-11 11:02:57
---
My actual content
MD
end
it_behaves_like 'a page without front-matter'
end
context 'the wiki page has invalid YAML in fenced block' do
let(:content) do
<<~MD
---
invalid-use-of-reserved-indicator: @text
---
My actual content
MD
end
it_behaves_like 'a page without front-matter'
end
end
describe '.unhyphenize' do
it 'removes hyphens from a name' do
name = 'a-name--with-hyphens'
......@@ -155,8 +279,8 @@ describe WikiPage do
end
describe '#validate_path_limits' do
let(:max_title) { described_class::MAX_TITLE_BYTES }
let(:max_directory) { described_class::MAX_DIRECTORY_BYTES }
let(:max_title) { Gitlab::WikiPages::MAX_TITLE_BYTES }
let(:max_directory) { Gitlab::WikiPages::MAX_DIRECTORY_BYTES }
where(:character) do
['a', 'ä', '🙈']
......@@ -296,7 +420,7 @@ describe WikiPage do
subject.update(content: "new content")
page = wiki.find_page(title)
expect(page.content).to eq('new content')
expect([subject.content, page.content]).to all(eq('new content'))
end
it "returns true" do
......@@ -333,7 +457,7 @@ describe WikiPage do
subject.update(content: new_content)
page = wiki.find_page('test page')
expect(page.content).to eq("new content")
expect([subject.content, page.content]).to all(eq("new content"))
end
it "updates the title of the page" do
......@@ -342,7 +466,75 @@ describe WikiPage do
subject.update(title: new_title)
page = wiki.find_page(new_title)
expect(page.title).to eq(new_title)
expect([subject.title, page.title]).to all(eq(new_title))
end
describe 'updating front_matter' do
shared_examples 'able to update front-matter' do
it 'updates the wiki-page front-matter' do
title = subject.title
content = subject.content
subject.update(front_matter: { slugs: ['x'] })
page = wiki.find_page(title)
expect([subject, page]).to all(
have_attributes(
front_matter: include(slugs: include('x')),
content: content
))
end
end
it_behaves_like 'able to update front-matter'
context 'the front matter is too long' do
let(:new_front_matter) do
{
title: generate(:wiki_page_title),
slugs: Array.new(51).map { FFaker::Lorem.characters(512) }
}
end
it 'raises an error' do
expect { subject.update(front_matter: new_front_matter) }.to raise_error(described_class::FrontMatterTooLong)
end
end
context 'the front-matter feature flag is not enabled' do
before do
disable_front_matter
end
it 'does not update the front-matter' do
content = subject.content
subject.update(front_matter: { slugs: ['x'] })
page = wiki.find_page(subject.title)
expect([subject, page]).to all(have_attributes(front_matter: be_empty, content: content))
end
context 'but it is enabled for the project' do
before do
enable_front_matter_for_project
end
it_behaves_like 'able to update front-matter'
end
end
it 'updates the wiki-page front-matter and content together' do
title = subject.title
content = 'totally new content'
subject.update(content: content, front_matter: { slugs: ['x'] })
page = wiki.find_page(title)
expect([subject, page]).to all(
have_attributes(
front_matter: include(slugs: include('x')),
content: content
))
end
end
it "returns true" do
......
......@@ -1422,8 +1422,8 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
describe 'artifacts' do
let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
......@@ -1703,12 +1703,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it 'fails to post artifacts without GitLab-Workhorse' do
post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'Is missing GitLab Workhorse token headers' do
let(:jwt_token) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
it 'fails to post artifacts without GitLab-Workhorse' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).once
......@@ -1722,15 +1722,14 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
context 'when setting an expire date' do
let(:default_artifacts_expire_in) {}
let(:post_data) do
{ 'file.path' => file_upload.path,
'file.name' => file_upload.original_filename,
'expire_in' => expire_in }
{ file: file_upload,
expire_in: expire_in }
end
before do
stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
upload_artifacts(file_upload, headers_with_token, post_data)
end
context 'when an expire_in is given' do
......@@ -1783,20 +1782,22 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
let(:stored_artifacts_size) { job.reload.artifacts_size }
let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
let(:file_keys) { post_data.keys }
let(:send_rewritten_field) { true }
before do
post(api("/jobs/#{job.id}/artifacts"), params: post_data, headers: headers_with_token)
workhorse_finalize_with_multiple_files(
api("/jobs/#{job.id}/artifacts"),
method: :post,
file_keys: file_keys,
params: post_data,
headers: headers_with_token,
send_rewritten_field: send_rewritten_field
)
end
context 'when posts data accelerated by workhorse is correct' do
let(:post_data) do
{ 'file.path' => artifacts.path,
'file.name' => artifacts.original_filename,
'file.sha256' => artifacts_sha256,
'metadata.path' => metadata.path,
'metadata.name' => metadata.original_filename,
'metadata.sha256' => metadata_sha256 }
end
let(:post_data) { { file: artifacts, metadata: metadata } }
it 'stores artifacts and artifacts metadata' do
expect(response).to have_gitlab_http_status(:created)
......@@ -1808,9 +1809,30 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
context 'with a malicious file.path param' do
let(:post_data) { {} }
let(:tmp_file) { Tempfile.new('crafted.file.path') }
let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
it 'rejects the request' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(stored_artifacts_size).to be_nil
end
end
context 'when workhorse header is missing' do
let(:post_data) { { file: artifacts, metadata: metadata } }
let(:send_rewritten_field) { false }
it 'rejects the request' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(stored_artifacts_size).to be_nil
end
end
context 'when there is no artifacts file in post data' do
let(:post_data) do
{ 'metadata' => metadata }
{ metadata: metadata }
end
it 'is expected to respond with bad request' do
......@@ -2053,7 +2075,8 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
method: :post,
file_key: :file,
params: params.merge(file: file),
headers: headers
headers: headers,
send_rewritten_field: true
)
end
end
......
......@@ -133,4 +133,55 @@ describe Groups::DestroyService do
end
end
end
describe 'authorization updates', :sidekiq_inline do
context 'shared groups' do
let!(:shared_group) { create(:group, :private) }
let!(:shared_group_child) { create(:group, :private, parent: shared_group) }
let!(:project) { create(:project, group: shared_group) }
let!(:project_child) { create(:project, group: shared_group_child) }
before do
create(:group_group_link, shared_group: shared_group, shared_with_group: group)
group.refresh_members_authorized_projects
end
it 'updates project authorization' do
expect(user.can?(:read_project, project)).to eq(true)
expect(user.can?(:read_project, project_child)).to eq(true)
destroy_group(group, user, false)
expect(user.can?(:read_project, project)).to eq(false)
expect(user.can?(:read_project, project_child)).to eq(false)
end
end
context 'shared groups in the same group hierarchy' do
let!(:subgroup) { create(:group, :private, parent: group) }
let!(:subgroup_user) { create(:user) }
before do
subgroup.add_user(subgroup_user, Gitlab::Access::MAINTAINER)
create(:group_group_link, shared_group: group, shared_with_group: subgroup)
subgroup.refresh_members_authorized_projects
end
context 'group is deleted' do
it 'updates project authorization' do
expect { destroy_group(group, user, false) }.to(
change { subgroup_user.can?(:read_project, project) }.from(true).to(false))
end
end
context 'subgroup is deleted' do
it 'updates project authorization' do
expect { destroy_group(subgroup, user, false) }.to(
change { subgroup_user.can?(:read_project, project) }.from(true).to(false))
end
end
end
end
end
......@@ -33,22 +33,36 @@ module WorkhorseHelpers
# workhorse_finalize will transform file_key inside params as if it were the finalize call of an inline object storage upload.
# Note that, based on the content of the params, it can simulate either disk acceleration or an object storage upload.
def workhorse_finalize(url, method: :post, file_key:, params:, headers: {}, send_rewritten_field: false)
workhorse_request_with_file(method, url,
file_key: file_key,
params: params,
extra_headers: headers,
send_rewritten_field: send_rewritten_field
workhorse_finalize_with_multiple_files(url, method: method, file_keys: file_key, params: params, headers: headers, send_rewritten_field: send_rewritten_field)
end
def workhorse_finalize_with_multiple_files(url, method: :post, file_keys:, params:, headers: {}, send_rewritten_field: false)
workhorse_request_with_multiple_files(method, url,
file_keys: file_keys,
params: params,
extra_headers: headers,
send_rewritten_field: send_rewritten_field
)
end
def workhorse_request_with_file(method, url, file_key:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
workhorse_request_with_multiple_files(method, url, file_keys: file_key, params: params, env: env, extra_headers: extra_headers, send_rewritten_field: send_rewritten_field)
end
def workhorse_request_with_multiple_files(method, url, file_keys:, params:, env: {}, extra_headers: {}, send_rewritten_field:)
workhorse_params = params.dup
file = workhorse_params.delete(file_key)
workhorse_params = workhorse_disk_accelerated_file_params(file_key, file).merge(workhorse_params)
file_keys = Array(file_keys)
rewritten_fields = {}
file_keys.each do |key|
file = workhorse_params.delete(key)
rewritten_fields[key] = file.path if file
workhorse_params = workhorse_disk_accelerated_file_params(key, file).merge(workhorse_params)
end
headers = if send_rewritten_field
workhorse_rewritten_fields_header(file_key => file.path)
workhorse_rewritten_fields_header(rewritten_fields)
else
{}
end
......@@ -75,7 +89,11 @@ module WorkhorseHelpers
"#{key}.name" => file.original_filename,
"#{key}.size" => file.size
}.tap do |params|
params["#{key}.path"] = file.path if file.path
if file.path
params["#{key}.path"] = file.path
params["#{key}.sha256"] = Digest::SHA256.file(file.path).hexdigest
end
params["#{key}.remote_id"] = file.remote_id if file.respond_to?(:remote_id) && file.remote_id.present?
end
end
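# Hypothetical illustration: for key :file and a locally stored upload, the
# params now mirror what Workhorse would send, including the new checksum:
#
#   { "file.name"   => "banana_sample.gif",
#     "file.size"   => 1062,                          # illustrative
#     "file.path"   => "/tmp/uploads/banana_sample.gif",
#     "file.sha256" => Digest::SHA256.file(path).hexdigest }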
......
# frozen_string_literal: true
#
# This file pulls in the changes in https://github.com/rails/rails/pull/38063
# to fix controller specs when run with the latest Rack versions.
#
# This file should be removed after that change ships. It is not
# present in Rails 6.0.2.2.
module ActionController
class TestRequest < ActionDispatch::TestRequest #:nodoc:
def self.new_session
TestSessionPatched.new
end
end
# Methods #destroy and #load! are overridden to avoid calling methods on the
# @store object, which does not exist for the TestSession class.
class TestSessionPatched < Rack::Session::Abstract::PersistedSecure::SecureSessionHash #:nodoc:
DEFAULT_OPTIONS = Rack::Session::Abstract::Persisted::DEFAULT_OPTIONS
def initialize(session = {})
super(nil, nil)
@id = Rack::Session::SessionId.new(SecureRandom.hex(16))
@data = stringify_keys(session)
@loaded = true
end
def exists?
true
end
def keys
@data.keys
end
def values
@data.values
end
def destroy
clear
end
def fetch(key, *args, &block)
@data.fetch(key.to_s, *args, &block)
end
private
def load!
@id
end
end
end