Commit 835fe4d3 authored by Sean McGivern

Merge branch '40781-os-to-ce' into 'master'

Bring Object Storage to CE

Closes #4171, #4163, #3370, #2841, and #29203

See merge request gitlab-org/gitlab-ce!17358
parents ab8f13c3 6d63a098
module SendFileUpload
def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil, disposition: 'attachment')
if attachment
redirect_params[:query] = { "response-content-disposition" => "#{disposition};filename=#{attachment.inspect}" }
send_params.merge!(filename: attachment, disposition: disposition)
end
if file_upload.file_storage?
send_file file_upload.path, send_params
elsif file_upload.class.proxy_download_enabled?
headers.store(*Gitlab::Workhorse.send_url(file_upload.url(**redirect_params)))
head :ok
else
redirect_to file_upload.url(**redirect_params)
end
end
end
module UploadsActions
include Gitlab::Utils::StrongMemoize
include SendFileUpload
UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
...@@ -26,14 +27,11 @@ module UploadsActions
def show
return render_404 unless uploader&.exists?
- if uploader.file_storage?
- disposition = uploader.image_or_video? ? 'inline' : 'attachment'
expires_in 0.seconds, must_revalidate: true, private: true
- send_file uploader.file.path, disposition: disposition
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
- else
- redirect_to uploader.url
send_upload(uploader, attachment: uploader.filename, disposition: disposition)
- end
end
private
...@@ -62,19 +60,27 @@ module UploadsActions
end
def build_uploader_from_upload
- return nil unless params[:secret] && params[:filename]
return unless uploader = build_uploader
- upload_path = uploader_class.upload_path(params[:secret], params[:filename])
upload_paths = uploader.upload_paths(params[:filename])
- upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_paths)
upload&.build_uploader
end
def build_uploader_from_params
return unless uploader = build_uploader
uploader.retrieve_from_store!(params[:filename])
uploader
end
def build_uploader
return unless params[:secret] && params[:filename]
uploader = uploader_class.new(model, secret: params[:secret])
- return nil unless uploader.model_valid?
return unless uploader.model_valid?
- uploader.retrieve_from_store!(params[:filename])
uploader
end
...
class Projects::ArtifactsController < Projects::ApplicationController
include ExtractsPath
include RendersBlob
include SendFileUpload
layout 'project'
before_action :authorize_read_build!
...@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
before_action :entry, only: [:file]
def download
- if artifacts_file.file_storage?
- send_file artifacts_file.path, disposition: 'attachment'
- else
- redirect_to artifacts_file.url
- end
send_upload(artifacts_file, attachment: artifacts_file.filename)
end
def browse
...@@ -45,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def raw
- path = Gitlab::Ci::Build::Artifacts::Path
-  .new(params[:path])
path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
send_artifacts_entry(build, path)
end
...@@ -75,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def validate_artifacts!
- render_404 unless build && build.artifacts?
render_404 unless build&.artifacts?
end
def build
...
class Projects::JobsController < Projects::ApplicationController
include SendFileUpload
before_action :build, except: [:index, :cancel_all]
before_action :authorize_read_build!,
...@@ -117,6 +119,11 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
if trace_artifact_file
send_upload(trace_artifact_file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
...@@ -125,6 +132,7 @@ class Projects::JobsController < Projects::ApplicationController
end
end
end
end
private
...@@ -136,6 +144,18 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :erase_build, build)
end
def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' }
end
def raw_redirect_params
{ query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
end
def trace_artifact_file
@trace_artifact_file ||= build.job_artifacts_trace&.file
end
def build
@build ||= project.builds.find(params[:id])
.present(current_user: current_user)
...
class Projects::LfsStorageController < Projects::GitHttpClientController
include LfsRequest
include WorkhorseRequest
include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
...@@ -11,7 +12,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
return
end
- send_file lfs_object.file.path, content_type: "application/octet-stream"
send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
end
def upload_authorize
...@@ -70,10 +71,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
end
def move_tmp_file_to_storage(object, path)
- File.open(path) do |f|
- object.file = f
- end
object.file = File.open(path)
object.file.store!
object.save
end
...
...@@ -2,6 +2,7 @@
class Projects::RawController < Projects::ApplicationController
include ExtractsPath
include BlobHelper
include SendFileUpload
before_action :require_non_empty_project
before_action :assign_ref_vars
...@@ -31,7 +32,7 @@ class Projects::RawController < Projects::ApplicationController
lfs_object = find_lfs_object
if lfs_object && lfs_object.project_allowed_access?(@project)
- send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
send_upload(lfs_object.file, attachment: @blob.name)
else
render_404
end
...
class Appearance < ActiveRecord::Base
include CacheMarkdownField
include AfterCommitQueue
include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
...
...@@ -3,6 +3,7 @@ module Ci
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include ObjectStorage::BackgroundMove
include Presentable
include Importable
...@@ -45,6 +46,7 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
...@@ -365,14 +367,20 @@ module Ci
project.running_or_pending_build_count(force: true)
end
def browsable_artifacts?
artifacts_metadata?
end
def artifacts_metadata_entry(path, **options)
artifacts_metadata.use_file do |metadata_path|
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
metadata_path,
path,
**options)
metadata.to_entry
end
end
def erase_artifacts!
remove_artifacts_file!
...
module Ci
class JobArtifact < ActiveRecord::Base
include AfterCommitQueue
include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model
belongs_to :project
...@@ -7,9 +9,11 @@ module Ci
before_save :set_size, if: :file_changed?
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
mount_uploader :file, JobArtifactUploader
- delegate :open, :exists?, to: :file
delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
...@@ -21,6 +25,10 @@ module Ci
self.where(project: project).sum(:size)
end
def local_store?
[nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
def set_size
self.size = file.size
end
...
...@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
...
class LfsObject < ActiveRecord::Base
include AfterCommitQueue
include ObjectStorage::BackgroundMove
has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :lfs_objects_projects
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
validates :oid, presence: true, uniqueness: true
mount_uploader :file, LfsObjectUploader
...@@ -10,6 +15,10 @@ class LfsObject < ActiveRecord::Base
projects.exists?(project.lfs_storage_project.id)
end
def local_store?
[nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
end
def self.destroy_unreferenced
joins("LEFT JOIN lfs_objects_projects ON lfs_objects_projects.lfs_object_id = #{table_name}.id")
.where(lfs_objects_projects: { id: nil })
...
...@@ -9,6 +9,8 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
...@@ -21,6 +23,7 @@ class Upload < ActiveRecord::Base
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
...@@ -30,11 +33,11 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksummable?
- self.checksum = self.class.hexdigest(absolute_path)
self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
- def build_uploader
def build_uploader(mounted_as = nil)
- uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
...@@ -51,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def delete_file!
...@@ -61,10 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
- def local?
- true
- end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
...
...@@ -81,12 +81,14 @@ module Projects
end
def extract_tar_archive!(temp_path)
- results = Open3.pipeline(%W(gunzip -c #{artifacts}),
build.artifacts_file.use_file do |artifacts_path|
results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
end
end
def extract_zip_archive!(temp_path)
raise FailedToExtractError, 'missing artifacts metadata' unless build.artifacts_metadata?
...@@ -103,10 +105,12 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
- unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
build.artifacts_file.use_file do |artifacts_path|
unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
raise FailedToExtractError, 'pages failed to extract'
end
end
end
def deploy_page!(archive_public_path)
# Do atomic move of pages
...
class AttachmentUploader < GitlabUploader
include RecordsUploads::Concern
include ObjectStorage::Concern
- storage :file
prepend ObjectStorage::Extension::RecordsUploads
include UploaderHelper
private
...
class AvatarUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
- storage :file
prepend ObjectStorage::Extension::RecordsUploads
def exists?
model.avatar.file && model.avatar.file.present?
end
def move_to_store
false
end
def move_to_cache
false
end
...
...@@ -10,7 +10,11 @@ class FileMover
def execute
move
- uploader.record_upload if update_markdown
if update_markdown
uploader.record_upload
uploader.schedule_background_upload
end
end
private
...@@ -24,11 +28,8 @@ class FileMover
updated_text = model.read_attribute(update_field)
.gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text)
- true
rescue
revert
false
end
...
...@@ -9,14 +9,18 @@
class FileUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
- storage :file
after :remove, :prune_store_dir
# FileUploader do not run in a model transaction, so we can simply
# enqueue a job after the :store hook.
after :store, :schedule_background_upload
def self.root
File.join(options.storage_path, 'uploads')
end
...@@ -28,8 +32,11 @@ class FileUploader < GitlabUploader
)
end
- def self.base_dir(model)
def self.base_dir(model, store = Store::LOCAL)
- model_path_segment(model)
decorated_model = model
decorated_model = Storage::HashedProject.new(model) if store == Store::REMOTE
model_path_segment(decorated_model)
end
# used in migrations and import/exports
...@@ -47,21 +54,24 @@ class FileUploader < GitlabUploader
#
# Returns a String without a trailing slash
def self.model_path_segment(model)
- if model.hashed_storage?(:attachments)
case model
- model.disk_path
when Storage::HashedProject then model.disk_path
else
- model.full_path
model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
end
end
- def self.upload_path(secret, identifier)
- File.join(secret, identifier)
- end
def self.generate_secret
SecureRandom.hex
end
def upload_paths(filename)
[
File.join(secret, filename),
File.join(base_dir(Store::REMOTE), secret, filename)
]
end
attr_accessor :model
def initialize(model, mounted_as = nil, **uploader_context)
...@@ -71,8 +81,10 @@ class FileUploader < GitlabUploader
apply_context!(uploader_context)
end
# enforce the usage of Hashed storage when storing to
# remote store as the FileMover doesn't support OS
- def base_dir
- self.class.base_dir(@model)
def base_dir(store = nil)
self.class.base_dir(@model, store || object_store)
end
# we don't need to know the actual path, an uploader instance should be
...@@ -82,15 +94,19 @@ class FileUploader < GitlabUploader
end
def upload_path
- self.class.upload_path(dynamic_segment, identifier)
if file_storage?
# Legacy path relative to project.full_path
File.join(dynamic_segment, identifier)
else
File.join(store_dir, identifier)
end
end
- def model_path_segment
- self.class.model_path_segment(@model)
- end
- def store_dir
- File.join(base_dir, dynamic_segment)
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
}
end
def markdown_link
...
...@@ -37,12 +37,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
cache_storage.is_a?(CarrierWave::Storage::File)
end
- # Reduce disk IO
def move_to_cache
file_storage?
end
- # Reduce disk IO
def move_to_store
file_storage?
end
...@@ -51,10 +49,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
file.present?
end
- def store_dir
- File.join(base_dir, dynamic_segment)
- end
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
...@@ -76,6 +70,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
...
class JobArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
...@@ -14,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
end
def open
- raise 'Only File System is supported' unless file_storage?
if file_storage?
File.open(path, "rb") if path
else
::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
end
end
private
...
class LegacyArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
...
class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
# LfsObject are in `tmp/upload` instead of `tmp/uploads`
def self.workhorse_upload_path
File.join(root, 'tmp/upload')
end
storage_options Gitlab.config.lfs
...
...@@ -4,7 +4,7 @@ class NamespaceFileUploader < FileUploader
options.storage_path
end
- def self.base_dir(model)
def self.base_dir(model, _store = nil)
File.join(options.base_dir, 'namespace', model_path_segment(model))
end
...@@ -14,6 +14,13 @@ class NamespaceFileUploader < FileUploader
# Re-Override
def store_dir
- File.join(base_dir, dynamic_segment)
store_dirs[object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join('namespace', self.class.model_path_segment(model), dynamic_segment)
}
end
end
require 'fog/aws'
require 'carrierwave/storage/fog'
#
# This concern should add object storage support
# to the GitlabUploader class
#
module ObjectStorage
RemoteStoreError = Class.new(StandardError)
UnknownStoreError = Class.new(StandardError)
ObjectStorageUnavailable = Class.new(StandardError)
module Store
LOCAL = 1
REMOTE = 2
end
module Extension
# this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
module RecordsUploads
extend ActiveSupport::Concern
def prepended(base)
raise "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(RecordsUploads::Concern)
end
def retrieve_from_store!(identifier)
paths = store_dirs.map { |store, path| File.join(path, identifier) }
unless current_upload_satisfies?(paths, model)
# the upload we already have isn't right, find the correct one
self.upload = uploads.find_by(model: model, path: paths)
end
super
end
def build_upload
super.tap do |upload|
upload.store = object_store
end
end
def upload=(upload)
return unless upload
self.object_store = upload.store
super
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
return unless upload
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
upload.class.to_s,
mounted_as,
upload.id)
end
private
def current_upload_satisfies?(paths, model)
return false unless upload
return false unless model
paths.include?(upload.path) &&
upload.model_id == model.id &&
upload.model_type == model.class.base_class.sti_name
end
end
end
# Add support for automatic background uploading after the file is stored.
#
module BackgroundMove
extend ActiveSupport::Concern
def background_upload(mount_points = [])
return unless mount_points.any?
run_after_commit do
mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
end
end
def changed_mounts
self.class.uploaders.select do |mount, uploader_class|
mounted_as = uploader_class.serialization_column(self.class, mount)
uploader = send(:"#{mounted_as}") # rubocop:disable GitlabSecurity/PublicSend
next unless uploader
next unless uploader.exists?
next unless send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
mount
end.keys
end
included do
after_save on: [:create, :update] do
background_upload(changed_mounts)
end
end
end
module Concern
extend ActiveSupport::Concern
included do |base|
base.include(ObjectStorage)
after :migrate, :delete_migrated_file
end
class_methods do
def object_store_options
options.object_store
end
def object_store_enabled?
object_store_options.enabled
end
def background_upload_enabled?
object_store_options.background_upload
end
def proxy_download_enabled?
object_store_options.proxy_download
end
def direct_download_enabled?
!proxy_download_enabled?
end
def object_store_credentials
object_store_options.connection.to_hash.deep_symbolize_keys
end
def remote_store_path
object_store_options.remote_directory
end
def serialization_column(model_class, mount_point)
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
end
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
def object_store
@object_store ||= model.try(store_serialization_column) || Store::LOCAL
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def object_store=(value)
@object_store = value || Store::LOCAL
@storage = storage_for(object_store)
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
# Return true if the current file is part of the model (i.e. is mounted in the model)
#
def persist_object_store?
model.respond_to?(:"#{store_serialization_column}=")
end
# Save the current @object_store to the model <mounted_as>_store column
def persist_object_store!
return unless persist_object_store?
updated = model.update_column(store_serialization_column, object_store)
raise 'Failed to update object store' unless updated
end
def use_file
if file_storage?
return yield path
end
begin
cache_stored_file!
yield cache_path
ensure
cache_storage.delete_dir!(cache_path(nil))
end
end
def filename
super || file&.filename
end
#
# Move the file to another store
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
#
def migrate!(new_store)
uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
raise 'Already running' unless uuid
unsafe_migrate!(new_store)
ensure
Gitlab::ExclusiveLease.cancel(exclusive_lease_key, uuid)
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
model.class.name,
mounted_as,
model.id)
end
def fog_directory
self.class.remote_store_path
end
def fog_credentials
self.class.object_store_credentials
end
def fog_public
false
end
def delete_migrated_file(migrated_file)
migrated_file.delete if exists?
end
def exists?
file.present?
end
def store_dir(store = nil)
store_dirs[store || object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(dynamic_segment)
}
end
private
def schedule_background_upload?
self.class.object_store_enabled? &&
self.class.background_upload_enabled? &&
self.file_storage?
end
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
@file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def serialization_column
self.class.serialization_column(model.class, mounted_as)
end
# Returns the column where the 'store' is saved
# defaults to 'store'
def store_serialization_column
[serialization_column, 'store'].compact.join('_').to_sym
end
def storage
@storage ||= storage_for(object_store)
end
def storage_for(store)
case store
when Store::REMOTE
raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
when Store::LOCAL
CarrierWave::Storage::File.new(self)
else
raise UnknownStoreError
end
end
def exclusive_lease_key
"object_storage_migrate:#{model.class}:#{model.id}"
end
#
# Move the file to another store
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
#
def unsafe_migrate!(new_store)
return unless object_store != new_store
return unless file
new_file = nil
file_to_delete = file
from_object_store = object_store
self.object_store = new_store # changes the storage and file
cache_stored_file! if file_storage?
with_callbacks(:migrate, file_to_delete) do
with_callbacks(:store, file_to_delete) do # for #store_versions!
new_file = storage.store!(file)
persist_object_store!
self.file = new_file
end
end
file
rescue => e
# in case of failure delete new file
new_file.delete unless new_file.nil?
# revert back to the old file
self.object_store = from_object_store
self.file = file_to_delete
raise e
end
end
end
...@@ -4,7 +4,7 @@ class PersonalFileUploader < FileUploader
options.storage_path
end
- def self.base_dir(model)
def self.base_dir(model, _store = nil)
File.join(options.base_dir, model_path_segment(model))
end
...@@ -14,6 +14,12 @@ class PersonalFileUploader < FileUploader
File.join(model.class.to_s.underscore, model.id.to_s)
end
def object_store
return Store::LOCAL unless model
super
end
# model_path_segment does not require a model to be passed, so we can always
# generate a path, even when there's no model.
def model_valid?
...@@ -22,7 +28,14 @@ class PersonalFileUploader < FileUploader
# Revert-Override
def store_dir
- File.join(base_dir, dynamic_segment)
store_dirs[object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(self.class.model_path_segment(model), dynamic_segment)
}
end
private
...
...@@ -24,8 +24,7 @@ module RecordsUploads
uploads.where(path: upload_path).delete_all
upload.destroy! if upload
- self.upload = build_upload
- upload.save!
self.upload = build_upload.tap(&:save!)
end
end
...
...@@ -35,7 +35,7 @@
= link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
Download
- if @build.browsable_artifacts?
= link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
Browse
...
...@@ -39,6 +39,10 @@
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
- object_storage_upload
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
...
# Concern for setting Sidekiq settings for the various GitLab ObjectStorage workers.
module ObjectStorageQueue
extend ActiveSupport::Concern
included do
queue_namespace :object_storage
end
end
module ObjectStorage
class BackgroundMoveWorker
include ApplicationWorker
include ObjectStorageQueue
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = build_uploader(subject, file_field&.to_sym)
uploader.migrate!(ObjectStorage::Store::REMOTE)
end
def build_uploader(subject, mount_point)
case subject
when Upload then subject.build_uploader(mount_point)
else
subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Metrics/LineLength
# rubocop:disable Style/Documentation
module ObjectStorage
class MigrateUploadsWorker
include ApplicationWorker
include ObjectStorageQueue
SanityCheckError = Class.new(StandardError)
class Upload < ActiveRecord::Base
# Upper limit for foreground checksum processing
CHECKSUM_THRESHOLD = 100.megabytes
belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :size, presence: true
validates :path, presence: true
validates :model, presence: true
validates :uploader, presence: true
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
def calculate_checksum!
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
end
def exist?
File.exist?(absolute_path)
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def checksummable?
checksum.nil? && local? && exist?
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
UploadChecksumWorker.perform_async(id)
end
def relative_path?
!path.start_with?('/')
end
def identifier
File.basename(path)
end
def uploader_class
Object.const_get(uploader)
end
end
class MigrationResult
attr_reader :upload
attr_accessor :error
def initialize(upload, error = nil)
@upload, @error = upload, error
end
def success?
error.nil?
end
def to_s
success? ? "Migration successful." : "Error while migrating #{upload.id}: #{error.message}"
end
end
module Report
class MigrationFailures < StandardError
attr_reader :errors
def initialize(errors)
@errors = errors
end
def message
errors.map(&:message).join("\n")
end
end
def report!(results)
success, failures = results.partition(&:success?)
Rails.logger.info header(success, failures)
Rails.logger.warn failures(failures)
raise MigrationFailures.new(failures.map(&:error)) if failures.any?
end
def header(success, failures)
"Migrated #{success.count}/#{success.count + failures.count} files."
end
def failures(failures)
failures.map { |f| "\t#{f}" }.join('\n')
end
end
include Report
def self.enqueue!(uploads, mounted_as, to_store)
sanity_check!(uploads, mounted_as)
perform_async(uploads.ids, mounted_as, to_store)
end
# We need to be sure all the uploads are for the same uploader and model type
# and that the mount point exists if provided.
#
def self.sanity_check!(uploads, mounted_as)
upload = uploads.first
uploader_class = upload.uploader.constantize
model_class = uploads.first.model_type.constantize
uploader_types = uploads.map(&:uploader).uniq
model_types = uploads.map(&:model_type).uniq
model_has_mount = mounted_as.nil? || model_class.uploaders[mounted_as] == uploader_class
raise(SanityCheckError, "Multiple uploaders found: #{uploader_types}") unless uploader_types.count == 1
raise(SanityCheckError, "Multiple model types found: #{model_types}") unless model_types.count == 1
raise(SanityCheckError, "Mount point #{mounted_as} not found in #{model_class}.") unless model_has_mount
end
def perform(ids, mounted_as, to_store)
@mounted_as = mounted_as&.to_sym
@to_store = to_store
uploads = Upload.preload(:model).where(id: ids)
sanity_check!(uploads)
results = migrate(uploads)
report!(results)
rescue SanityCheckError => e
# do not retry: the job is insane
Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
end
def sanity_check!(uploads)
self.class.sanity_check!(uploads, @mounted_as)
end
def build_uploaders(uploads)
uploads.map { |upload| upload.build_uploader(@mounted_as) }
end
def migrate(uploads)
build_uploaders(uploads).map(&method(:process_uploader))
end
def process_uploader(uploader)
MigrationResult.new(uploader.upload).tap do |result|
begin
uploader.migrate!(@to_store)
rescue => e
result.error = e
end
end
end
end
end
# @Deprecated - remove once the `object_storage_upload` queue is empty
# The queue has been renamed `object_storage:object_storage_background_upload`
#
class ObjectStorageUploadWorker
include ApplicationWorker
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
end
end
---
title: Update CI/CD secret variables list to be dynamic and save without reloading
the page
merge_request: 4110
author:
type: added
---
title: Fix JavaScript bundle running on Cluster update/destroy pages
merge_request:
author:
type: fixed
---
title: Authorize project access with an external service
merge_request: 4675
author:
type: added
---
title: Add object storage support for LFS objects, CI artifacts, and uploads.
merge_request: 17358
author:
type: added
---
title: File uploads in remote storage now support project renaming.
merge_request: 4597
author:
type: fixed
...@@ -145,18 +145,55 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
# object_store:
# enabled: false
# remote_directory: artifacts # The bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
# connection:
# provider: AWS # Only AWS supported at the moment
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: eu-central-1
## Git LFS
lfs:
enabled: true
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
object_store:
enabled: false
remote_directory: lfs-objects # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
# Use the following options to configure an AWS compatible host
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## Uploads (attachments, avatars, etc...)
uploads:
# The location where uploads objects are stored (default: public/).
# storage_path: public/
# base_dir: uploads/-/system
object_store:
enabled: false
# remote_directory: uploads # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
# connection:
# provider: AWS
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: eu-central-1
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
...@@ -655,10 +692,39 @@ test:
enabled: true
lfs:
enabled: false
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
object_store:
enabled: false
remote_directory: lfs-objects # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
artifacts:
path: tmp/tests/artifacts
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
object_store:
enabled: false
remote_directory: artifacts # The bucket name
background_upload: false
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
uploads:
storage_path: tmp/tests/public
object_store:
enabled: false
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
gitlab:
host: localhost
port: 80
...
...@@ -305,6 +305,13 @@ Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values
# Settings.artifact['path'] is deprecated, use `storage_path` instead
Settings.artifacts['path'] = Settings.artifacts['storage_path']
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Registry
...@@ -340,6 +347,13 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] ||= Settingslogic.new({})
Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
Settings.lfs['object_store']['remote_directory'] ||= nil
Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
# Uploads
...@@ -347,6 +361,13 @@ Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] ||
Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
Settings.uploads['object_store'] ||= Settingslogic.new({})
Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
Settings.uploads['object_store']['proxy_download'] = false if Settings.uploads['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.uploads['object_store']['connection']&.deep_stringify_keys!
#
# Mattermost
...
#
# Monkey patching the https support for private urls
# See https://gitlab.com/gitlab-org/gitlab-ee/issues/4879
#
module Fog
module Storage
class GoogleXML
class File < Fog::Model
module MonkeyPatch
def url(expires)
requires :key
collection.get_https_url(key, expires)
end
end
prepend MonkeyPatch
end
end
end
end
...@@ -68,5 +68,7 @@
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
- [pages_domain_verification, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
- [plugin, 1]
- [pipeline_background, 1]
class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column(:ci_builds, :artifacts_file_store, :integer)
add_column(:ci_builds, :artifacts_metadata_store, :integer)
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddFileStoreToLfsObjects < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
# DOWNTIME_REASON = ''
# When using the methods "add_concurrent_index", "remove_concurrent_index" or
# "add_column_with_default" you must disable the use of transactions
# as these methods can not run in an existing transaction.
# When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
# that either of them is the _only_ method called in the migration,
# any other changes should go in a separate migration.
# This ensures that upon failure _only_ the index creation or removing fails
# and can be retried or reverted easily.
#
# To disable transactions uncomment the following line and remove these
# comments:
# disable_ddl_transaction!
def change
add_column(:lfs_objects, :file_store, :integer)
end
end
class AddFileStoreJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def change
add_column(:ci_job_artifacts, :file_store, :integer)
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddStoreColumnToUploads < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column(:uploads, :store, :integer)
end
end
...@@ -307,6 +307,8 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
t.integer "artifacts_file_store"
t.integer "artifacts_metadata_store"
t.boolean "protected"
t.integer "failure_reason"
end
...@@ -345,6 +347,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "file_store"
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
...@@ -1009,6 +1012,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
t.integer "file_store"
end
add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
...@@ -1824,6 +1828,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at", null: false
t.string "mount_point"
t.string "secret"
t.integer "store"
end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
...
...@@ -87,10 +87,124 @@ _The artifacts are stored by default in
### Using object storage
> **Notes:**
> - [Introduced][ee-1762] in [GitLab Premium][eep] 9.4.
> - Since version 9.5, artifacts are [browsable] when object storage is enabled;
>   9.4 lacks this feature.
> Available in [GitLab Premium](https://about.gitlab.com/products/) and
> [GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
If you don't want to use the local disk where GitLab is installed to store the
artifacts, you can use an object storage provider like AWS S3 instead.
This configuration relies on valid AWS credentials to be configured already.
Use an [Object storage option][os] like AWS S3 to store job artifacts.
### Object Storage Settings
For source installations the following settings are nested under `artifacts:` and then `object_store:`. On omnibus installs they are prefixed by `artifacts_object_store_`.
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where artifacts will be stored | |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it false reduces egress traffic, because clients download directly from remote storage instead of having all data proxied | `false` |
| `connection` | Various connection options described below | |
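The `background_upload` and `proxy_download` toggles do not appear in the examples below; assuming the `artifacts_object_store_` prefix described above, they would look roughly like this sketch on Omnibus installs:

```ruby
# /etc/gitlab/gitlab.rb — illustrative sketch only
# Keep uploading new artifacts to object storage via a background job (the default)
gitlab_rails['artifacts_object_store_background_upload'] = true
# Redirect clients to remote storage instead of proxying downloads through GitLab
gitlab_rails['artifacts_object_store_proxy_download'] = false
```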
#### S3 compatible connection settings
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
| Setting | Description | Default |
|---------|-------------|---------|
| `provider` | Always `AWS` for compatible hosts | AWS |
| `aws_access_key_id` | AWS credentials, or compatible | |
| `aws_secret_access_key` | AWS credentials, or compatible | |
| `region` | AWS region | us-east-1 |
| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
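For a non-AWS, S3-compatible service such as [Minio](https://www.minio.io), the `host`, `endpoint`, and `path_style` options come into play. A sketch of the resulting `artifacts_object_store_connection` hash for Omnibus installs (credentials and endpoint are placeholders):

```ruby
gitlab_rails['artifacts_object_store_connection'] = {
  'provider' => 'AWS',            # always AWS for S3-compatible hosts
  'region' => 'us-east-1',
  'aws_access_key_id' => 'MINIO_ACCESS_KEY',
  'aws_secret_access_key' => 'MINIO_SECRET_KEY',
  'host' => 'localhost',
  'endpoint' => 'http://127.0.0.1:9000',
  'path_style' => true            # host/bucket_name/object style paths
}
```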
**In Omnibus installations:**
_The artifacts are stored by default in
`/var/opt/gitlab/gitlab-rails/shared/artifacts`._
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, replacing the example
   values with the values you want:
```ruby
gitlab_rails['artifacts_enabled'] = true
gitlab_rails['artifacts_object_store_enabled'] = true
gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
gitlab_rails['artifacts_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```
NOTE: For GitLab 9.4+, if you are using AWS IAM profiles, be sure to omit the
AWS access key and secret access key/value pairs. For example:
```ruby
gitlab_rails['artifacts_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'use_iam_profile' => true
}
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
1. Migrate any existing local artifacts to the object storage:
```bash
gitlab-rake gitlab:artifacts:migrate
```
Currently this has to be executed manually, and it will migrate the existing
artifacts to the object storage. All new artifacts will still be stored on the
local disk; in the future you will be given an option to define a default
storage location for all new files.
---
**In installations from source:**
_The artifacts are stored by default in
`/home/git/gitlab/shared/artifacts`._
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
artifacts:
enabled: true
object_store:
enabled: true
remote_directory: "artifacts" # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
```
1. Save the file and [restart GitLab][] for the changes to take effect.
1. Migrate any existing local artifacts to the object storage:
```bash
sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
```
Use an [Object storage option][ee-os] like AWS S3 to store job artifacts. Currently this has to be executed manually, and it will migrate the existing
artifacts to the object storage. All new artifacts will still be stored on the
local disk; in the future you will be given an option to define a default
storage location for all new files.
## Expiring artifacts ## Expiring artifacts
...@@ -194,7 +308,7 @@ When clicking on a specific file, [GitLab Workhorse] extracts it ...@@ -194,7 +308,7 @@ When clicking on a specific file, [GitLab Workhorse] extracts it
from the archive and the download begins. This implementation saves space, from the archive and the download begins. This implementation saves space,
memory and disk I/O. memory and disk I/O.
[reconfigure gitlab]: restart_gitlab.md "How to restart GitLab" [reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: restart_gitlab.md "How to restart GitLab" [restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
[gitlab workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse "GitLab Workhorse repository" [gitlab workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse "GitLab Workhorse repository"
[ee-os]: https://docs.gitlab.com/ee/administration/job_artifacts.html#using-object-storage [os]: https://docs.gitlab.com/administration/job_artifacts.html#using-object-storage
# Uploads Migrate Rake Task
## Migrate to Object Storage
After [configuring the object storage](../../uploads.md#using-object-storage) for GitLab's uploads, you may use this task to migrate existing uploads from the local storage to the remote storage.
>**Note:**
All of the processing will be done in a background worker and requires **no downtime**.
This task uses three parameters to find uploads to migrate.
>**Note:**
These parameters are mainly internal to GitLab's structure; you may want to refer to the task list below instead.
Parameter | Type | Description
--------- | ---- | -----------
`uploader_class` | string | Type of the uploader to migrate from
`model_class` | string | Type of the model to migrate from
`mount_point` | string/symbol | Name of the model's column on which the uploader is mounted.
This task also accepts some environment variables which you can use to override
certain values:
Variable | Type | Description
-------- | ---- | -----------
`BATCH` | integer | Specifies the size of the batch. Defaults to 200.
**Omnibus Installation**
```bash
# gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
# Avatars
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
**Source Installation**
>**Note:**
Use `RAILS_ENV=production` for every task.
```bash
# sudo -u git -H bundle exec rake gitlab:uploads:migrate
# Avatars
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
# Uploads administration
Uploads represent all user data that may be sent to GitLab as a single file. As an example, avatars and notes' attachments are uploads. Uploads are integral to GitLab functionality, and therefore cannot be disabled.
### Using local storage
>**Note:**
This is the default configuration.
To change the location where the uploads are stored locally, follow the steps
below.
---
**In Omnibus installations:**
>**Note:**
For historical reasons, uploads are stored in a base directory, which by default is `uploads/-/system`. Changing this configuration option on an existing GitLab installation is strongly discouraged.
_The uploads are stored by default in `/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
1. To change the storage path, for example to `/mnt/storage/uploads`, edit
`/etc/gitlab/gitlab.rb` and add the following line:
```ruby
gitlab_rails['uploads_storage_path'] = "/mnt/storage/"
gitlab_rails['uploads_base_dir'] = "uploads"
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
---
**In installations from source:**
_The uploads are stored by default in
`/home/git/gitlab/public/uploads/-/system`._
1. To change the storage path, for example to `/mnt/storage/uploads`, edit
`/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
```yaml
uploads:
storage_path: /mnt/storage
base_dir: uploads
```
1. Save the file and [restart GitLab][] for the changes to take effect.
### Using object storage
>**Notes:**
- [Introduced][ee-3867] in [GitLab Enterprise Edition Premium][eep] 10.5.
If you don't want to use the local disk where GitLab is installed to store the
uploads, you can use an object storage provider like AWS S3 instead.
This configuration relies on valid AWS credentials to be configured already.
### Object Storage Settings
For source installations the following settings are nested under `uploads:` and then `object_store:`. On omnibus installs they are prefixed by `uploads_object_store_`.
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where uploads will be stored | |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it false reduces egress traffic, because clients download directly from remote storage instead of having all data proxied | `false` |
| `connection` | Various connection options described below | |
#### S3 compatible connection settings
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
| Setting | Description | Default |
|---------|-------------|---------|
| `provider` | Always `AWS` for compatible hosts | AWS |
| `aws_access_key_id` | AWS credentials, or compatible | |
| `aws_secret_access_key` | AWS credentials, or compatible | |
| `region` | AWS region | us-east-1 |
| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
**In Omnibus installations:**
_The uploads are stored by default in
`/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, replacing the example
   values with the values you want:
```ruby
gitlab_rails['uploads_object_store_enabled'] = true
gitlab_rails['uploads_object_store_remote_directory'] = "uploads"
gitlab_rails['uploads_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```
>**Note:**
If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
```ruby
gitlab_rails['uploads_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'use_iam_profile' => true
}
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
1. Migrate any existing local uploads to the object storage:
>**Note:**
This task complies with the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing is done in a background worker and requires **no downtime**.
```bash
# gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
# Avatars
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
Currently this has to be executed manually, and it will migrate the existing
uploads to the object storage. All new uploads will still be stored on the
local disk; in the future you will be given an option to define a default
storage for all new files.
---
**In installations from source:**
_The uploads are stored by default in
`/home/git/gitlab/public/uploads/-/system`._
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
uploads:
object_store:
enabled: true
remote_directory: "uploads" # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
```
1. Save the file and [restart GitLab][] for the changes to take effect.
1. Migrate any existing local uploads to the object storage:
>**Notes:**
- This task complies with the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing is done in a background worker and requires **no downtime**.
- To migrate in production, use the `RAILS_ENV=production` environment variable.
```bash
# sudo -u git -H bundle exec rake gitlab:uploads:migrate
# Avatars
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
Currently this has to be executed manually, and it will migrate the existing
uploads to the object storage. All new uploads will still be stored on the
local disk; in the future you will be given an option to define a default
storage for all new files.
[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
[eep]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition Premium"
[ee-3867]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3867
...@@ -14,3 +14,4 @@ comments: false ...@@ -14,3 +14,4 @@ comments: false
- [Webhooks](web_hooks.md) - [Webhooks](web_hooks.md)
- [Import](import.md) of git repositories in bulk - [Import](import.md) of git repositories in bulk
- [Rebuild authorized_keys file](http://docs.gitlab.com/ce/raketasks/maintenance.html#rebuild-authorized_keys-file) task for administrators - [Rebuild authorized_keys file](http://docs.gitlab.com/ce/raketasks/maintenance.html#rebuild-authorized_keys-file) task for administrators
- [Migrate Uploads](../administration/raketasks/uploads/migrate.md)
...@@ -5,6 +5,7 @@ Documentation on how to use Git LFS are under [Managing large binary files with ...@@ -5,6 +5,7 @@ Documentation on how to use Git LFS are under [Managing large binary files with
## Requirements ## Requirements
* Git LFS is supported in GitLab starting with version 8.2. * Git LFS is supported in GitLab starting with version 8.2.
* Support for object storage, such as AWS S3, was introduced in 10.0.
* Users need to install [Git LFS client](https://git-lfs.github.com) version 1.0.1 and up. * Users need to install [Git LFS client](https://git-lfs.github.com) version 1.0.1 and up.
## Configuration ## Configuration
...@@ -12,16 +13,18 @@ Documentation on how to use Git LFS are under [Managing large binary files with ...@@ -12,16 +13,18 @@ Documentation on how to use Git LFS are under [Managing large binary files with
Git LFS objects can be large in size. By default, they are stored on the server Git LFS objects can be large in size. By default, they are stored on the server
GitLab is installed on. GitLab is installed on.
There are two configuration options to help GitLab server administrators: There are various configuration options to help GitLab server administrators:
* Enabling/disabling Git LFS support * Enabling/disabling Git LFS support
* Changing the location of LFS object storage * Changing the location of LFS object storage
* Setting up AWS S3 compatible object storage
### Omnibus packages ### Omnibus packages
In `/etc/gitlab/gitlab.rb`: In `/etc/gitlab/gitlab.rb`:
```ruby ```ruby
# Change to true to enable lfs
gitlab_rails['lfs_enabled'] = false gitlab_rails['lfs_enabled'] = false
# Optionally, change the storage path location. Defaults to # Optionally, change the storage path location. Defaults to
...@@ -35,11 +38,114 @@ gitlab_rails['lfs_storage_path'] = "/mnt/storage/lfs-objects" ...@@ -35,11 +38,114 @@ gitlab_rails['lfs_storage_path'] = "/mnt/storage/lfs-objects"
In `config/gitlab.yml`: In `config/gitlab.yml`:
```yaml ```yaml
# Change to true to enable lfs
lfs: lfs:
enabled: false enabled: false
storage_path: /mnt/storage/lfs-objects storage_path: /mnt/storage/lfs-objects
``` ```
## Setting up S3 compatible object storage
> **Note:** [Introduced][ee-2760] in [GitLab Premium][eep] 10.0.
> Available in [GitLab CE][ce] 10.7
It is possible to store LFS objects on remote object storage instead of on a local disk.
This allows you to offload storage to an external AWS S3 compatible service, freeing up disk space locally. You can also host your own S3 compatible storage decoupled from GitLab, with a service such as [Minio](https://www.minio.io/).
Object storage currently transfers files first to GitLab, and then to the object storage in a second stage. This can be done either by using a rake task to transfer existing objects, or in a background job after each file is received.
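Both paths ultimately move each CarrierWave file to the remote store. A minimal sketch of the per-object step, taken from the `gitlab:lfs:migrate` rake task added in this merge request:

```ruby
# Migrate every locally stored LFS object to the configured object storage
LfsObject.with_files_stored_locally.find_each(batch_size: 10) do |lfs_object|
  lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
```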
### Object Storage Settings
For source installations the following settings are nested under `lfs:` and then `object_store:`. On omnibus installs they are prefixed by `lfs_object_store_`.
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where LFS objects will be stored| |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it false reduces egress traffic, because clients download directly from remote storage instead of having all data proxied | `false` |
| `connection` | Various connection options described below | |
#### S3 compatible connection settings
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
| Setting | Description | Default |
|---------|-------------|---------|
| `provider` | Always `AWS` for compatible hosts | AWS |
| `aws_access_key_id` | AWS credentials, or compatible | |
| `aws_secret_access_key` | AWS credentials, or compatible | |
| `region` | AWS region | us-east-1 |
| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
### From source
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
lfs:
enabled: true
object_store:
enabled: true
remote_directory: lfs-objects # Bucket name
connection:
provider: AWS
aws_access_key_id: 1ABCD2EFGHI34JKLM567N
aws_secret_access_key: abcdefhijklmnopQRSTUVwxyz0123456789ABCDE
region: eu-central-1
# Use the following options to configure an AWS compatible host such as Minio
host: 'localhost'
endpoint: 'http://127.0.0.1:9000'
path_style: true
```
1. Save the file and [restart GitLab][] for the changes to take effect.
1. Migrate any existing local LFS objects to the object storage:
```bash
sudo -u git -H bundle exec rake gitlab:lfs:migrate RAILS_ENV=production
```
This will migrate existing LFS objects to object storage. New LFS objects
will be forwarded to object storage unless
`gitlab_rails['lfs_object_store_background_upload']` is set to false.
### In Omnibus
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, replacing the example
   values with the values you want:
```ruby
gitlab_rails['lfs_object_store_enabled'] = true
gitlab_rails['lfs_object_store_remote_directory'] = "lfs-objects"
gitlab_rails['lfs_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'aws_access_key_id' => '1ABCD2EFGHI34JKLM567N',
'aws_secret_access_key' => 'abcdefhijklmnopQRSTUVwxyz0123456789ABCDE',
# The below options configure an S3 compatible host instead of AWS
'host' => 'localhost',
'endpoint' => 'http://127.0.0.1:9000',
'path_style' => true
}
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
1. Migrate any existing local LFS objects to the object storage:
```bash
gitlab-rake gitlab:lfs:migrate
```
This will migrate existing LFS objects to object storage. New LFS objects
will be forwarded to object storage unless
`gitlab_rails['lfs_object_store_background_upload']` is set to false.
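If you want new LFS objects to stay on local disk until you run the migration task yourself, the automatic background upload can be switched off (sketch):

```ruby
# /etc/gitlab/gitlab.rb
gitlab_rails['lfs_object_store_background_upload'] = false
```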
## Storage statistics ## Storage statistics
You can see the total storage used for LFS objects on groups and projects You can see the total storage used for LFS objects on groups and projects
...@@ -48,10 +154,13 @@ and [projects APIs](../../api/projects.md). ...@@ -48,10 +154,13 @@ and [projects APIs](../../api/projects.md).
## Known limitations ## Known limitations
* Currently, storing GitLab Git LFS objects on a non-local storage (like S3 buckets)
is not supported
* Support for removing unreferenced LFS objects was added in 8.14 onwards. * Support for removing unreferenced LFS objects was added in 8.14 onwards.
* LFS authentications via SSH was added with GitLab 8.12 * LFS authentications via SSH was added with GitLab 8.12
* Only compatible with the GitLFS client versions 1.1.0 and up, or 1.0.2. * Only compatible with the GitLFS client versions 1.1.0 and up, or 1.0.2.
* The storage statistics currently count each LFS object multiple times for * The storage statistics currently count each LFS object multiple times for
every project linking to it every project linking to it
[reconfigure gitlab]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: ../../administration/restart_gitlab.md#installations-from-source "How to restart GitLab"
[eep]: https://about.gitlab.com/products/ "GitLab Premium"
[ee-2760]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/2760
...@@ -410,7 +410,7 @@ module API ...@@ -410,7 +410,7 @@ module API
) )
end end
def present_file!(path, filename, content_type = 'application/octet-stream') def present_disk_file!(path, filename, content_type = 'application/octet-stream')
filename ||= File.basename(path) filename ||= File.basename(path)
header['Content-Disposition'] = "attachment; filename=#{filename}" header['Content-Disposition'] = "attachment; filename=#{filename}"
header['Content-Transfer-Encoding'] = 'binary' header['Content-Transfer-Encoding'] = 'binary'
...@@ -426,13 +426,17 @@ module API ...@@ -426,13 +426,17 @@ module API
end end
end end
def present_artifacts!(artifacts_file) def present_carrierwave_file!(file, supports_direct_download: true)
return not_found! unless artifacts_file.exists? return not_found! unless file.exists?
if artifacts_file.file_storage? if file.file_storage?
present_file!(artifacts_file.path, artifacts_file.filename) present_disk_file!(file.path, file.filename)
elsif supports_direct_download && file.class.direct_download_enabled?
redirect(file.url)
else else
redirect_to(artifacts_file.url) header(*Gitlab::Workhorse.send_url(file.url))
status :ok
body
end end
end end
......
...@@ -28,7 +28,7 @@ module API ...@@ -28,7 +28,7 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name]) builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job]) latest_build = builds.find_by!(name: params[:job])
present_artifacts!(latest_build.artifacts_file) present_carrierwave_file!(latest_build.artifacts_file)
end end
desc 'Download the artifacts archive from a job' do desc 'Download the artifacts archive from a job' do
...@@ -43,7 +43,7 @@ module API ...@@ -43,7 +43,7 @@ module API
build = find_build!(params[:job_id]) build = find_build!(params[:job_id])
present_artifacts!(build.artifacts_file) present_carrierwave_file!(build.artifacts_file)
end end
desc 'Download a specific file from artifacts archive' do desc 'Download a specific file from artifacts archive' do
......
...@@ -72,7 +72,7 @@ module API ...@@ -72,7 +72,7 @@ module API
present build, with: Entities::Job present build, with: Entities::Job
end end
# TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of # is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse. # `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific job of a project' desc 'Get a trace of a specific job of a project'
......
...@@ -25,7 +25,7 @@ module API ...@@ -25,7 +25,7 @@ module API
render_api_error!('404 Not found or has expired', 404) unless path render_api_error!('404 Not found or has expired', 404) unless path
present_file!(path, File.basename(path), 'application/gzip') present_disk_file!(path, File.basename(path), 'application/gzip')
end end
desc 'Start export' do desc 'Start export' do
......
...@@ -244,11 +244,12 @@ module API ...@@ -244,11 +244,12 @@ module API
params do params do
requires :id, type: Integer, desc: %q(Job's ID) requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token) optional :token, type: String, desc: %q(Job's authentication token)
optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
end end
get '/:id/artifacts' do get '/:id/artifacts' do
job = authenticate_job! job = authenticate_job!
present_artifacts!(job.artifacts_file) present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
end end
end end
end end
......
...@@ -85,7 +85,7 @@ module API ...@@ -85,7 +85,7 @@ module API
build = get_build!(params[:build_id]) build = get_build!(params[:build_id])
present_artifacts!(build.artifacts_file) present_carrierwave_file!(build.artifacts_file)
end end
desc 'Download the artifacts file from build' do desc 'Download the artifacts file from build' do
...@@ -102,10 +102,10 @@ module API ...@@ -102,10 +102,10 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name]) builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job]) latest_build = builds.find_by!(name: params[:job])
present_artifacts!(latest_build.artifacts_file) present_carrierwave_file!(latest_build.artifacts_file)
end end
# TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of # is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse. # `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific build of a project' desc 'Get a trace of a specific build of a project'
......
##
# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# source: https://gitlab.com/snippets/1685610
module Gitlab
module Ci
class Trace
class HttpIO
BUFFER_SIZE = 128.kilobytes
InvalidURLError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
attr_reader :uri, :size
attr_reader :tell
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(url, size)
raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
@uri = URI(url)
@size = size
@tell = 0
end
def close
# no-op
end
def binmode
# no-op
end
def binmode?
true
end
def path
nil
end
def url
@uri.to_s
end
def seek(pos, where = IO::SEEK_SET)
new_pos =
case where
when IO::SEEK_END
size + pos
when IO::SEEK_SET
pos
when IO::SEEK_CUR
tell + pos
else
-1
end
raise 'new position is outside of file' if new_pos < 0 || new_pos > size
@tell = new_pos
end
def eof?
tell == size
end
def each_line
until eof?
line = readline
break if line.nil?
yield(line)
end
end
def read(length = nil)
out = ""
until eof? || (length && out.length >= length)
data = get_chunk
break if data.empty?
out << data
@tell += data.bytesize
end
out = out[0, length] if length && out.length > length
out
end
def readline
out = ""
until eof?
data = get_chunk
new_line = data.index("\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
out << data
@tell += data.bytesize
end
end
out
end
def write(data)
raise NotImplementedError
end
def truncate(offset)
raise NotImplementedError
end
def flush
raise NotImplementedError
end
def present?
true
end
private
##
# The below methods are not implemented in IO class
#
def in_range?
@chunk_range&.include?(tell)
end
def get_chunk
unless in_range?
response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
http.request(request)
end
raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
@chunk = response.body.force_encoding(Encoding::BINARY)
@chunk_range = response.content_range
##
# Note: If provider does not return content_range, then we set it as we requested
# Provider: minio
# - When the file size is larger than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206
# Provider: AWS
# - When the file size is larger than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206
# Provider: GCS
# - When the file size is larger than requested Content-range, the Content-range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than requested Content-range, the Content-range is included in responses with Net::HTTPOK 200
@chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
end
@chunk[chunk_offset..BUFFER_SIZE]
end
def request
Net::HTTP::Get.new(uri).tap do |request|
request.set_range(chunk_start, BUFFER_SIZE)
end
end
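# Example with BUFFER_SIZE = 128.kilobytes (131_072 bytes) and tell = 200_000:
#   chunk_start  => 131_072 (start of the chunk containing the current position)
#   chunk_offset =>  68_928 (offset of the current position within that chunk)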
def chunk_offset
tell % BUFFER_SIZE
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
end
end
end
end
end
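A minimal usage sketch of this IO adapter; the URL and size below are placeholder values:

```ruby
# Reads a remote file over HTTP using ranged GET requests of BUFFER_SIZE bytes
io = Gitlab::Ci::Trace::HttpIO.new('https://example.com/trace.log', 1024)
io.seek(0, IO::SEEK_SET)            # position like a regular IO
io.each_line { |line| puts line }   # each chunk is fetched lazily via get_chunk
io.close                            # no-op, provided for IO compatibility
```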
...@@ -8,7 +8,7 @@ module Gitlab ...@@ -8,7 +8,7 @@ module Gitlab
attr_reader :stream attr_reader :stream
delegate :close, :tell, :seek, :size, :path, :truncate, to: :stream, allow_nil: true delegate :close, :tell, :seek, :size, :path, :url, :truncate, to: :stream, allow_nil: true
delegate :valid?, to: :stream, as: :present?, allow_nil: true delegate :valid?, to: :stream, as: :present?, allow_nil: true
......
...@@ -12,7 +12,7 @@ module Gitlab ...@@ -12,7 +12,7 @@ module Gitlab
private private
def relation def relation
LfsObject.all LfsObject.with_files_stored_locally
end end
def expected_checksum(lfs_object) def expected_checksum(lfs_object)
......
...@@ -12,7 +12,7 @@ module Gitlab ...@@ -12,7 +12,7 @@ module Gitlab
private private
def relation def relation
Upload.all Upload.with_files_stored_locally
end end
def expected_checksum(upload) def expected_checksum(upload)
......
require 'logger'
require 'resolv-replace'
desc "GitLab | Migrate files for artifacts to comply with new storage format"
namespace :gitlab do
namespace :artifacts do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger.info('Starting transfer of artifacts')
Ci::Build.joins(:project)
.with_artifacts_stored_locally
.find_each(batch_size: 10) do |build|
begin
build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
rescue => e
logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
end
end
end
end
end
require 'logger'
desc "GitLab | Migrate LFS objects to remote storage"
namespace :gitlab do
namespace :lfs do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger.info('Starting transfer of LFS files to object storage')
LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object|
begin
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
rescue => e
logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
end
end
end
end
end
namespace :gitlab do
namespace :uploads do
desc 'GitLab | Uploads | Migrate the uploaded files to object storage'
task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
batch_size = ENV.fetch('BATCH', 200).to_i
@to_store = ObjectStorage::Store::REMOTE
@mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
@uploader_class = args.uploader_class.constantize
@model_class = args.model_class.constantize
uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
end
def enqueue_batch(batch, index)
job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
@mounted_as,
@to_store)
puts "Enqueued job ##{index}: #{job}"
rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
# continue for the next batch
puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
end
def uploads
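# Upload does not include EachBatch by default; mix it in so we can iterate in batches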
Upload.class_eval { include EachBatch } unless Upload < EachBatch
Upload
.where.not(store: @to_store)
.where(uploader: @uploader_class.to_s,
model_type: @model_class.base_class.sti_name)
end
end
end
require 'spec_helper'
describe SendFileUpload do
let(:uploader_class) do
Class.new(GitlabUploader) do
include ObjectStorage::Concern
storage_options Gitlab.config.uploads
private
# user/:id
def dynamic_segment
File.join(model.class.to_s.underscore, model.id.to_s)
end
end
end
let(:controller_class) do
Class.new do
include SendFileUpload
end
end
let(:object) { build_stubbed(:user) }
let(:uploader) { uploader_class.new(object, :file) }
describe '#send_upload' do
let(:controller) { controller_class.new }
let(:temp_file) { Tempfile.new('test') }
subject { controller.send_upload(uploader) }
before do
FileUtils.touch(temp_file)
end
after do
FileUtils.rm_f(temp_file)
end
context 'when local file is used' do
before do
uploader.store!(temp_file)
end
it 'sends a file' do
expect(controller).to receive(:send_file).with(uploader.path, anything)
subject
end
end
context 'when remote file is used' do
before do
stub_uploads_object_storage(uploader: uploader_class)
uploader.object_store = ObjectStorage::Store::REMOTE
uploader.store!(temp_file)
end
context 'and proxying is enabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
end
it 'sends a file' do
headers = double
expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
expect(controller).to receive(:headers) { headers }
expect(controller).to receive(:head).with(:ok)
subject
end
end
context 'and proxying is disabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
end
it 'sends a file' do
expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)
subject
end
end
end
end
end
...@@ -145,9 +145,23 @@ describe Projects::ArtifactsController do ...@@ -145,9 +145,23 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do context 'when using local file storage' do
it_behaves_like 'a valid file' do it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.root } let(:archive_path) { JobArtifactUploader.root }
end end
end end
context 'when using remote file storage' do
before do
stub_artifacts_object_storage
end
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' }
end
end
end end
end end
......
# coding: utf-8
require 'spec_helper' require 'spec_helper'
describe Projects::JobsController do describe Projects::JobsController do
include ApiHelpers include ApiHelpers
include HttpIOHelpers
let(:project) { create(:project, :public) } let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) } let(:pipeline) { create(:ci_pipeline, project: project) }
...@@ -203,6 +205,41 @@ describe Projects::JobsController do ...@@ -203,6 +205,41 @@ describe Projects::JobsController do
end end
end end
context 'when trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
context 'when there are no network issues' do
before do
stub_remote_trace_206
get_trace
end
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['html']).to eq(job.trace.html)
end
end
context 'when there is a network issue' do
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
def get_trace def get_trace
get :trace, namespace_id: project.namespace, get :trace, namespace_id: project.namespace,
project_id: project, project_id: project,
...@@ -446,14 +483,18 @@ describe Projects::JobsController do ...@@ -446,14 +483,18 @@ describe Projects::JobsController do
end end
describe 'GET raw' do describe 'GET raw' do
before do subject do
get_raw post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
end end
context 'when job has a trace artifact' do context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do it 'returns a trace' do
response = subject
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8' expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read expect(response.body).to eq job.job_artifacts_trace.open.read
...@@ -464,6 +505,8 @@ describe Projects::JobsController do ...@@ -464,6 +505,8 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) } let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'send a trace file' do it 'send a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8' expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'BUILD TRACE' expect(response.body).to eq 'BUILD TRACE'
...@@ -474,14 +517,22 @@ describe Projects::JobsController do ...@@ -474,14 +517,22 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, pipeline: pipeline) } let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do it 'returns not_found' do
response = subject
expect(response).to have_gitlab_http_status(:not_found) expect(response).to have_gitlab_http_status(:not_found)
end end
end end
def get_raw context 'when the trace artifact is in ObjectStorage' do
post :raw, namespace_id: project.namespace, let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
project_id: project,
id: job.id before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
end
it 'redirects to the trace file url' do
expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
end
end end
end end
end end
...@@ -8,10 +8,7 @@ describe Projects::RawController do ...@@ -8,10 +8,7 @@ describe Projects::RawController do
let(:id) { 'master/README.md' } let(:id) { 'master/README.md' }
it 'delivers ASCII file' do it 'delivers ASCII file' do
get(:show, get_show(public_project, id)
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8') expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
...@@ -25,10 +22,7 @@ describe Projects::RawController do ...@@ -25,10 +22,7 @@ describe Projects::RawController do
let(:id) { 'master/files/images/6049019_460s.jpg' } let(:id) { 'master/files/images/6049019_460s.jpg' }
it 'sets image content type header' do it 'sets image content type header' do
get(:show, get_show(public_project, id)
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('image/jpeg') expect(response.header['Content-Type']).to eq('image/jpeg')
...@@ -54,21 +48,40 @@ describe Projects::RawController do ...@@ -54,21 +48,40 @@ describe Projects::RawController do
it 'serves the file' do it 'serves the file' do
expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment') expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get(:show, get_show(public_project, id)
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
end end
context 'and lfs uses object storage' do
before do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect to file' do
get_show(public_project, id)
expect(response).to have_gitlab_http_status(302)
expect(response.location).to include(lfs_object.reload.file.path)
end
it 'sets content disposition' do
get_show(public_project, id)
file_uri = URI.parse(response.location)
params = CGI.parse(file_uri.query)
expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
end
end
end end
context 'when project does not have access' do context 'when project does not have access' do
it 'does not serve the file' do it 'does not serve the file' do
get(:show, get_show(public_project, id)
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
end end
...@@ -81,10 +94,7 @@ describe Projects::RawController do ...@@ -81,10 +94,7 @@ describe Projects::RawController do
end end
it 'delivers ASCII file' do it 'delivers ASCII file' do
get(:show, get_show(public_project, id)
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8') expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
...@@ -95,4 +105,10 @@ describe Projects::RawController do ...@@ -95,4 +105,10 @@ describe Projects::RawController do
end end
end end
end end
def get_show(project, id)
get(:show, namespace_id: project.namespace.to_param,
project_id: project,
id: id)
end
end end
...@@ -2,8 +2,21 @@ ...@@ -2,8 +2,21 @@
FactoryBot.define do FactoryBot.define do
factory :appearance do factory :appearance do
title "MepMep" title "GitLab Community Edition"
description "This is my Community Edition instance" description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines" new_project_guidelines "Custom project guidelines"
end end
trait :with_logo do
logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_header_logo do
header_logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_logos do
with_logo
with_header_logo
end
end end
...@@ -5,6 +5,10 @@ FactoryBot.define do ...@@ -5,6 +5,10 @@ FactoryBot.define do
job factory: :ci_build job factory: :ci_build
file_type :archive file_type :archive
trait :remote_store do
file_store JobArtifactUploader::Store::REMOTE
end
after :build do |artifact| after :build do |artifact|
artifact.project ||= artifact.job.project artifact.project ||= artifact.job.project
end end
......
...@@ -15,4 +15,8 @@ FactoryBot.define do ...@@ -15,4 +15,8 @@ FactoryBot.define do
trait :correct_oid do trait :correct_oid do
oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75' oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'
end end
trait :object_storage do
file_store { LfsObjectUploader::Store::REMOTE }
end
end end
...@@ -5,6 +5,7 @@ FactoryBot.define do ...@@ -5,6 +5,7 @@ FactoryBot.define do
uploader "AvatarUploader" uploader "AvatarUploader"
mount_point :avatar mount_point :avatar
secret nil secret nil
store ObjectStorage::Store::LOCAL
# we should build a mount agnostic upload by default # we should build a mount agnostic upload by default
transient do transient do
...@@ -27,6 +28,10 @@ FactoryBot.define do ...@@ -27,6 +28,10 @@ FactoryBot.define do
secret SecureRandom.hex secret SecureRandom.hex
end end
trait :object_storage do
store ObjectStorage::Store::REMOTE
end
trait :namespace_upload do trait :namespace_upload do
model { build(:group) } model { build(:group) }
path { File.join(secret, filename) } path { File.join(secret, filename) }
......
require 'spec_helper'
describe 'Fog::Storage::GoogleXML::File' do
let(:storage) do
Fog.mock!
Fog::Storage.new({
google_storage_access_key_id: "asdf",
google_storage_secret_access_key: "asdf",
provider: "Google"
})
end
let(:file) do
directory = storage.directories.create(key: 'data')
directory.files.create(
body: 'Hello World!',
key: 'hello_world.txt'
)
end
it 'delegates to #get_https_url' do
expect(file.url(Time.now)).to start_with("https://")
end
end
require 'spec_helper'
describe Gitlab::Ci::Trace::HttpIO do
include HttpIOHelpers
let(:http_io) { described_class.new(url, size) }
let(:url) { remote_trace_url }
let(:size) { remote_trace_size }
describe '#close' do
subject { http_io.close }
it { is_expected.to be_nil }
end
describe '#binmode' do
subject { http_io.binmode }
it { is_expected.to be_nil }
end
describe '#binmode?' do
subject { http_io.binmode? }
it { is_expected.to be_truthy }
end
describe '#path' do
subject { http_io.path }
it { is_expected.to be_nil }
end
describe '#url' do
subject { http_io.url }
it { is_expected.to eq(url) }
end
describe '#seek' do
subject { http_io.seek(pos, where) }
context 'when moves pos to end of the file' do
let(:pos) { 0 }
let(:where) { IO::SEEK_END }
it { is_expected.to eq(size) }
end
context 'when moves pos to middle of the file' do
let(:pos) { size / 2 }
let(:where) { IO::SEEK_SET }
it { is_expected.to eq(size / 2) }
end
context 'when moves pos around' do
it 'matches the result' do
expect(http_io.seek(0)).to eq(0)
expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
end
end
end
describe '#eof?' do
subject { http_io.eof? }
context 'when current pos is at end of the file' do
before do
http_io.seek(size, IO::SEEK_SET)
end
it { is_expected.to be_truthy }
end
context 'when current pos is not at end of the file' do
before do
http_io.seek(0, IO::SEEK_SET)
end
it { is_expected.to be_falsey }
end
end
describe '#each_line' do
subject { http_io.each_line }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
it 'yields lines' do
expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
end
context 'when buckets on GCS' do
context 'when BUFFER_SIZE is larger than file size' do
before do
stub_remote_trace_200
set_larger_buffer_size_than(size)
end
it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
http_io.each_line { |line| }
end
end
end
end
describe '#read' do
subject { http_io.read(length) }
context 'when there are no network issues' do
before do
stub_remote_trace_206
end
context 'when read whole size' do
let(:length) { nil }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when read only first 100 bytes' do
let(:length) { 100 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
end
context 'when tries to read oversize' do
let(:length) { size + 1000 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when tries to read 0 bytes' do
let(:length) { 0 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
describe '#readline' do
subject { http_io.readline }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
shared_examples 'all line matching' do
it 'reads a line' do
(0...remote_trace_body.lines.count).each do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when pos is at middle of the file' do
before do
set_smaller_buffer_size_than(size)
http_io.seek(size / 2)
string_io.seek(size / 2)
end
it 'reads from pos' do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
describe '#write' do
subject { http_io.write(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#truncate' do
subject { http_io.truncate(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#flush' do
subject { http_io.flush }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#present?' do
subject { http_io.present? }
it { is_expected.to be_truthy }
end
end
...@@ -265,7 +265,9 @@ CommitStatus: ...@@ -265,7 +265,9 @@ CommitStatus:
- target_url - target_url
- description - description
- artifacts_file - artifacts_file
- artifacts_file_store
- artifacts_metadata - artifacts_metadata
- artifacts_metadata_store
- erased_by_id - erased_by_id
- erased_at - erased_at
- artifacts_expire_at - artifacts_expire_at
......
...@@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do ...@@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do
expect(failures.keys).to contain_exactly(lfs_object) expect(failures.keys).to contain_exactly(lfs_object)
expect(failure.to_s).to include('Checksum mismatch') expect(failure.to_s).to include('Checksum mismatch')
end end
context 'with remote files' do
before do
stub_lfs_object_storage
end
it 'skips LFS objects in object storage' do
local_failure = create(:lfs_object)
create(:lfs_object, :object_storage)
failures = {}
described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
expect(failures.keys).to contain_exactly(local_failure)
end
end
end end
end end
...@@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do ...@@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do
expect(failures.keys).to contain_exactly(upload) expect(failures.keys).to contain_exactly(upload)
expect(failure.to_s).to include('Checksum missing') expect(failure.to_s).to include('Checksum missing')
end end
context 'with remote files' do
before do
stub_uploads_object_storage(AvatarUploader)
end
it 'skips uploads in object storage' do
local_failure = create(:upload)
create(:upload, :object_storage)
failures = {}
described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
expect(failures.keys).to contain_exactly(local_failure)
end
end
end end
end end
...@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do ...@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy! deleted_project.destroy!
end end
after do
Upload.reset_column_information
end
it 'deletes only the fork network without members' do it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2) expect(fork_networks.count).to eq(2)
......
...@@ -198,6 +198,16 @@ describe Ci::Build do ...@@ -198,6 +198,16 @@ describe Ci::Build do
end end
context 'when legacy artifacts are used' do context 'when legacy artifacts are used' do
let(:build) { create(:ci_build, :legacy_artifacts) }
subject { build.artifacts? }
context 'is expired' do
let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'artifacts archive does not exist' do context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) } let(:build) { create(:ci_build) }
...@@ -208,13 +218,25 @@ describe Ci::Build do ...@@ -208,13 +218,25 @@ describe Ci::Build do
let(:build) { create(:ci_build, :legacy_artifacts) } let(:build) { create(:ci_build, :legacy_artifacts) }
it { is_expected.to be_truthy } it { is_expected.to be_truthy }
end
end
end
context 'is expired' do describe '#browsable_artifacts?' do
let(:build) { create(:ci_build, :legacy_artifacts, :expired) } subject { build.browsable_artifacts? }
it { is_expected.to be_falsy } context 'artifacts metadata does not exist' do
before do
build.update_attributes(legacy_artifacts_metadata: nil)
end end
it { is_expected.to be_falsy }
end end
context 'artifacts metadata does exist' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end end
end end
......
...@@ -15,6 +15,50 @@ describe Ci::JobArtifact do ...@@ -15,6 +15,50 @@ describe Ci::JobArtifact do
it { is_expected.to delegate_method(:open).to(:file) } it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) } it { is_expected.to delegate_method(:exists?).to(:file) }
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
describe '#schedule_background_upload' do
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
end
context 'when background upload is disabled' do
before do
stub_artifacts_object_storage(background_upload: false)
end
it 'does not schedule the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
describe '#set_size' do describe '#set_size' do
it 'sets the size' do it 'sets the size' do
expect(artifact.size).to eq(106365) expect(artifact.size).to eq(106365)
......
require 'spec_helper'
describe LfsObject do
describe '#local_store?' do
it 'returns true when file_store is nil' do
subject.file_store = nil
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
end
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
subject { create(:lfs_object, :with_file) }
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
context 'when is licensed' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
subject
end
it 'schedules the model for migration once' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
lfs_object = create(:lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
end
end
end
context 'when background upload is disabled' do
before do
stub_lfs_object_storage(background_upload: false)
end
it 'does not schedule the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
require 'spec_helper' require 'spec_helper'
describe API::Jobs do describe API::Jobs do
include HttpIOHelpers
set(:project) do set(:project) do
create(:project, :repository, public_builds: false) create(:project, :repository, public_builds: false)
end end
...@@ -112,6 +114,7 @@ describe API::Jobs do ...@@ -112,6 +114,7 @@ describe API::Jobs do
let(:query) { Hash.new } let(:query) { Hash.new }
before do before do
job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end end
...@@ -335,13 +338,58 @@ describe API::Jobs do ...@@ -335,13 +338,58 @@ describe API::Jobs do
end end
end end
it 'does not return job artifacts if not uploaded' do context 'when artifacts are stored remotely' do
let(:proxy_download) { false }
before do
stub_artifacts_object_storage(proxy_download: proxy_download)
end
let(:job) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'when proxy download is enabled' do
let(:proxy_download) { true }
it 'responds with the workhorse send-url' do
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
context 'authorized user' do
it 'returns the file remote URL' do
expect(response).to redirect_to(artifact.file.url)
end
end
context 'unauthorized user' do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
end end
end end
end end
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end end
describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do
...@@ -349,6 +397,7 @@ describe API::Jobs do ...@@ -349,6 +397,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do before do
stub_artifacts_object_storage
job.success job.success
end end
...@@ -412,9 +461,24 @@ describe API::Jobs do ...@@ -412,9 +461,24 @@ describe API::Jobs do
"attachment; filename=#{job.artifacts_file.filename}" } "attachment; filename=#{job.artifacts_file.filename}" }
end end
it { expect(response).to have_gitlab_http_status(200) } it { expect(response).to have_http_status(:ok) }
it { expect(response.headers).to include(download_headers) } it { expect(response.headers).to include(download_headers) }
end end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_http_status(:found)
end
end
end end
context 'with regular branch' do context 'with regular branch' do
...@@ -451,6 +515,22 @@ describe API::Jobs do ...@@ -451,6 +515,22 @@ describe API::Jobs do
end end
context 'authorized user' do context 'authorized user' do
context 'when trace is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
stub_remote_trace_206
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
it 'returns specific job trace' do
expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq(job.trace.raw)
end
end
context 'when trace is artifact' do context 'when trace is artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
......
...@@ -200,7 +200,7 @@ describe API::Runner do ...@@ -200,7 +200,7 @@ describe API::Runner do
let(:project) { create(:project, shared_runners_enabled: false) } let(:project) { create(:project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') } let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) } let(:runner) { create(:ci_runner) }
let!(:job) do let(:job) do
create(:ci_build, :artifacts, :extended_options, create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate") pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end end
...@@ -215,6 +215,7 @@ describe API::Runner do ...@@ -215,6 +215,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' } let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do before do
job
stub_container_registry_config(enabled: false) stub_container_registry_config(enabled: false)
end end
...@@ -888,6 +889,7 @@ describe API::Runner do ...@@ -888,6 +889,7 @@ describe API::Runner do
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') } let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before do before do
stub_artifacts_object_storage
job.run! job.run!
end end
...@@ -1179,27 +1181,67 @@ describe API::Runner do ...@@ -1179,27 +1181,67 @@ describe API::Runner do
describe 'GET /api/v4/jobs/:id/artifacts' do describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token } let(:token) { job.token }
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::Store::LOCAL }
before do before do
download_artifact create(:ci_job_artifact, :archive, file_store: store, job: job)
end end
context 'when job has artifacts' do context 'when using job token' do
let(:job) { create(:ci_build, :artifacts) } context 'when artifacts are stored locally' do
let(:download_headers) do let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary', { 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end end
context 'when using job token' do before do
download_artifact
end
it 'download artifacts' do it 'download artifacts' do
expect(response).to have_gitlab_http_status(200) expect(response).to have_http_status(200)
expect(response.headers).to include download_headers expect(response.headers).to include download_headers
end end
end end
context 'when artifacts are stored remotely' do
let(:store) { JobArtifactUploader::Store::REMOTE }
let!(:job) { create(:ci_build) }
context 'when proxy download is being used' do
before do
download_artifact(direct_download: false)
end
it 'uses workhorse send-url' do
expect(response).to have_gitlab_http_status(200)
expect(response.headers).to include(
'Gitlab-Workhorse-Send-Data' => /send-url:/)
end
end
context 'when direct download is being used' do
before do
download_artifact(direct_download: true)
end
it 'receives a redirect for downloading artifacts' do
expect(response).to have_gitlab_http_status(302)
expect(response.headers).to include('Location')
end
end
end
end
context 'when using runners token' do context 'when using runners token' do
let(:token) { job.project.runners_token } let(:token) { job.project.runners_token }
before do
download_artifact
end
it 'responds with forbidden' do it 'responds with forbidden' do
expect(response).to have_gitlab_http_status(403) expect(response).to have_gitlab_http_status(403)
end end
...@@ -1208,12 +1250,16 @@ describe API::Runner do ...@@ -1208,12 +1250,16 @@ describe API::Runner do
context 'when job does not have artifacts' do context 'when job does not have artifacts' do
it 'responds with not found' do it 'responds with not found' do
download_artifact
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
end end
end end
def download_artifact(params = {}, request_headers = headers) def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token) params = params.merge(token: token)
job.reload
get api("/jobs/#{job.id}/artifacts"), params, request_headers get api("/jobs/#{job.id}/artifacts"), params, request_headers
end end
end end
......
...@@ -216,6 +216,7 @@ describe API::V3::Builds do ...@@ -216,6 +216,7 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do before do
stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user) get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end end
...@@ -230,13 +231,24 @@ describe API::V3::Builds do ...@@ -230,13 +231,24 @@ describe API::V3::Builds do
end end
it 'returns specific job artifacts' do it 'returns specific job artifacts' do
expect(response).to have_gitlab_http_status(200) expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers) expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file) expect(response.body).to match_file(build.artifacts_file.file.file)
end end
end end
end end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
it 'returns location redirect' do
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(302)
end
end
context 'unauthorized user' do context 'unauthorized user' do
let(:api_user) { nil } let(:api_user) { nil }
...@@ -256,6 +268,7 @@ describe API::V3::Builds do ...@@ -256,6 +268,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do before do
stub_artifacts_object_storage
build.success build.success
end end
...@@ -318,9 +331,24 @@ describe API::V3::Builds do ...@@ -318,9 +331,24 @@ describe API::V3::Builds do
"attachment; filename=#{build.artifacts_file.filename}" } "attachment; filename=#{build.artifacts_file.filename}" }
end end
it { expect(response).to have_gitlab_http_status(200) } it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) } it { expect(response.headers).to include(download_headers) }
end end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
before do
build.reload
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end end
context 'with regular branch' do context 'with regular branch' do
......
...@@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do ...@@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do
describe 'when fetching lfs object' do describe 'when fetching lfs object' do
let(:project) { create(:project) } let(:project) { create(:project) }
let(:update_permissions) { } let(:update_permissions) { }
let(:before_get) { }
before do before do
enable_lfs enable_lfs
update_permissions update_permissions
before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end end
...@@ -239,6 +241,21 @@ describe 'Git LFS API and storage' do ...@@ -239,6 +241,21 @@ describe 'Git LFS API and storage' do
end end
it_behaves_like 'responds with a file' it_behaves_like 'responds with a file'
context 'when LFS uses object storage' do
let(:before_get) do
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
expect(response).to have_gitlab_http_status(302)
end
it 'responds with the file location' do
expect(response.location).to include(lfs_object.reload.file.path)
end
end
end end
end end
...@@ -978,6 +995,32 @@ describe 'Git LFS API and storage' do ...@@ -978,6 +995,32 @@ describe 'Git LFS API and storage' do
end end
end end
context 'and workhorse requests upload finalize for a new lfs object' do
before do
lfs_object.destroy
end
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
end
context 'with object storage enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules migration of file to object storage' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
end
end
context 'invalid tempfiles' do context 'invalid tempfiles' do
it 'rejects slashes in the tempfile name (path traversal)' do it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('foo/bar') put_finalize('foo/bar')
...@@ -1177,7 +1220,9 @@ describe 'Git LFS API and storage' do ...@@ -1177,7 +1220,9 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end end
def put_finalize(lfs_tmp = lfs_tmp_file) def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
setup_tempfile(lfs_tmp) if with_tempfile
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil, put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
end end
...@@ -1185,6 +1230,13 @@ describe 'Git LFS API and storage' do ...@@ -1185,6 +1230,13 @@ describe 'Git LFS API and storage' do
def lfs_tmp_file def lfs_tmp_file
"#{sample_oid}012345678" "#{sample_oid}012345678"
end end
def setup_tempfile(lfs_tmp)
upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
end
end end
def enable_lfs def enable_lfs
......
...@@ -117,6 +117,7 @@ describe PipelineSerializer do ...@@ -117,6 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject } recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(36) expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0) expect(recorded.cached_count).to eq(0)
end end
......
...@@ -28,7 +28,8 @@ describe Ci::RetryBuildService do ...@@ -28,7 +28,8 @@ describe Ci::RetryBuildService do
%i[type lock_version target_url base_tags trace_sections %i[type lock_version target_url base_tags trace_sections
commit_id deployments erased_by_id last_deployment project_id commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason].freeze user_id auto_canceled_by_id retried failure_reason
artifacts_file_store artifacts_metadata_store].freeze
shared_examples 'build duplication' do shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) } let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
......
...@@ -6,7 +6,7 @@ describe Issues::MoveService do ...@@ -6,7 +6,7 @@ describe Issues::MoveService do
let(:title) { 'Some issue' } let(:title) { 'Some issue' }
let(:description) { 'Some issue description' } let(:description) { 'Some issue description' }
let(:old_project) { create(:project) } let(:old_project) { create(:project) }
let(:new_project) { create(:project, group: create(:group)) } let(:new_project) { create(:project) }
let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') } let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') }
let(:old_issue) do let(:old_issue) do
......
module HttpIOHelpers
def stub_remote_trace_206
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 206) }
end
def stub_remote_trace_200
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 200) }
end
def stub_remote_trace_500
WebMock.stub_request(:get, remote_trace_url)
.to_return(status: [500, "Internal Server Error"])
end
def remote_trace_url
"http://trace.com/trace"
end
def remote_trace_response(request, response_status)
range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
{
status: response_status,
headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
body: range_trace_body(range[1].to_i, range[2].to_i)
}
end
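# Hedged usage sketch: with stub_remote_trace_206 in place, a request carrying
# 'Range: bytes=0-99' would get status 206, a 'Content-Range: bytes 0-99/<size>'
# header and the body slice remote_trace_body[0..99] (byte values illustrative).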
def remote_trace_response_headers(response_status, from, to)
headers = { 'Content-Type' => 'text/plain' }
if response_status == 206
headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
end
headers
end
def range_trace_body(from, to)
remote_trace_body[from..to]
end
def remote_trace_body
@remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
end
def remote_trace_size
remote_trace_body.length
end
def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks / 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks * 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
end
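# Worked example for the buffer-size helpers above (numbers illustrative only):
# with a 1000-byte trace, set_smaller_buffer_size_than(1000) stubs BUFFER_SIZE to
# ((1000 / 128) / 2) * 128 = 384, while set_larger_buffer_size_than(1000) stubs it
# to ((1000 / 128) * 2) * 128 = 1792, so reads either span several chunks or fit in one.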
shared_context 'with storage' do |store, **stub_params|
before do
subject.object_store = store
end
end
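# This context is included in the uploader specs below as, for example:
#
#   include_context 'with storage', described_class::Store::REMOTE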
shared_examples "migrates" do |to_store:, from_store: nil|
let(:to) { to_store }
let(:from) { from_store || subject.object_store }
def migrate(to)
subject.migrate!(to)
end
def checksum
Digest::SHA256.hexdigest(subject.read)
end
before do
migrate(from)
end
it 'returns corresponding file type' do
expect(subject).to be_an(CarrierWave::Uploader::Base)
expect(subject).to be_a(ObjectStorage::Concern)
if from == described_class::Store::REMOTE
expect(subject.file).to be_a(CarrierWave::Storage::Fog::File)
elsif from == described_class::Store::LOCAL
expect(subject.file).to be_a(CarrierWave::SanitizedFile)
else
raise 'Unexpected file type'
end
end
it 'does nothing when migrating to the current store' do
expect { migrate(from) }.not_to change { subject.object_store }.from(from)
end
it 'migrates to the specified store' do
from_checksum = checksum
expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
expect(checksum).to eq(from_checksum)
end
it 'removes the original file after the migration' do
original_file = subject.file.path
migrate(to)
expect(File.exist?(original_file)).to be_falsey
end
it 'can access the original file during migration' do
file = subject.file
allow(subject).to receive(:delete_migrated_file) { } # Remove as a callback of :migrate
allow(subject).to receive(:record_upload) { } # Remove as a callback of :store (:record_upload)
expect(file.exists?).to be_truthy
expect { migrate(to) }.not_to change { file.exists? }
end
context 'when migrate! is not occupied by another process' do
it 'executes migrate!' do
expect(subject).to receive(:object_store=).at_least(1)
migrate(to)
end
end
context 'when migrate! is occupied by another process' do
let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" }
before do
@uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
end
it 'does not execute migrate!' do
expect(subject).not_to receive(:unsafe_migrate!)
expect { migrate(to) }.to raise_error('Already running')
end
after do
Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid)
end
end
context 'migration is unsuccessful' do
shared_examples "handles gracefully" do |error:|
it 'does not update the object_store' do
expect { migrate(to) }.to raise_error(error)
expect(subject.object_store).to eq(from)
end
it 'does not delete the original file' do
expect { migrate(to) }.to raise_error(error)
expect(subject.exists?).to be_truthy
end
end
context 'when the store is not supported' do
let(:to) { -1 } # not a valid store
include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
end
context 'upon a fog failure' do
before do
storage_class = subject.send(:storage_for, to).class
expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
end
include_examples "handles gracefully", error: "Store failure."
end
context 'upon a database failure' do
before do
expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
end
include_examples "handles gracefully", error: "ActiveRecord failure."
end
end
end
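# These shared examples are exercised later in this merge request as, for example:
#
#   it_behaves_like "migrates", to_store: described_class::Store::REMOTE
#   it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL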
module StubConfiguration
def stub_object_storage_uploader(
config:, uploader:, remote_directory:,
enabled: true,
proxy_download: false,
background_upload: false)
Fog.mock!
allow(config).to receive(:enabled) { enabled }
allow(config).to receive(:proxy_download) { proxy_download }
allow(config).to receive(:background_upload) { background_upload }
return unless enabled
::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
begin
connection.directories.create(key: remote_directory)
rescue Excon::Error::Conflict
end
end
end
def stub_artifacts_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
uploader: JobArtifactUploader,
remote_directory: 'artifacts',
**params)
end
def stub_lfs_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
uploader: LfsObjectUploader,
remote_directory: 'lfs-objects',
**params)
end
def stub_uploads_object_storage(uploader = described_class, **params)
stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
uploader: uploader,
remote_directory: 'uploads',
**params)
end
end
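# Typical calls, as used throughout the specs in this merge request (the
# proxy_download example is illustrative only):
#
#   stub_artifacts_object_storage                     # object storage enabled with defaults
#   stub_lfs_object_storage(background_upload: true)  # also allow background moves
#   stub_uploads_object_storage(AvatarUploader, proxy_download: true)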
require 'rake_helper'
describe 'gitlab:artifacts namespace rake task' do
before(:context) do
Rake.application.rake_require 'tasks/gitlab/artifacts/migrate'
end
let(:object_storage_enabled) { false }
before do
stub_artifacts_object_storage(enabled: object_storage_enabled)
end
subject { run_rake_task('gitlab:artifacts:migrate') }
context 'legacy artifacts' do
describe 'migrate' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
let(:store) { nil }
it "migrates file to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is defined' do
let(:object_storage_enabled) { true }
it "migrates file to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is not defined' do
it "fails to migrate to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
end
context 'job artifacts' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
let(:store) { nil }
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is defined' do
let(:object_storage_enabled) { true }
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is not defined' do
it "fails to migrate to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
end
require 'rake_helper'
describe 'gitlab:lfs namespace rake task' do
before :all do
Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
end
describe 'migrate' do
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate
run_rake_task('gitlab:lfs:migrate')
end
context 'object storage disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
end
end
context 'object storage enabled' do
before do
stub_lfs_object_storage
end
it 'migrates local file to object storage' do
expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
end
end
end
require 'rake_helper'
describe 'gitlab:uploads:migrate rake tasks' do
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:model_class) { Project }
let(:uploader_class) { AvatarUploader }
let(:mounted_as) { :avatar }
let(:batch_size) { 3 }
before do
stub_env('BATCH', batch_size.to_s)
stub_uploads_object_storage(uploader_class)
Rake.application.rake_require 'tasks/gitlab/uploads/migrate'
allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
end
def run
args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
run_rake_task("gitlab:uploads:migrate", *args)
end
it 'enqueues jobs in batches' do
expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times
run
end
end
...@@ -11,4 +11,26 @@ describe AttachmentUploader do ...@@ -11,4 +11,26 @@ describe AttachmentUploader do
store_dir: %r[uploads/-/system/note/attachment/], store_dir: %r[uploads/-/system/note/attachment/],
upload_path: %r[uploads/-/system/note/attachment/], upload_path: %r[uploads/-/system/note/attachment/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/] absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[note/attachment/],
upload_path: %r[note/attachment/]
end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end end
require 'spec_helper' require 'spec_helper'
describe AvatarUploader do describe AvatarUploader do
let(:model) { create(:user, :with_avatar) } let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) } let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) } let(:upload) { create(:upload, model: model) }
...@@ -12,15 +12,28 @@ describe AvatarUploader do ...@@ -12,15 +12,28 @@ describe AvatarUploader do
upload_path: %r[uploads/-/system/user/avatar/], upload_path: %r[uploads/-/system/user/avatar/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/] absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
describe '#move_to_cache' do context "object_store is REMOTE" do
it 'is false' do before do
expect(uploader.move_to_cache).to eq(false) stub_uploads_object_storage
end end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[user/avatar/],
upload_path: %r[user/avatar/]
end end
describe '#move_to_store' do context "with a file" do
it 'is false' do let(:project) { create(:project, :with_avatar) }
expect(uploader.move_to_store).to eq(false) let(:uploader) { project.avatar }
let(:upload) { uploader.upload }
before do
stub_uploads_object_storage
end end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end end
end end
...@@ -36,6 +36,12 @@ describe FileMover do ...@@ -36,6 +36,12 @@ describe FileMover do
it 'creates a new update record' do it 'creates a new update record' do
expect { subject }.to change { Upload.count }.by(1) expect { subject }.to change { Upload.count }.by(1)
end end
it 'schedules a background migration' do
expect_any_instance_of(PersonalFileUploader).to receive(:schedule_background_upload).once
subject
end
end end
context 'when update_markdown fails' do context 'when update_markdown fails' do
......
...@@ -11,20 +11,20 @@ describe FileUploader do ...@@ -11,20 +11,20 @@ describe FileUploader do
shared_examples 'builds correct legacy storage paths' do shared_examples 'builds correct legacy storage paths' do
include_examples 'builds correct paths', include_examples 'builds correct paths',
store_dir: %r{awesome/project/\h+}, store_dir: %r{awesome/project/\h+},
upload_path: %r{\h+/<filename>},
absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg} absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end end
shared_examples 'uses hashed storage' do context 'legacy storage' do
it_behaves_like 'builds correct legacy storage paths'
context 'uses hashed storage' do
context 'when rolled out attachments' do context 'when rolled out attachments' do
let(:project) { build_stubbed(:project, namespace: group, name: 'project') } let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
before do include_examples 'builds correct paths',
allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed') store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
end upload_path: %r{\h+/<filename>}
it_behaves_like 'builds correct paths',
store_dir: %r{ca/fe/fe/ed/\h+},
absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
end end
context 'when only repositories are rolled out' do context 'when only repositories are rolled out' do
...@@ -33,10 +33,19 @@ describe FileUploader do ...@@ -33,10 +33,19 @@ describe FileUploader do
it_behaves_like 'builds correct legacy storage paths' it_behaves_like 'builds correct legacy storage paths'
end end
end end
end
context 'legacy storage' do context 'object store is remote' do
it_behaves_like 'builds correct legacy storage paths' before do
include_examples 'uses hashed storage' stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
# always use hashed storage path for remote uploads
it_behaves_like 'builds correct paths',
store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/<filename>}
end end
describe 'initialize' do describe 'initialize' do
...@@ -78,6 +87,16 @@ describe FileUploader do ...@@ -78,6 +87,16 @@ describe FileUploader do
end end
end end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
describe '#upload=' do describe '#upload=' do
let(:secret) { SecureRandom.hex } let(:secret) { SecureRandom.hex }
let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') } let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') }
...@@ -93,15 +112,5 @@ describe FileUploader do ...@@ -93,15 +112,5 @@ describe FileUploader do
uploader.upload = upload uploader.upload = upload
end end
context 'uploader_context is empty' do
it 'fallbacks to regex based extraction' do
expect(upload).to receive(:uploader_context).and_return({})
uploader.upload = upload
expect(uploader.secret).to eq(secret)
expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt')
end
end
end end
end end
require 'spec_helper' require 'spec_helper'
describe JobArtifactUploader do describe JobArtifactUploader do
let(:job_artifact) { create(:ci_job_artifact) } let(:store) { described_class::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) } let(:uploader) { described_class.new(job_artifact, :file) }
subject { uploader } subject { uploader }
...@@ -11,6 +12,17 @@ describe JobArtifactUploader do ...@@ -11,6 +12,17 @@ describe JobArtifactUploader do
cache_dir: %r[artifacts/tmp/cache], cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work] work_dir: %r[artifacts/tmp/work]
context "object store is REMOTE" do
before do
stub_artifacts_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end
describe '#open' do describe '#open' do
subject { uploader.open } subject { uploader.open }
...@@ -36,6 +48,17 @@ describe JobArtifactUploader do ...@@ -36,6 +48,17 @@ describe JobArtifactUploader do
end end
end end
end end
context 'when trace is stored in Object storage' do
before do
allow(uploader).to receive(:file_storage?) { false }
allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns http io stream' do
is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
end
end
end end
context 'file is stored in valid local_path' do context 'file is stored in valid local_path' do
...@@ -55,4 +78,14 @@ describe JobArtifactUploader do ...@@ -55,4 +78,14 @@ describe JobArtifactUploader do
it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") } it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") } it { is_expected.to end_with("ci_build_artifacts.zip") }
end end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace')))
stub_artifacts_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end end
require 'rails_helper' require 'rails_helper'
describe LegacyArtifactUploader do describe LegacyArtifactUploader do
let(:job) { create(:ci_build) } let(:store) { described_class::Store::LOCAL }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) } let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { described_class.root } let(:local_path) { described_class.root }
...@@ -20,6 +21,17 @@ describe LegacyArtifactUploader do ...@@ -20,6 +21,17 @@ describe LegacyArtifactUploader do
cache_dir: %r[artifacts/tmp/cache], cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work] work_dir: %r[artifacts/tmp/work]
context 'object store is remote' do
before do
stub_artifacts_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like "builds correct paths",
store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
end
describe '#filename' do describe '#filename' do
# we need to use uploader, as this makes to use mounter # we need to use uploader, as this makes to use mounter
# which initialises uploader.file object # which initialises uploader.file object
......
...@@ -11,4 +11,62 @@ describe LfsObjectUploader do ...@@ -11,4 +11,62 @@ describe LfsObjectUploader do
store_dir: %r[\h{2}/\h{2}], store_dir: %r[\h{2}/\h{2}],
cache_dir: %r[/lfs-objects/tmp/cache], cache_dir: %r[/lfs-objects/tmp/cache],
work_dir: %r[/lfs-objects/tmp/work] work_dir: %r[/lfs-objects/tmp/work]
context "object store is REMOTE" do
before do
stub_lfs_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}]
end
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
end
context 'with object storage enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'is scheduled to run after creation' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object
end
end
end
describe 'remote file' do
let(:remote) { described_class::Store::REMOTE }
let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do
before do
stub_lfs_object_storage
end
it 'can store file remotely' do
allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
store_file(lfs_object)
expect(lfs_object.file_store).to eq remote
expect(lfs_object.file.path).not_to be_blank
end
end
end
def store_file(lfs_object)
lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png")
lfs_object.save!
end
end end
...@@ -13,4 +13,26 @@ describe NamespaceFileUploader do ...@@ -13,4 +13,26 @@ describe NamespaceFileUploader do
store_dir: %r[uploads/-/system/namespace/\d+], store_dir: %r[uploads/-/system/namespace/\d+],
upload_path: IDENTIFIER, upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}] absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[namespace/\d+/\h+],
upload_path: IDENTIFIER
end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end end
require 'rails_helper'
require 'carrierwave/storage/fog'
class Implementation < GitlabUploader
include ObjectStorage::Concern
include ::RecordsUploads::Concern
prepend ::ObjectStorage::Extension::RecordsUploads
storage_options Gitlab.config.uploads
private
# user/:id
def dynamic_segment
File.join(model.class.to_s.underscore, model.id.to_s)
end
end
describe ObjectStorage do
let(:uploader_class) { Implementation }
let(:object) { build_stubbed(:user) }
let(:uploader) { uploader_class.new(object, :file) }
before do
allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
end
describe '#object_store=' do
it "reload the local storage" do
uploader.object_store = described_class::Store::LOCAL
expect(uploader.file_storage?).to be_truthy
end
it "reload the REMOTE storage" do
uploader.object_store = described_class::Store::REMOTE
expect(uploader.file_storage?).to be_falsey
end
end
context 'object_store is Store::LOCAL' do
before do
uploader.object_store = described_class::Store::LOCAL
end
describe '#store_dir' do
it 'is the composition of (base_dir, dynamic_segment)' do
expect(uploader.store_dir).to start_with("uploads/-/system/user/")
end
end
end
context 'object_store is Store::REMOTE' do
before do
uploader.object_store = described_class::Store::REMOTE
end
describe '#store_dir' do
it 'is the composition of (dynamic_segment)' do
expect(uploader.store_dir).to start_with("user/")
end
end
end
describe '#object_store' do
it "delegates to <mount>_store on model" do
expect(object).to receive(:file_store)
uploader.object_store
end
context 'when store is null' do
before do
expect(object).to receive(:file_store).and_return(nil)
end
it "returns Store::LOCAL" do
expect(uploader.object_store).to eq(described_class::Store::LOCAL)
end
end
context 'when value is set' do
before do
expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
end
it "returns the given value" do
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
end
end
describe '#file_cache_storage?' do
context 'when file storage is used' do
before do
uploader_class.cache_storage(:file)
end
it { expect(uploader).to be_file_cache_storage }
end
context 'when is remote storage' do
before do
uploader_class.cache_storage(:fog)
end
it { expect(uploader).not_to be_file_cache_storage }
end
end
# this means the model shall include
# include RecordsUpload::Concern
# prepend ObjectStorage::Extension::RecordsUploads
# the object_store persistence is delegated to the `Upload` model.
#
context 'when persist_object_store? is false' do
let(:object) { create(:project, :with_avatar) }
let(:uploader) { object.avatar }
it { expect(object).to be_a(Avatarable) }
it { expect(uploader.persist_object_store?).to be_falsey }
describe 'delegates the object_store logic to the `Upload` model' do
it 'sets @upload to the found `upload`' do
expect(uploader.upload).to eq(uploader.upload)
end
it 'sets @object_store to the `Upload` value' do
expect(uploader.object_store).to eq(uploader.upload.store)
end
end
describe '#migrate!' do
let(:new_store) { ObjectStorage::Store::REMOTE }
before do
stub_uploads_object_storage(uploader: AvatarUploader)
end
subject { uploader.migrate!(new_store) }
it 'persists @object_store to the recorded upload' do
subject
expect(uploader.upload.store).to eq(new_store)
end
describe 'fails' do
it 'is handled gracefully' do
store = uploader.object_store
expect_any_instance_of(Upload).to receive(:save!).and_raise("An error")
expect { subject }.to raise_error("An error")
expect(uploader.exists?).to be_truthy
expect(uploader.upload.store).to eq(store)
end
end
end
end
# this means the model holds an <mounted_as>_store attribute directly
# and does not delegate the object_store persistence to the `Upload` model.
#
context 'persist_object_store? is true' do
context 'when using JobArtifactsUploader' do
let(:store) { described_class::Store::LOCAL }
let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
let(:uploader) { object.file }
context 'checking described_class' do
it "uploader include described_class::Concern" do
expect(uploader).to be_a(described_class::Concern)
end
end
describe '#use_file' do
context 'when file is stored locally' do
it "calls a regular path" do
expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
end
end
context 'when file is stored remotely' do
let(:store) { described_class::Store::REMOTE }
before do
stub_artifacts_object_storage
end
it "calls a cache path" do
expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
end
end
end
describe '#migrate!' do
subject { uploader.migrate!(new_store) }
shared_examples "updates the underlying <mounted>_store" do
it do
subject
expect(object.file_store).to eq(new_store)
end
end
context 'when using the same storage' do
let(:new_store) { store }
it "to not migrate the storage" do
subject
expect(uploader).not_to receive(:store!)
expect(uploader.object_store).to eq(store)
end
end
context 'when migrating to local storage' do
let(:store) { described_class::Store::REMOTE }
let(:new_store) { described_class::Store::LOCAL }
before do
stub_artifacts_object_storage
end
include_examples "updates the underlying <mounted>_store"
it "local file does not exist" do
expect(File.exist?(uploader.path)).to eq(false)
end
it "remote file exist" do
expect(uploader.file.exists?).to be_truthy
end
it "does migrate the file" do
subject
expect(uploader.object_store).to eq(new_store)
expect(File.exist?(uploader.path)).to eq(true)
end
end
context 'when migrating to remote storage' do
let(:new_store) { described_class::Store::REMOTE }
let!(:current_path) { uploader.path }
it "file does exist" do
expect(File.exist?(current_path)).to eq(true)
end
context 'when storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it "to raise an error" do
expect { subject }.to raise_error(/Object Storage is not enabled/)
end
end
context 'when credentials are set' do
before do
stub_artifacts_object_storage
end
include_examples "updates the underlying <mounted>_store"
it "does migrate the file" do
subject
expect(uploader.object_store).to eq(new_store)
end
it "does delete original file" do
subject
expect(File.exist?(current_path)).to eq(false)
end
context 'when subject save fails' do
before do
expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
end
it "original file is not removed" do
expect { subject }.to raise_error(/exception/)
expect(File.exist?(current_path)).to eq(true)
end
end
end
end
end
end
end
describe '#fog_directory' do
let(:remote_directory) { 'directory' }
before do
uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
end
subject { uploader.fog_directory }
it { is_expected.to eq(remote_directory) }
end
describe '#fog_credentials' do
let(:connection) { Settingslogic.new("provider" => "AWS") }
before do
uploader_class.storage_options double(object_store: double(connection: connection))
end
subject { uploader.fog_credentials }
it { is_expected.to eq(provider: 'AWS') }
end
describe '#fog_public' do
subject { uploader.fog_public }
it { is_expected.to eq(false) }
end
end
...@@ -14,6 +14,18 @@ describe PersonalFileUploader do ...@@ -14,6 +14,18 @@ describe PersonalFileUploader do
upload_path: IDENTIFIER, upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}] absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[\d+/\h+],
upload_path: IDENTIFIER
end
describe '#to_h' do describe '#to_h' do
before do before do
subject.instance_variable_set(:@secret, 'secret') subject.instance_variable_set(:@secret, 'secret')
...@@ -30,4 +42,14 @@ describe PersonalFileUploader do ...@@ -30,4 +42,14 @@ describe PersonalFileUploader do
) )
end end
end end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end end
require 'spec_helper'
describe ObjectStorageUploadWorker do
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
end
context 'for LFS' do
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
let(:uploader_class) { LfsObjectUploader }
let(:subject_class) { LfsObject }
let(:file_field) { :file }
let(:subject_id) { lfs_object.id }
context 'when object storage is enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'uploads object to storage' do
expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
context 'when background upload is disabled' do
before do
allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
end
it 'is skipped' do
expect { perform }.not_to change { lfs_object.reload.file_store }
end
end
end
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
perform
expect(lfs_object.reload.file_store).to eq(local)
end
end
end
context 'for legacy artifacts' do
let(:build) { create(:ci_build, :legacy_artifacts) }
let(:uploader_class) { LegacyArtifactUploader }
let(:subject_class) { Ci::Build }
let(:file_field) { :artifacts_file }
let(:subject_id) { build.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it "migrates file to remote storage" do
perform
expect(build.reload.artifacts_file_store).to eq(remote)
end
context 'for artifacts_metadata' do
let(:file_field) { :artifacts_metadata }
it 'migrates metadata to remote storage' do
perform
expect(build.reload.artifacts_metadata_store).to eq(remote)
end
end
end
end
end
context 'for job artifacts' do
let(:artifact) { create(:ci_job_artifact, :archive) }
let(:uploader_class) { JobArtifactUploader }
let(:subject_class) { Ci::JobArtifact }
let(:file_field) { :file }
let(:subject_id) { artifact.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it "migrates file to remote storage" do
perform
expect(artifact.reload.file_store).to eq(remote)
end
end
end
end
end