Commit 835fe4d3 authored by Sean McGivern

Merge branch '40781-os-to-ce' into 'master'

Bring Object Storage to CE

Closes #4171, #4163, #3370, #2841, and #29203

See merge request gitlab-org/gitlab-ce!17358
parents ab8f13c3 6d63a098
module SendFileUpload
def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil, disposition: 'attachment')
if attachment
redirect_params[:query] = { "response-content-disposition" => "#{disposition};filename=#{attachment.inspect}" }
send_params.merge!(filename: attachment, disposition: disposition)
end
if file_upload.file_storage?
send_file file_upload.path, send_params
elsif file_upload.class.proxy_download_enabled?
headers.store(*Gitlab::Workhorse.send_url(file_upload.url(**redirect_params)))
head :ok
else
redirect_to file_upload.url(**redirect_params)
end
end
end
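# For illustration, a hedged usage sketch of the SendFileUpload concern.
# The DocumentsController and Document model below are hypothetical, not
# part of this change: a controller includes the concern and hands
# send_upload the mounted uploader.
class DocumentsController < ApplicationController
  include SendFileUpload

  def download
    document = Document.find(params[:id])

    # Serves from local disk, proxies through Workhorse when proxy_download
    # is enabled, or redirects the client straight to object storage.
    send_upload(document.file, attachment: document.file.filename)
  end
end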
module UploadsActions
include Gitlab::Utils::StrongMemoize
include SendFileUpload
UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
......@@ -26,14 +27,11 @@ module UploadsActions
def show
return render_404 unless uploader&.exists?
if uploader.file_storage?
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
expires_in 0.seconds, must_revalidate: true, private: true
expires_in 0.seconds, must_revalidate: true, private: true
send_file uploader.file.path, disposition: disposition
else
redirect_to uploader.url
end
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
send_upload(uploader, attachment: uploader.filename, disposition: disposition)
end
private
......@@ -62,19 +60,27 @@ module UploadsActions
end
def build_uploader_from_upload
return nil unless params[:secret] && params[:filename]
return unless uploader = build_uploader
upload_path = uploader_class.upload_path(params[:secret], params[:filename])
upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
upload_paths = uploader.upload_paths(params[:filename])
upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_paths)
upload&.build_uploader
end
def build_uploader_from_params
return unless uploader = build_uploader
uploader.retrieve_from_store!(params[:filename])
uploader
end
def build_uploader
return unless params[:secret] && params[:filename]
uploader = uploader_class.new(model, secret: params[:secret])
return nil unless uploader.model_valid?
return unless uploader.model_valid?
uploader.retrieve_from_store!(params[:filename])
uploader
end
......
class Projects::ArtifactsController < Projects::ApplicationController
include ExtractsPath
include RendersBlob
include SendFileUpload
layout 'project'
before_action :authorize_read_build!
......@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
before_action :entry, only: [:file]
def download
if artifacts_file.file_storage?
send_file artifacts_file.path, disposition: 'attachment'
else
redirect_to artifacts_file.url
end
send_upload(artifacts_file, attachment: artifacts_file.filename)
end
def browse
......@@ -45,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def raw
path = Gitlab::Ci::Build::Artifacts::Path
.new(params[:path])
path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
send_artifacts_entry(build, path)
end
......@@ -75,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def validate_artifacts!
render_404 unless build && build.artifacts?
render_404 unless build&.artifacts?
end
def build
......
class Projects::JobsController < Projects::ApplicationController
include SendFileUpload
before_action :build, except: [:index, :cancel_all]
before_action :authorize_read_build!,
......@@ -117,11 +119,17 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
else
render_404
if trace_artifact_file
send_upload(trace_artifact_file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
else
render_404
end
end
end
end
......@@ -136,9 +144,21 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :erase_build, build)
end
def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' }
end
def raw_redirect_params
{ query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
end
def trace_artifact_file
@trace_artifact_file ||= build.job_artifacts_trace&.file
end
def build
@build ||= project.builds.find(params[:id])
.present(current_user: current_user)
.present(current_user: current_user)
end
def build_path(build)
......
class Projects::LfsStorageController < Projects::GitHttpClientController
include LfsRequest
include WorkhorseRequest
include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
......@@ -11,7 +12,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
return
end
send_file lfs_object.file.path, content_type: "application/octet-stream"
send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
end
def upload_authorize
......@@ -70,10 +71,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
end
def move_tmp_file_to_storage(object, path)
File.open(path) do |f|
object.file = f
end
object.file = File.open(path)
object.file.store!
object.save
end
......
......@@ -2,6 +2,7 @@
class Projects::RawController < Projects::ApplicationController
include ExtractsPath
include BlobHelper
include SendFileUpload
before_action :require_non_empty_project
before_action :assign_ref_vars
......@@ -31,7 +32,7 @@ class Projects::RawController < Projects::ApplicationController
lfs_object = find_lfs_object
if lfs_object && lfs_object.project_allowed_access?(@project)
send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
send_upload(lfs_object.file, attachment: @blob.name)
else
render_404
end
......
class Appearance < ActiveRecord::Base
include CacheMarkdownField
include AfterCommitQueue
include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
......
......@@ -3,6 +3,7 @@ module Ci
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include ObjectStorage::BackgroundMove
include Presentable
include Importable
......@@ -45,6 +46,7 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
......@@ -365,13 +367,19 @@ module Ci
project.running_or_pending_build_count(force: true)
end
def browsable_artifacts?
artifacts_metadata?
end
def artifacts_metadata_entry(path, **options)
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
artifacts_metadata.path,
path,
**options)
artifacts_metadata.use_file do |metadata_path|
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
metadata_path,
path,
**options)
metadata.to_entry
metadata.to_entry
end
end
def erase_artifacts!
......
module Ci
class JobArtifact < ActiveRecord::Base
include AfterCommitQueue
include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model
belongs_to :project
......@@ -7,9 +9,11 @@ module Ci
before_save :set_size, if: :file_changed?
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
mount_uploader :file, JobArtifactUploader
delegate :open, :exists?, to: :file
delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
......@@ -21,6 +25,10 @@ module Ci
self.where(project: project).sum(:size)
end
def local_store?
[nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
def set_size
self.size = file.size
end
......
......@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
......
class LfsObject < ActiveRecord::Base
include AfterCommitQueue
include ObjectStorage::BackgroundMove
has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :lfs_objects_projects
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
validates :oid, presence: true, uniqueness: true
mount_uploader :file, LfsObjectUploader
......@@ -10,6 +15,10 @@ class LfsObject < ActiveRecord::Base
projects.exists?(project.lfs_storage_project.id)
end
def local_store?
[nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
end
def self.destroy_unreferenced
joins("LEFT JOIN lfs_objects_projects ON lfs_objects_projects.lfs_object_id = #{table_name}.id")
.where(lfs_objects_projects: { id: nil })
......
......@@ -9,6 +9,8 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
......@@ -21,6 +23,7 @@ class Upload < ActiveRecord::Base
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
......@@ -30,11 +33,11 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
def build_uploader
uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
......@@ -51,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def delete_file!
......@@ -61,10 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
def local?
true
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
......
......@@ -81,11 +81,13 @@ module Projects
end
def extract_tar_archive!(temp_path)
results = Open3.pipeline(%W(gunzip -c #{artifacts}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
build.artifacts_file.use_file do |artifacts_path|
results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
end
end
def extract_zip_archive!(temp_path)
......@@ -103,8 +105,10 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
raise FailedToExtractError, 'pages failed to extract'
build.artifacts_file.use_file do |artifacts_path|
unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
raise FailedToExtractError, 'pages failed to extract'
end
end
end
......
class AttachmentUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
storage :file
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
include UploaderHelper
private
......
class AvatarUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
storage :file
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
def exists?
model.avatar.file && model.avatar.file.present?
end
def move_to_cache
def move_to_store
false
end
def move_to_store
def move_to_cache
false
end
......
......@@ -10,7 +10,11 @@ class FileMover
def execute
move
uploader.record_upload if update_markdown
if update_markdown
uploader.record_upload
uploader.schedule_background_upload
end
end
private
......@@ -24,11 +28,8 @@ class FileMover
updated_text = model.read_attribute(update_field)
.gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text)
true
rescue
revert
false
end
......
......@@ -9,14 +9,18 @@
class FileUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
storage :file
after :remove, :prune_store_dir
# FileUploader does not run in a model transaction, so we can simply
# enqueue a job after the :store hook.
after :store, :schedule_background_upload
def self.root
File.join(options.storage_path, 'uploads')
end
......@@ -28,8 +32,11 @@ class FileUploader < GitlabUploader
)
end
def self.base_dir(model)
model_path_segment(model)
def self.base_dir(model, store = Store::LOCAL)
decorated_model = model
decorated_model = Storage::HashedProject.new(model) if store == Store::REMOTE
model_path_segment(decorated_model)
end
# used in migrations and import/exports
......@@ -47,21 +54,24 @@ class FileUploader < GitlabUploader
#
# Returns a String without a trailing slash
def self.model_path_segment(model)
if model.hashed_storage?(:attachments)
model.disk_path
case model
when Storage::HashedProject then model.disk_path
else
model.full_path
model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
end
end
def self.upload_path(secret, identifier)
File.join(secret, identifier)
end
def self.generate_secret
SecureRandom.hex
end
def upload_paths(filename)
[
File.join(secret, filename),
File.join(base_dir(Store::REMOTE), secret, filename)
]
end
attr_accessor :model
def initialize(model, mounted_as = nil, **uploader_context)
......@@ -71,8 +81,10 @@ class FileUploader < GitlabUploader
apply_context!(uploader_context)
end
def base_dir
self.class.base_dir(@model)
# enforce the usage of Hashed storage when storing to
# remote store as the FileMover doesn't support OS
def base_dir(store = nil)
self.class.base_dir(@model, store || object_store)
end
# we don't need to know the actual path, an uploader instance should be
......@@ -82,15 +94,19 @@ class FileUploader < GitlabUploader
end
def upload_path
self.class.upload_path(dynamic_segment, identifier)
end
def model_path_segment
self.class.model_path_segment(@model)
if file_storage?
# Legacy path relative to project.full_path
File.join(dynamic_segment, identifier)
else
File.join(store_dir, identifier)
end
end
def store_dir
File.join(base_dir, dynamic_segment)
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
}
end
def markdown_link
......
......@@ -37,12 +37,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
cache_storage.is_a?(CarrierWave::Storage::File)
end
# Reduce disk IO
def move_to_cache
file_storage?
end
# Reduce disk IO
def move_to_store
file_storage?
end
......@@ -51,10 +49,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
file.present?
end
def store_dir
File.join(base_dir, dynamic_segment)
end
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
......@@ -76,6 +70,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
......
class JobArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
......@@ -14,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
end
def open
raise 'Only File System is supported' unless file_storage?
File.open(path, "rb") if path
if file_storage?
File.open(path, "rb") if path
else
::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
end
end
private
......
class LegacyArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
......
class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
# LfsObjects are in `tmp/upload` instead of `tmp/uploads`
def self.workhorse_upload_path
File.join(root, 'tmp/upload')
end
include ObjectStorage::Concern
storage_options Gitlab.config.lfs
......
......@@ -4,7 +4,7 @@ class NamespaceFileUploader < FileUploader
options.storage_path
end
def self.base_dir(model)
def self.base_dir(model, _store = nil)
File.join(options.base_dir, 'namespace', model_path_segment(model))
end
......@@ -14,6 +14,13 @@ class NamespaceFileUploader < FileUploader
# Re-Override
def store_dir
File.join(base_dir, dynamic_segment)
store_dirs[object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join('namespace', self.class.model_path_segment(model), dynamic_segment)
}
end
end
require 'fog/aws'
require 'carrierwave/storage/fog'
#
# This concern should add object storage support
# to the GitlabUploader class
#
module ObjectStorage
RemoteStoreError = Class.new(StandardError)
UnknownStoreError = Class.new(StandardError)
ObjectStorageUnavailable = Class.new(StandardError)
module Store
LOCAL = 1
REMOTE = 2
end
module Extension
# this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
module RecordsUploads
extend ActiveSupport::Concern
def self.prepended(base)
raise "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(RecordsUploads::Concern)
end
def retrieve_from_store!(identifier)
paths = store_dirs.map { |store, path| File.join(path, identifier) }
unless current_upload_satisfies?(paths, model)
# the upload we already have isn't right, find the correct one
self.upload = uploads.find_by(model: model, path: paths)
end
super
end
def build_upload
super.tap do |upload|
upload.store = object_store
end
end
def upload=(upload)
return unless upload
self.object_store = upload.store
super
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
return unless upload
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
upload.class.to_s,
mounted_as,
upload.id)
end
private
def current_upload_satisfies?(paths, model)
return false unless upload
return false unless model
paths.include?(upload.path) &&
upload.model_id == model.id &&
upload.model_type == model.class.base_class.sti_name
end
end
end
# Add support for automatic background uploading after the file is stored.
#
module BackgroundMove
extend ActiveSupport::Concern
def background_upload(mount_points = [])
return unless mount_points.any?
run_after_commit do
mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
end
end
def changed_mounts
self.class.uploaders.select do |mount, uploader_class|
mounted_as = uploader_class.serialization_column(self.class, mount)
uploader = send(:"#{mounted_as}") # rubocop:disable GitlabSecurity/PublicSend
next unless uploader
next unless uploader.exists?
next unless send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
mount
end.keys
end
included do
after_save on: [:create, :update] do
background_upload(changed_mounts)
end
end
end
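# A hedged sketch of opting a model into automatic background uploads.
# The Design model is hypothetical; AttachmentUploader is one of the
# uploaders converted in this merge request.
class Design < ActiveRecord::Base
  include ObjectStorage::BackgroundMove

  # After a create/update commit, changed_mounts is computed and each
  # changed uploader schedules a BackgroundMoveWorker job.
  mount_uploader :file, AttachmentUploader
end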
module Concern
extend ActiveSupport::Concern
included do |base|
base.include(ObjectStorage)
after :migrate, :delete_migrated_file
end
class_methods do
def object_store_options
options.object_store
end
def object_store_enabled?
object_store_options.enabled
end
def background_upload_enabled?
object_store_options.background_upload
end
def proxy_download_enabled?
object_store_options.proxy_download
end
def direct_download_enabled?
!proxy_download_enabled?
end
def object_store_credentials
object_store_options.connection.to_hash.deep_symbolize_keys
end
def remote_store_path
object_store_options.remote_directory
end
def serialization_column(model_class, mount_point)
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
end
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
def object_store
@object_store ||= model.try(store_serialization_column) || Store::LOCAL
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def object_store=(value)
@object_store = value || Store::LOCAL
@storage = storage_for(object_store)
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
# Return true if the current file is part of the model (i.e. is mounted in the model)
#
def persist_object_store?
model.respond_to?(:"#{store_serialization_column}=")
end
# Save the current @object_store to the model <mounted_as>_store column
def persist_object_store!
return unless persist_object_store?
updated = model.update_column(store_serialization_column, object_store)
raise 'Failed to update object store' unless updated
end
def use_file
if file_storage?
return yield path
end
begin
cache_stored_file!
yield cache_path
ensure
cache_storage.delete_dir!(cache_path(nil))
end
end
def filename
super || file&.filename
end
#
# Move the file to another store
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
#
def migrate!(new_store)
uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
raise 'Already running' unless uuid
unsafe_migrate!(new_store)
ensure
Gitlab::ExclusiveLease.cancel(exclusive_lease_key, uuid)
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
model.class.name,
mounted_as,
model.id)
end
def fog_directory
self.class.remote_store_path
end
def fog_credentials
self.class.object_store_credentials
end
def fog_public
false
end
def delete_migrated_file(migrated_file)
migrated_file.delete if exists?
end
def exists?
file.present?
end
def store_dir(store = nil)
store_dirs[store || object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(dynamic_segment)
}
end
private
def schedule_background_upload?
self.class.object_store_enabled? &&
self.class.background_upload_enabled? &&
self.file_storage?
end
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
@file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def serialization_column
self.class.serialization_column(model.class, mounted_as)
end
# Returns the column where the 'store' is saved
# defaults to 'store'
def store_serialization_column
[serialization_column, 'store'].compact.join('_').to_sym
end
def storage
@storage ||= storage_for(object_store)
end
def storage_for(store)
case store
when Store::REMOTE
raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
when Store::LOCAL
CarrierWave::Storage::File.new(self)
else
raise UnknownStoreError
end
end
def exclusive_lease_key
"object_storage_migrate:#{model.class}:#{model.id}"
end
#
# Move the file to another store
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
#
def unsafe_migrate!(new_store)
return unless object_store != new_store
return unless file
new_file = nil
file_to_delete = file
from_object_store = object_store
self.object_store = new_store # changes the storage and file
cache_stored_file! if file_storage?
with_callbacks(:migrate, file_to_delete) do
with_callbacks(:store, file_to_delete) do # for #store_versions!
new_file = storage.store!(file)
persist_object_store!
self.file = new_file
end
end
file
rescue => e
# in case of failure delete new file
new_file.delete unless new_file.nil?
# revert back to the old file
self.object_store = from_object_store
self.file = file_to_delete
raise e
end
end
end
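# A hedged sketch of driving the migration API directly (illustrative
# only). migrate! takes an exclusive lease, stores the file through the
# target backend, persists the <mounted_as>_store column, and reverts
# to the original store on failure.
lfs_object = LfsObject.find(42)

begin
  lfs_object.file.migrate!(ObjectStorage::Store::REMOTE)
rescue => e
  # 'Already running' is raised while the exclusive lease is held; other
  # errors are re-raised after the uploader reverts to the old store.
  Rails.logger.warn("LFS migration failed: #{e.message}")
end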
......@@ -4,7 +4,7 @@ class PersonalFileUploader < FileUploader
options.storage_path
end
def self.base_dir(model)
def self.base_dir(model, _store = nil)
File.join(options.base_dir, model_path_segment(model))
end
......@@ -14,6 +14,12 @@ class PersonalFileUploader < FileUploader
File.join(model.class.to_s.underscore, model.id.to_s)
end
def object_store
return Store::LOCAL unless model
super
end
# model_path_segment does not require a model to be passed, so we can always
# generate a path, even when there's no model.
def model_valid?
......@@ -22,7 +28,14 @@ class PersonalFileUploader < FileUploader
# Revert-Override
def store_dir
File.join(base_dir, dynamic_segment)
store_dirs[object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(self.class.model_path_segment(model), dynamic_segment)
}
end
private
......
......@@ -24,8 +24,7 @@ module RecordsUploads
uploads.where(path: upload_path).delete_all
upload.destroy! if upload
self.upload = build_upload
upload.save!
self.upload = build_upload.tap(&:save!)
end
end
......
......@@ -35,7 +35,7 @@
= link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
Download
- if @build.artifacts_metadata?
- if @build.browsable_artifacts?
= link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
Browse
......
......@@ -39,6 +39,10 @@
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
- object_storage_upload
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
......
# Concern for setting Sidekiq settings for the various GitLab ObjectStorage workers.
module ObjectStorageQueue
extend ActiveSupport::Concern
included do
queue_namespace :object_storage
end
end
module ObjectStorage
class BackgroundMoveWorker
include ApplicationWorker
include ObjectStorageQueue
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = build_uploader(subject, file_field&.to_sym)
uploader.migrate!(ObjectStorage::Store::REMOTE)
end
def build_uploader(subject, mount_point)
case subject
when Upload then subject.build_uploader(mount_point)
else
subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
end
end
end
end
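# For reference, a hedged sketch of what ObjectStorage::Concern's
# schedule_background_upload enqueues for a mounted uploader (the id
# below is illustrative).
ObjectStorage::BackgroundMoveWorker.perform_async(
  'LfsObjectUploader', # uploader_class_name
  'LfsObject',         # subject_class_name
  'file',              # file_field, i.e. the mount point
  42                   # subject_id
)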
# frozen_string_literal: true
# rubocop:disable Metrics/LineLength
# rubocop:disable Style/Documentation
module ObjectStorage
class MigrateUploadsWorker
include ApplicationWorker
include ObjectStorageQueue
SanityCheckError = Class.new(StandardError)
class Upload < ActiveRecord::Base
# Upper limit for foreground checksum processing
CHECKSUM_THRESHOLD = 100.megabytes
belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :size, presence: true
validates :path, presence: true
validates :model, presence: true
validates :uploader, presence: true
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
def calculate_checksum!
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
end
def exist?
File.exist?(absolute_path)
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def checksummable?
checksum.nil? && local? && exist?
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
UploadChecksumWorker.perform_async(id)
end
def relative_path?
!path.start_with?('/')
end
def identifier
File.basename(path)
end
def uploader_class
Object.const_get(uploader)
end
end
class MigrationResult
attr_reader :upload
attr_accessor :error
def initialize(upload, error = nil)
@upload, @error = upload, error
end
def success?
error.nil?
end
def to_s
success? ? "Migration successful." : "Error while migrating #{upload.id}: #{error.message}"
end
end
module Report
class MigrationFailures < StandardError
attr_reader :errors
def initialize(errors)
@errors = errors
end
def message
errors.map(&:message).join("\n")
end
end
def report!(results)
success, failures = results.partition(&:success?)
Rails.logger.info header(success, failures)
Rails.logger.warn failures(failures)
raise MigrationFailures.new(failures.map(&:error)) if failures.any?
end
def header(success, failures)
"Migrated #{success.count}/#{success.count + failures.count} files."
end
def failures(failures)
failures.map { |f| "\t#{f}" }.join("\n")
end
end
include Report
def self.enqueue!(uploads, mounted_as, to_store)
sanity_check!(uploads, mounted_as)
perform_async(uploads.ids, mounted_as, to_store)
end
# We need to be sure all the uploads are for the same uploader and model type
# and that the mount point exists if provided.
#
def self.sanity_check!(uploads, mounted_as)
upload = uploads.first
uploader_class = upload.uploader.constantize
model_class = uploads.first.model_type.constantize
uploader_types = uploads.map(&:uploader).uniq
model_types = uploads.map(&:model_type).uniq
model_has_mount = mounted_as.nil? || model_class.uploaders[mounted_as] == uploader_class
raise(SanityCheckError, "Multiple uploaders found: #{uploader_types}") unless uploader_types.count == 1
raise(SanityCheckError, "Multiple model types found: #{model_types}") unless model_types.count == 1
raise(SanityCheckError, "Mount point #{mounted_as} not found in #{model_class}.") unless model_has_mount
end
def perform(ids, mounted_as, to_store)
@mounted_as = mounted_as&.to_sym
@to_store = to_store
uploads = Upload.preload(:model).where(id: ids)
sanity_check!(uploads)
results = migrate(uploads)
report!(results)
rescue SanityCheckError => e
# do not retry: the job is insane
Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
end
def sanity_check!(uploads)
self.class.sanity_check!(uploads, @mounted_as)
end
def build_uploaders(uploads)
uploads.map { |upload| upload.build_uploader(@mounted_as) }
end
def migrate(uploads)
build_uploaders(uploads).map(&method(:process_uploader))
end
def process_uploader(uploader)
MigrationResult.new(uploader.upload).tap do |result|
begin
uploader.migrate!(@to_store)
rescue => e
result.error = e
end
end
end
end
end
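# A hedged sketch of enqueueing a migration batch through this worker,
# roughly what the gitlab:uploads:migrate rake task is expected to do;
# the scope below is illustrative.
uploads = ObjectStorage::MigrateUploadsWorker::Upload
  .stored_locally
  .where(uploader: 'AvatarUploader', model_type: 'Project')

ObjectStorage::MigrateUploadsWorker.enqueue!(uploads, :avatar, ObjectStorage::Store::REMOTE)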
# @Deprecated - remove once the `object_storage_upload` queue is empty
# The queue has been renamed `object_storage:object_storage_background_move`
#
class ObjectStorageUploadWorker
include ApplicationWorker
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
end
end
---
title: Update CI/CD secret variables list to be dynamic and save without reloading
the page
merge_request: 4110
author:
type: added
---
title: Fix JavaScript bundle running on Cluster update/destroy pages
merge_request:
author:
type: fixed
---
title: Authorize project access with an external service
merge_request: 4675
author:
type: added
---
title: Add object storage support for LFS objects, CI artifacts, and uploads.
merge_request: 17358
author:
type: added
---
title: File uploads in remote storage now support project renaming.
merge_request: 4597
author:
type: fixed
......@@ -145,18 +145,55 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
# object_store:
# enabled: false
# remote_directory: artifacts # The bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
# connection:
# provider: AWS # Only AWS supported at the moment
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: eu-central-1
## Git LFS
lfs:
enabled: true
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
object_store:
enabled: false
remote_directory: lfs-objects # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
# Use the following options to configure an AWS compatible host
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## Uploads (attachments, avatars, etc...)
uploads:
# The location where uploads objects are stored (default: public/).
# storage_path: public/
# base_dir: uploads/-/system
object_store:
enabled: false
# remote_directory: uploads # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
# connection:
# provider: AWS
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: eu-central-1
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
......@@ -655,10 +692,39 @@ test:
enabled: true
lfs:
enabled: false
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
object_store:
enabled: false
remote_directory: lfs-objects # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
artifacts:
path: tmp/tests/artifacts
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
object_store:
enabled: false
remote_directory: artifacts # The bucket name
background_upload: false
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
uploads:
storage_path: tmp/tests/public
object_store:
enabled: false
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
gitlab:
host: localhost
port: 80
......
......@@ -305,6 +305,13 @@ Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values
# Settings.artifact['path'] is deprecated, use `storage_path` instead
Settings.artifacts['path'] = Settings.artifacts['storage_path']
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Registry
......@@ -340,6 +347,13 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] ||= Settingslogic.new({})
Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
Settings.lfs['object_store']['remote_directory'] ||= nil
Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
# Uploads
......@@ -347,6 +361,13 @@ Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] ||
Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
Settings.uploads['object_store'] ||= Settingslogic.new({})
Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
Settings.uploads['object_store']['proxy_download'] = false if Settings.uploads['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.uploads['object_store']['connection']&.deep_stringify_keys!
#
# Mattermost
......
#
# Monkey patching the https support for private urls
# See https://gitlab.com/gitlab-org/gitlab-ee/issues/4879
#
module Fog
module Storage
class GoogleXML
class File < Fog::Model
module MonkeyPatch
def url(expires)
requires :key
collection.get_https_url(key, expires)
end
end
prepend MonkeyPatch
end
end
end
end
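# For context, a hedged sketch of the call this patch affects (bucket,
# key, and credentials are placeholders): with MonkeyPatch prepended,
# #url on a Google XML storage file returns an HTTPS URL.
connection = Fog::Storage.new(
  provider: 'Google',
  google_storage_access_key_id: 'KEY',
  google_storage_secret_access_key: 'SECRET'
)

file = connection.directories.get('uploads').files.get('avatar.png')
file.url(Time.now + 3600) # => "https://storage.googleapis.com/uploads/..."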
......@@ -68,5 +68,7 @@
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
- [pages_domain_verification, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
- [plugin, 1]
- [pipeline_background, 1]
class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column(:ci_builds, :artifacts_file_store, :integer)
add_column(:ci_builds, :artifacts_metadata_store, :integer)
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddFileStoreToLfsObjects < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
# DOWNTIME_REASON = ''
# When using the methods "add_concurrent_index", "remove_concurrent_index" or
# "add_column_with_default" you must disable the use of transactions
# as these methods can not run in an existing transaction.
# When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
# that either of them is the _only_ method called in the migration,
# any other changes should go in a separate migration.
# This ensures that upon failure _only_ the index creation or removing fails
# and can be retried or reverted easily.
#
# To disable transactions uncomment the following line and remove these
# comments:
# disable_ddl_transaction!
def change
add_column(:lfs_objects, :file_store, :integer)
end
end
class AddFileStoreJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def change
add_column(:ci_job_artifacts, :file_store, :integer)
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddStoreColumnToUploads < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column(:uploads, :store, :integer)
end
end
......@@ -307,6 +307,8 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
t.integer "artifacts_file_store"
t.integer "artifacts_metadata_store"
t.boolean "protected"
t.integer "failure_reason"
end
......@@ -345,6 +347,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "file_store"
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
......@@ -1009,6 +1012,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
t.integer "file_store"
end
add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
......@@ -1824,6 +1828,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at", null: false
t.string "mount_point"
t.string "secret"
t.integer "store"
end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
......
......@@ -87,10 +87,124 @@ _The artifacts are stored by default in
### Using object storage
>**Notes:**
- [Introduced][ee-1762] in [GitLab Premium][eep] 9.4.
- Since version 9.5, artifacts are [browsable] when object storage is enabled; 9.4 lacks this feature.
> Available in [GitLab Premium](https://about.gitlab.com/products/) and
[GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
If you don't want to use the local disk where GitLab is installed to store the
artifacts, you can use object storage such as AWS S3 instead.
This configuration relies on valid AWS credentials being configured already.
Use an [Object storage option][os] like AWS S3 to store job artifacts.
### Object Storage Settings
For source installations, the following settings are nested under `artifacts:` and then `object_store:`. On Omnibus installs they are prefixed by `artifacts_object_store_`.
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where artifacts will be stored | |
| `background_upload` | Set to `false` to disable automatic upload. This option may be removed once uploads go directly to S3 | `true` |
| `proxy_download` | Set to `true` to proxy all files served through GitLab. Leaving it `false` reduces egress traffic, since clients download directly from remote storage instead of having GitLab proxy the data | `false` |
| `connection` | Various connection options described below | |
#### S3 compatible connection settings
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
| Setting | Description | Default |
|---------|-------------|---------|
| `provider` | Always `AWS` for compatible hosts | AWS |
| `aws_access_key_id` | AWS credentials, or compatible | |
| `aws_secret_access_key` | AWS credentials, or compatible | |
| `region` | AWS region | us-east-1 |
| `host` | S3-compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
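For orientation, these `connection` settings are handed more or less directly to
Fog. A hedged sketch of the equivalent direct Fog connection (all values are
placeholders):

```ruby
require 'fog/aws'

# Roughly how the `connection` block maps onto a Fog storage connection;
# `endpoint` and `path_style` matter only for S3-compatible services.
connection = Fog::Storage.new(
  provider: 'AWS',
  aws_access_key_id: 'AWS_ACCESS_KEY_ID',
  aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
  region: 'eu-central-1',
  endpoint: 'http://127.0.0.1:9000', # e.g. Minio
  path_style: true
)

connection.directories.get('artifacts') # the configured bucket
```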
**In Omnibus installations:**
_The artifacts are stored by default in
`/var/opt/gitlab/gitlab-rails/shared/artifacts`._
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, replacing the
example values with your own:
```ruby
gitlab_rails['artifacts_enabled'] = true
gitlab_rails['artifacts_object_store_enabled'] = true
gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
gitlab_rails['artifacts_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```
NOTE: For GitLab 9.4+, if you are using AWS IAM profiles, be sure to omit the
AWS access key and secret access key/value pairs. For example:
```ruby
gitlab_rails['artifacts_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'use_iam_profile' => true
}
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
1. Migrate any existing local artifacts to the object storage:
```bash
gitlab-rake gitlab:artifacts:migrate
```
Currently this has to be executed manually, and it will migrate the existing
artifacts to object storage; all new artifacts will still be stored on the
local disk. In the future you will be given an option to define a default
storage for all new artifacts.
---
**In installations from source:**
_The artifacts are stored by default in
`/home/git/gitlab/shared/artifacts`._
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
artifacts:
enabled: true
object_store:
enabled: true
remote_directory: "artifacts" # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
```
1. Save the file and [restart GitLab][] for the changes to take effect.
1. Migrate any existing local artifacts to the object storage:
```bash
sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
```
Use an [Object storage option][ee-os] like AWS S3 to store job artifacts.
Currently this has to be executed manually, and it will migrate the existing
artifacts to object storage; all new artifacts will still be stored on the
local disk. In the future you will be given an option to define a default
storage for all new artifacts.
## Expiring artifacts
......@@ -194,7 +308,7 @@ When clicking on a specific file, [GitLab Workhorse] extracts it
from the archive and the download begins. This implementation saves space,
memory and disk I/O.
[reconfigure gitlab]: restart_gitlab.md "How to restart GitLab"
[restart gitlab]: restart_gitlab.md "How to restart GitLab"
[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
[gitlab workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse "GitLab Workhorse repository"
[ee-os]: https://docs.gitlab.com/ee/administration/job_artifacts.html#using-object-storage
[os]: https://docs.gitlab.com/administration/job_artifacts.html#using-object-storage
# Uploads Migrate Rake Task
## Migrate to Object Storage
After [configuring the object storage](../../uploads.md#using-object-storage) for GitLab's uploads, you may use this task to migrate existing uploads from the local storage to the remote storage.
>**Note:**
All of the processing will be done in a background worker and requires **no downtime**.
This task uses 3 parameters to find uploads to migrate.
>**Note:**
These parameters are mainly internal to GitLab's structure; you may want to refer to the task list below instead.
Parameter | Type | Description
--------- | ---- | -----------
`uploader_class` | string | Type of the uploader to migrate from
`model_class` | string | Type of the model to migrate from
`mount_point` | string/symbol | Name of the model's column on which the uploader is mounted.
This task also accepts some environment variables which you can use to override
certain values:
Variable | Type | Description
-------- | ---- | -----------
`BATCH` | integer | Specifies the size of the batch. Defaults to 200.
**Omnibus Installation**
```bash
# gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
# Avatars
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
**Source Installation**
>**Note:**
Use `RAILS_ENV=production` for every task.
```bash
# sudo -u git -H bundle exec rake gitlab:uploads:migrate
# Avatars
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
# Uploads administration
>**Notes:**
Uploads represent all user data that may be sent to GitLab as a single file. As an example, avatars and notes' attachments are uploads. Uploads are integral to GitLab functionality, and therefore cannot be disabled.
### Using local storage
>**Notes:**
This is the default configuration.
To change the location where the uploads are stored locally, follow the steps
below.
---
**In Omnibus installations:**
>**Notes:**
For historical reasons, uploads are stored in a base directory, which by default is `uploads/-/system`. Changing this configuration option on an existing GitLab installation is strongly discouraged.
_The uploads are stored by default in `/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
1. To change the storage path, for example to `/mnt/storage/uploads`, edit
`/etc/gitlab/gitlab.rb` and add the following line:
```ruby
gitlab_rails['uploads_storage_path'] = "/mnt/storage/"
gitlab_rails['uploads_base_dir'] = "uploads"
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
---
**In installations from source:**
_The uploads are stored by default in
`/home/git/gitlab/public/uploads/-/system`._
1. To change the storage path, for example to `/mnt/storage/uploads`, edit
`/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
```yaml
uploads:
storage_path: /mnt/storage
base_dir: uploads
```
1. Save the file and [restart GitLab][] for the changes to take effect.
### Using object storage
>**Notes:**
- [Introduced][ee-3867] in [GitLab Enterprise Edition Premium][eep] 10.5.
If you don't want to use the local disk where GitLab is installed to store the
uploads, you can use an object storage provider like AWS S3 instead.
This configuration relies on valid AWS credentials being configured already.
### Object Storage Settings
For source installations, the following settings are nested under `uploads:` and then `object_store:`. On Omnibus installs they are prefixed by `uploads_object_store_`.
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where uploads will be stored | |
| `background_upload` | Set to `false` to disable automatic upload. This option may be removed once uploads go directly to S3 | `true` |
| `proxy_download` | Set to `true` to proxy all files served through GitLab. Leaving it `false` reduces egress traffic, since clients download directly from remote storage instead of having GitLab proxy the data | `false` |
| `connection` | Various connection options described below | |
#### S3 compatible connection settings
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
| Setting | Description | Default |
|---------|-------------|---------|
| `provider` | Always `AWS` for compatible hosts | AWS |
| `aws_access_key_id` | AWS credentials, or compatible | |
| `aws_secret_access_key` | AWS credentials, or compatible | |
| `region` | AWS region | us-east-1 |
| `host` | S3-compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
**In Omnibus installations:**
_The uploads are stored by default in
`/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, replacing the
example values with your own:
```ruby
gitlab_rails['uploads_object_store_enabled'] = true
gitlab_rails['uploads_object_store_remote_directory'] = "uploads"
gitlab_rails['uploads_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```
>**Note:**
If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
```ruby
gitlab_rails['uploads_object_store_connection'] = {
'provider' => 'AWS',
'region' => 'eu-central-1',
'use_iam_profile' => true
}
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
1. Migrate any existing local uploads to the object storage:
>**Notes:**
This task complies with the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing will be done in a background worker and requires **no downtime**.
```bash
# gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
# Avatars
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
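For example, `BATCH=100 gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"` processes the avatars in batches of 100.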
Currently this task has to be executed manually. It migrates existing
uploads to object storage, but all new uploads will still be stored on
the local disk. In the future you will be given an option to define a
default storage for all new files.
---
**In installations from source:**
_The uploads are stored by default in
`/home/git/gitlab/public/uploads/-/system`._
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
uploads:
  object_store:
    enabled: true
    remote_directory: "uploads" # The bucket name
    connection:
      provider: AWS # Only AWS supported at the moment
      aws_access_key_id: AWS_ACCESS_KEY_ID
      aws_secret_access_key: AWS_SECRET_ACCESS_KEY
      region: eu-central-1
```
1. Save the file and [restart GitLab][] for the changes to take effect.
1. Migrate any existing local uploads to the object storage:
>**Notes:**
- This task complies with the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing is done in a background worker and requires **no downtime**.
- To migrate in production, set the `RAILS_ENV=production` environment variable.
```bash
# sudo -u git -H bundle exec rake gitlab:uploads:migrate
# Avatars
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
# Attachments
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
# Markdown
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
```
Currently this task has to be executed manually. It migrates existing
uploads to object storage, but all new uploads will still be stored on
the local disk. In the future you will be given an option to define a
default storage for all new files.
[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
[eep]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition Premium"
[ee-3867]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3867
......@@ -14,3 +14,4 @@ comments: false
- [Webhooks](web_hooks.md)
- [Import](import.md) of git repositories in bulk
- [Rebuild authorized_keys file](http://docs.gitlab.com/ce/raketasks/maintenance.html#rebuild-authorized_keys-file) task for administrators
- [Migrate Uploads](../administration/raketasks/uploads/migrate.md)
......@@ -5,6 +5,7 @@ Documentation on how to use Git LFS are under [Managing large binary files with
## Requirements
* Git LFS is supported in GitLab starting with version 8.2.
* Support for object storage, such as AWS S3, was introduced in 10.0.
* Users need to install [Git LFS client](https://git-lfs.github.com) version 1.0.1 and up.
## Configuration
......@@ -12,16 +13,18 @@ Documentation on how to use Git LFS are under [Managing large binary files with
Git LFS objects can be large in size. By default, they are stored on the server
GitLab is installed on.
There are two configuration options to help GitLab server administrators:
There are various configuration options to help GitLab server administrators:
* Enabling/disabling Git LFS support
* Changing the location of LFS object storage
* Setting up AWS S3 compatible object storage
### Omnibus packages
In `/etc/gitlab/gitlab.rb`:
```ruby
# Change to true to enable lfs
gitlab_rails['lfs_enabled'] = false
# Optionally, change the storage path location. Defaults to
......@@ -35,11 +38,114 @@ gitlab_rails['lfs_storage_path'] = "/mnt/storage/lfs-objects"
In `config/gitlab.yml`:
```yaml
# Change to true to enable lfs
lfs:
  enabled: false
  storage_path: /mnt/storage/lfs-objects
```
## Setting up S3 compatible object storage
> **Note:** [Introduced][ee-2760] in [GitLab Premium][eep] 10.0.
> Available in GitLab CE 10.7.
It is possible to store LFS objects on remote object storage instead of on a local disk.
This allows you to offload storage to an external AWS S3 compatible service, freeing up disk space locally. You can also host your own S3 compatible storage decoupled from GitLab, with a service such as [Minio](https://www.minio.io/).
Object storage currently transfers files first to GitLab, and then to the object storage in a second stage. This can be done either by using a rake task to transfer existing objects, or in a background job after each file is received.
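For illustration, transferring the existing objects reduces to a loop like the following sketch, mirroring the `gitlab:lfs:migrate` rake task added by this merge request:
```ruby
# Move every locally stored LFS object to the remote object store.
LfsObject.with_files_stored_locally.find_each(batch_size: 10) do |lfs_object|
  lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
```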
### Object Storage Settings
For source installations, the following settings are nested under `lfs:` and then `object_store:`. On Omnibus installs they are prefixed by `lfs_object_store_`.
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where LFS objects will be stored | |
| `background_upload` | Set to `false` to disable automatic upload. This option may be removed once uploads are written directly to S3 | `true` |
| `proxy_download` | Set to `true` to proxy all files served through GitLab. When `false`, clients are redirected to download directly from remote storage, which reduces egress traffic | `false` |
| `connection` | Various connection options described below | |
#### S3 compatible connection settings
The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
| Setting | Description | Default |
|---------|-------------|---------|
| `provider` | Always `AWS` for compatible hosts | AWS |
| `aws_access_key_id` | AWS credentials, or compatible | |
| `aws_secret_access_key` | AWS credentials, or compatible | |
| `region` | AWS region | us-east-1 |
| `host` | S3 compatible host for when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
### From source
1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
lines:
```yaml
lfs:
  enabled: true
  object_store:
    enabled: true
    remote_directory: lfs-objects # Bucket name
    connection:
      provider: AWS
      aws_access_key_id: 1ABCD2EFGHI34JKLM567N
      aws_secret_access_key: abcdefhijklmnopQRSTUVwxyz0123456789ABCDE
      region: eu-central-1
      # Use the following options to configure an AWS compatible host such as Minio
      host: 'localhost'
      endpoint: 'http://127.0.0.1:9000'
      path_style: true
```
1. Save the file and [restart GitLab][] for the changes to take effect.
1. Migrate any existing local LFS objects to the object storage:
```bash
sudo -u git -H bundle exec rake gitlab:lfs:migrate RAILS_ENV=production
```
This will migrate existing LFS objects to object storage. New LFS objects
will be forwarded to object storage unless `background_upload` is set to
`false` under the `object_store:` settings in `gitlab.yml`.
### In Omnibus
1. Edit `/etc/gitlab/gitlab.rb` and add the following lines, replacing the
   example values with your own:
```ruby
gitlab_rails['lfs_object_store_enabled'] = true
gitlab_rails['lfs_object_store_remote_directory'] = "lfs-objects"
gitlab_rails['lfs_object_store_connection'] = {
  'provider' => 'AWS',
  'region' => 'eu-central-1',
  'aws_access_key_id' => '1ABCD2EFGHI34JKLM567N',
  'aws_secret_access_key' => 'abcdefhijklmnopQRSTUVwxyz0123456789ABCDE',
  # The below options configure an S3 compatible host instead of AWS
  'host' => 'localhost',
  'endpoint' => 'http://127.0.0.1:9000',
  'path_style' => true
}
```
1. Save the file and [reconfigure GitLab][] for the changes to take effect.
1. Migrate any existing local LFS objects to the object storage:
```bash
gitlab-rake gitlab:lfs:migrate
```
This will migrate existing LFS objects to object storage. New LFS objects
will be forwarded to object storage unless
`gitlab_rails['lfs_object_store_background_upload']` is set to false.
## Storage statistics
You can see the total storage used for LFS objects on groups and projects
......@@ -48,10 +154,13 @@ and [projects APIs](../../api/projects.md).
## Known limitations
* Currently, storing GitLab Git LFS objects on a non-local storage (like S3 buckets)
is not supported
* Support for removing unreferenced LFS objects was added in GitLab 8.14.
* LFS authentication via SSH was added in GitLab 8.12.
* Only compatible with Git LFS client versions 1.1.0 and up, or 1.0.2.
* The storage statistics currently count each LFS object once for every
  project linking to it.
[reconfigure gitlab]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
[restart gitlab]: ../../administration/restart_gitlab.md#installations-from-source "How to restart GitLab"
[eep]: https://about.gitlab.com/products/ "GitLab Premium"
[ee-2760]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/2760
......@@ -410,7 +410,7 @@ module API
)
end
def present_file!(path, filename, content_type = 'application/octet-stream')
def present_disk_file!(path, filename, content_type = 'application/octet-stream')
filename ||= File.basename(path)
header['Content-Disposition'] = "attachment; filename=#{filename}"
header['Content-Transfer-Encoding'] = 'binary'
......@@ -426,13 +426,17 @@ module API
end
end
def present_artifacts!(artifacts_file)
return not_found! unless artifacts_file.exists?
def present_carrierwave_file!(file, supports_direct_download: true)
return not_found! unless file.exists?
if artifacts_file.file_storage?
present_file!(artifacts_file.path, artifacts_file.filename)
if file.file_storage?
present_disk_file!(file.path, file.filename)
elsif supports_direct_download && file.class.direct_download_enabled?
redirect(file.url)
else
redirect_to(artifacts_file.url)
header(*Gitlab::Workhorse.send_url(file.url))
status :ok
body
end
end
......
......@@ -28,7 +28,7 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
present_artifacts!(latest_build.artifacts_file)
present_carrierwave_file!(latest_build.artifacts_file)
end
desc 'Download the artifacts archive from a job' do
......@@ -43,7 +43,7 @@ module API
build = find_build!(params[:job_id])
present_artifacts!(build.artifacts_file)
present_carrierwave_file!(build.artifacts_file)
end
desc 'Download a specific file from artifacts archive' do
......
......@@ -72,7 +72,7 @@ module API
present build, with: Entities::Job
end
# TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
# TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific job of a project'
......
......@@ -25,7 +25,7 @@ module API
render_api_error!('404 Not found or has expired', 404) unless path
present_file!(path, File.basename(path), 'application/gzip')
present_disk_file!(path, File.basename(path), 'application/gzip')
end
desc 'Start export' do
......
......@@ -244,11 +244,12 @@ module API
params do
requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token)
optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
end
get '/:id/artifacts' do
job = authenticate_job!
present_artifacts!(job.artifacts_file)
present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
end
end
end
......
......@@ -85,7 +85,7 @@ module API
build = get_build!(params[:build_id])
present_artifacts!(build.artifacts_file)
present_carrierwave_file!(build.artifacts_file)
end
desc 'Download the artifacts file from build' do
......@@ -102,10 +102,10 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
present_artifacts!(latest_build.artifacts_file)
present_carrierwave_file!(latest_build.artifacts_file)
end
# TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
# TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific build of a project'
......
##
# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# source: https://gitlab.com/snippets/1685610
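#
# A minimal usage sketch (the URL and size here are hypothetical):
#
#   io = Gitlab::Ci::Trace::HttpIO.new('https://example.com/trace.log', 1024)
#   io.each_line { |line| puts line } # chunks are fetched lazily, BUFFER_SIZE bytes at a time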
module Gitlab
module Ci
class Trace
class HttpIO
BUFFER_SIZE = 128.kilobytes
InvalidURLError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
attr_reader :uri, :size
attr_reader :tell
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(url, size)
raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
@uri = URI(url)
@size = size
@tell = 0
end
def close
# no-op
end
def binmode
# no-op
end
def binmode?
true
end
def path
nil
end
def url
@uri.to_s
end
def seek(pos, where = IO::SEEK_SET)
new_pos =
case where
when IO::SEEK_END
size + pos
when IO::SEEK_SET
pos
when IO::SEEK_CUR
tell + pos
else
-1
end
raise 'new position is outside of file' if new_pos < 0 || new_pos > size
@tell = new_pos
end
def eof?
tell == size
end
def each_line
until eof?
line = readline
break if line.nil?
yield(line)
end
end
def read(length = nil)
out = ""
until eof? || (length && out.length >= length)
data = get_chunk
break if data.empty?
out << data
@tell += data.bytesize
end
out = out[0, length] if length && out.length > length
out
end
def readline
out = ""
until eof?
data = get_chunk
new_line = data.index("\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
out << data
@tell += data.bytesize
end
end
out
end
def write(data)
raise NotImplementedError
end
def truncate(offset)
raise NotImplementedError
end
def flush
raise NotImplementedError
end
def present?
true
end
private
##
# The below methods are not implemented in IO class
#
def in_range?
@chunk_range&.include?(tell)
end
def get_chunk
unless in_range?
response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
http.request(request)
end
raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
@chunk = response.body.force_encoding(Encoding::BINARY)
@chunk_range = response.content_range
##
# Note: If the provider does not return a Content-Range, we set it to the range we requested
# Provider: minio
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: AWS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: GCS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPOK 200
@chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
end
@chunk[chunk_offset..BUFFER_SIZE]
end
def request
Net::HTTP::Get.new(uri).tap do |request|
request.set_range(chunk_start, BUFFER_SIZE)
end
end
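# Position arithmetic for the sliding window: chunk_start is the
# BUFFER_SIZE-aligned offset of the chunk containing `tell`, and
# chunk_offset is the position of `tell` within that chunk.
# For example, with BUFFER_SIZE = 131072 and tell = 200_000:
#   chunk_start  #=> 131072
#   chunk_offset #=> 68928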
def chunk_offset
tell % BUFFER_SIZE
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
end
end
end
end
end
......@@ -8,7 +8,7 @@ module Gitlab
attr_reader :stream
delegate :close, :tell, :seek, :size, :path, :truncate, to: :stream, allow_nil: true
delegate :close, :tell, :seek, :size, :path, :url, :truncate, to: :stream, allow_nil: true
delegate :valid?, to: :stream, as: :present?, allow_nil: true
......
......@@ -12,7 +12,7 @@ module Gitlab
private
def relation
LfsObject.all
LfsObject.with_files_stored_locally
end
def expected_checksum(lfs_object)
......
......@@ -12,7 +12,7 @@ module Gitlab
private
def relation
Upload.all
Upload.with_files_stored_locally
end
def expected_checksum(upload)
......
require 'logger'
require 'resolv-replace'
desc "GitLab | Migrate files for artifacts to comply with new storage format"
namespace :gitlab do
namespace :artifacts do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger.info('Starting transfer of artifacts')
Ci::Build.joins(:project)
.with_artifacts_stored_locally
.find_each(batch_size: 10) do |build|
begin
build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
rescue => e
logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
end
end
end
end
end
require 'logger'
desc "GitLab | Migrate LFS objects to remote storage"
namespace :gitlab do
namespace :lfs do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger.info('Starting transfer of LFS files to object storage')
LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object|
begin
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
rescue => e
logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
end
end
end
end
end
namespace :gitlab do
namespace :uploads do
desc 'GitLab | Uploads | Migrate the uploaded files to object storage'
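# Example invocation (matching the documentation earlier in this merge request):
#   gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
# BATCH (default 200) controls how many uploads are enqueued per background job.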
task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
batch_size = ENV.fetch('BATCH', 200).to_i
@to_store = ObjectStorage::Store::REMOTE
@mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
@uploader_class = args.uploader_class.constantize
@model_class = args.model_class.constantize
uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
end
def enqueue_batch(batch, index)
job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
@mounted_as,
@to_store)
puts "Enqueued job ##{index}: #{job}"
rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
# continue for the next batch
puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
end
def uploads
Upload.class_eval { include EachBatch } unless Upload < EachBatch
Upload
.where.not(store: @to_store)
.where(uploader: @uploader_class.to_s,
model_type: @model_class.base_class.sti_name)
end
end
end
require 'spec_helper'
describe SendFileUpload do
let(:uploader_class) do
Class.new(GitlabUploader) do
include ObjectStorage::Concern
storage_options Gitlab.config.uploads
private
# user/:id
def dynamic_segment
File.join(model.class.to_s.underscore, model.id.to_s)
end
end
end
let(:controller_class) do
Class.new do
include SendFileUpload
end
end
let(:object) { build_stubbed(:user) }
let(:uploader) { uploader_class.new(object, :file) }
describe '#send_upload' do
let(:controller) { controller_class.new }
let(:temp_file) { Tempfile.new('test') }
subject { controller.send_upload(uploader) }
before do
FileUtils.touch(temp_file)
end
after do
FileUtils.rm_f(temp_file)
end
context 'when local file is used' do
before do
uploader.store!(temp_file)
end
it 'sends a file' do
expect(controller).to receive(:send_file).with(uploader.path, anything)
subject
end
end
context 'when remote file is used' do
before do
stub_uploads_object_storage(uploader: uploader_class)
uploader.object_store = ObjectStorage::Store::REMOTE
uploader.store!(temp_file)
end
context 'and proxying is enabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
end
it 'sends a file' do
headers = double
expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
expect(controller).to receive(:headers) { headers }
expect(controller).to receive(:head).with(:ok)
subject
end
end
context 'and proxying is disabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
end
it 'sends a file' do
expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)
subject
end
end
end
end
end
......@@ -145,9 +145,23 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.root }
end
end
context 'when using remote file storage' do
before do
stub_artifacts_object_storage
end
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' }
end
end
end
end
......
# coding: utf-8
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
......@@ -203,6 +205,41 @@ describe Projects::JobsController do
end
end
context 'when trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
context 'when there are no network issues' do
before do
stub_remote_trace_206
get_trace
end
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['html']).to eq(job.trace.html)
end
end
context 'when there is a network issue' do
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
......@@ -446,14 +483,18 @@ describe Projects::JobsController do
end
describe 'GET raw' do
before do
get_raw
subject do
post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read
......@@ -464,6 +505,8 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'sends a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'BUILD TRACE'
......@@ -474,14 +517,22 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do
response = subject
expect(response).to have_gitlab_http_status(:not_found)
end
end
def get_raw
post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
context 'when the trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
end
it 'redirects to the trace file url' do
expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
end
end
end
end
......@@ -8,10 +8,7 @@ describe Projects::RawController do
let(:id) { 'master/README.md' }
it 'delivers ASCII file' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
......@@ -25,10 +22,7 @@ describe Projects::RawController do
let(:id) { 'master/files/images/6049019_460s.jpg' }
it 'sets image content type header' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('image/jpeg')
......@@ -54,21 +48,40 @@ describe Projects::RawController do
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
end
context 'and lfs uses object storage' do
before do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect to file' do
get_show(public_project, id)
expect(response).to have_gitlab_http_status(302)
expect(response.location).to include(lfs_object.reload.file.path)
end
it 'sets content disposition' do
get_show(public_project, id)
file_uri = URI.parse(response.location)
params = CGI.parse(file_uri.query)
expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
end
end
end
context 'when project does not have access' do
it 'does not serve the file' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_gitlab_http_status(404)
end
......@@ -81,10 +94,7 @@ describe Projects::RawController do
end
it 'delivers ASCII file' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
......@@ -95,4 +105,10 @@ describe Projects::RawController do
end
end
end
def get_show(project, id)
get(:show, namespace_id: project.namespace.to_param,
project_id: project,
id: id)
end
end
......@@ -2,8 +2,21 @@
FactoryBot.define do
factory :appearance do
title "MepMep"
description "This is my Community Edition instance"
title "GitLab Community Edition"
description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines"
end
trait :with_logo do
logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_header_logo do
header_logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_logos do
with_logo
with_header_logo
end
end
......@@ -5,6 +5,10 @@ FactoryBot.define do
job factory: :ci_build
file_type :archive
trait :remote_store do
file_store JobArtifactUploader::Store::REMOTE
end
after :build do |artifact|
artifact.project ||= artifact.job.project
end
......
......@@ -15,4 +15,8 @@ FactoryBot.define do
trait :correct_oid do
oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'
end
trait :object_storage do
file_store { LfsObjectUploader::Store::REMOTE }
end
end
......@@ -5,6 +5,7 @@ FactoryBot.define do
uploader "AvatarUploader"
mount_point :avatar
secret nil
store ObjectStorage::Store::LOCAL
# we should build a mount agnostic upload by default
transient do
......@@ -27,6 +28,10 @@ FactoryBot.define do
secret SecureRandom.hex
end
trait :object_storage do
store ObjectStorage::Store::REMOTE
end
trait :namespace_upload do
model { build(:group) }
path { File.join(secret, filename) }
......
require 'spec_helper'
describe 'Fog::Storage::GoogleXML::File' do
let(:storage) do
Fog.mock!
Fog::Storage.new({
google_storage_access_key_id: "asdf",
google_storage_secret_access_key: "asdf",
provider: "Google"
})
end
let(:file) do
directory = storage.directories.create(key: 'data')
directory.files.create(
body: 'Hello World!',
key: 'hello_world.txt'
)
end
it 'delegates to #get_https_url' do
expect(file.url(Time.now)).to start_with("https://")
end
end
require 'spec_helper'
describe Gitlab::Ci::Trace::HttpIO do
include HttpIOHelpers
let(:http_io) { described_class.new(url, size) }
let(:url) { remote_trace_url }
let(:size) { remote_trace_size }
describe '#close' do
subject { http_io.close }
it { is_expected.to be_nil }
end
describe '#binmode' do
subject { http_io.binmode }
it { is_expected.to be_nil }
end
describe '#binmode?' do
subject { http_io.binmode? }
it { is_expected.to be_truthy }
end
describe '#path' do
subject { http_io.path }
it { is_expected.to be_nil }
end
describe '#url' do
subject { http_io.url }
it { is_expected.to eq(url) }
end
describe '#seek' do
subject { http_io.seek(pos, where) }
context 'when moving pos to the end of the file' do
let(:pos) { 0 }
let(:where) { IO::SEEK_END }
it { is_expected.to eq(size) }
end
context 'when moving pos to the middle of the file' do
let(:pos) { size / 2 }
let(:where) { IO::SEEK_SET }
it { is_expected.to eq(size / 2) }
end
context 'when moving pos around' do
it 'matches the result' do
expect(http_io.seek(0)).to eq(0)
expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
end
end
end
describe '#eof?' do
subject { http_io.eof? }
context 'when current pos is at end of the file' do
before do
http_io.seek(size, IO::SEEK_SET)
end
it { is_expected.to be_truthy }
end
context 'when current pos is not at end of the file' do
before do
http_io.seek(0, IO::SEEK_SET)
end
it { is_expected.to be_falsey }
end
end
describe '#each_line' do
subject { http_io.each_line }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
it 'yields lines' do
expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
end
context 'when the bucket is on GCS' do
context 'when BUFFER_SIZE is larger than file size' do
before do
stub_remote_trace_200
set_larger_buffer_size_than(size)
end
it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
http_io.each_line { |line| }
end
end
end
end
describe '#read' do
subject { http_io.read(length) }
context 'when there are no network issues' do
before do
stub_remote_trace_206
end
context 'when reading the whole file' do
let(:length) { nil }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when reading only the first 100 bytes' do
let(:length) { 100 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
end
context 'when trying to read past the end of the file' do
let(:length) { size + 1000 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when trying to read 0 bytes' do
let(:length) { 0 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
describe '#readline' do
subject { http_io.readline }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
shared_examples 'all line matching' do
it 'reads a line' do
(0...remote_trace_body.lines.count).each do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when pos is at middle of the file' do
before do
set_smaller_buffer_size_than(size)
http_io.seek(size / 2)
string_io.seek(size / 2)
end
it 'reads from pos' do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
describe '#write' do
subject { http_io.write(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#truncate' do
subject { http_io.truncate(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#flush' do
subject { http_io.flush }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#present?' do
subject { http_io.present? }
it { is_expected.to be_truthy }
end
end
......@@ -265,7 +265,9 @@ CommitStatus:
- target_url
- description
- artifacts_file
- artifacts_file_store
- artifacts_metadata
- artifacts_metadata_store
- erased_by_id
- erased_at
- artifacts_expire_at
......
......@@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do
expect(failures.keys).to contain_exactly(lfs_object)
expect(failure.to_s).to include('Checksum mismatch')
end
context 'with remote files' do
before do
stub_lfs_object_storage
end
it 'skips LFS objects in object storage' do
local_failure = create(:lfs_object)
create(:lfs_object, :object_storage)
failures = {}
described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
expect(failures.keys).to contain_exactly(local_failure)
end
end
end
end
......@@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do
expect(failures.keys).to contain_exactly(upload)
expect(failure.to_s).to include('Checksum missing')
end
context 'with remote files' do
before do
stub_uploads_object_storage(AvatarUploader)
end
it 'skips uploads in object storage' do
local_failure = create(:upload)
create(:upload, :object_storage)
failures = {}
described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
expect(failures.keys).to contain_exactly(local_failure)
end
end
end
end
......@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
after do
Upload.reset_column_information
end
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
......
......@@ -198,6 +198,16 @@ describe Ci::Build do
end
context 'when legacy artifacts are used' do
let(:build) { create(:ci_build, :legacy_artifacts) }
subject { build.artifacts? }
context 'is expired' do
let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
......@@ -208,13 +218,25 @@ describe Ci::Build do
let(:build) { create(:ci_build, :legacy_artifacts) }
it { is_expected.to be_truthy }
end
end
end
context 'is expired' do
let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
describe '#browsable_artifacts?' do
subject { build.browsable_artifacts? }
it { is_expected.to be_falsy }
end
context 'artifacts metadata does not exist' do
before do
build.update_attributes(legacy_artifacts_metadata: nil)
end
it { is_expected.to be_falsy }
end
context 'artifacts metadata does exist' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end
end
......
......@@ -15,6 +15,50 @@ describe Ci::JobArtifact do
it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) }
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
describe '#schedule_background_upload' do
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
end
context 'when background upload is disabled' do
before do
stub_artifacts_object_storage(background_upload: false)
end
it 'does not schedule the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
......
require 'spec_helper'
describe LfsObject do
describe '#local_store?' do
it 'returns true when file_store is nil' do
subject.file_store = nil
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
end
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
subject { create(:lfs_object, :with_file) }
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
context 'when licensed' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
subject
end
it 'schedules the model for migration once' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
lfs_object = create(:lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
end
end
end
context 'when background upload is disabled' do
before do
stub_lfs_object_storage(background_upload: false)
end
it 'does not schedule the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
require 'spec_helper'
describe API::Jobs do
include HttpIOHelpers
set(:project) do
create(:project, :repository, public_builds: false)
end
......@@ -112,6 +114,7 @@ describe API::Jobs do
let(:query) { Hash.new }
before do
job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
......@@ -335,10 +338,55 @@ describe API::Jobs do
end
end
context 'when artifacts are stored remotely' do
let(:proxy_download) { false }
before do
stub_artifacts_object_storage(proxy_download: proxy_download)
end
let(:job) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'when proxy download is enabled' do
let(:proxy_download) { true }
it 'responds with the workhorse send-url' do
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
context 'authorized user' do
it 'returns the file remote URL' do
expect(response).to redirect_to(artifact.file.url)
end
end
context 'unauthorized user' do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
expect(response).to have_gitlab_http_status(404)
end
end
end
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(404)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
......@@ -349,6 +397,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
stub_artifacts_object_storage
job.success
end
......@@ -412,9 +461,24 @@ describe API::Jobs do
"attachment; filename=#{job.artifacts_file.filename}" }
end
it { expect(response).to have_gitlab_http_status(200) }
it { expect(response).to have_http_status(:ok) }
it { expect(response.headers).to include(download_headers) }
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_http_status(:found)
end
end
end
context 'with regular branch' do
......@@ -451,6 +515,22 @@ describe API::Jobs do
end
context 'authorized user' do
context 'when trace is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
stub_remote_trace_206
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
it 'returns specific job trace' do
expect(response).to have_gitlab_http_status(200)
expect(response.body).to eq(job.trace.raw)
end
end
context 'when trace is artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
......
......@@ -200,7 +200,7 @@ describe API::Runner do
let(:project) { create(:project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) }
let!(:job) do
let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end
......@@ -215,6 +215,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
job
stub_container_registry_config(enabled: false)
end
......@@ -888,6 +889,7 @@ describe API::Runner do
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before do
stub_artifacts_object_storage
job.run!
end
......@@ -1179,27 +1181,67 @@ describe API::Runner do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
before do
download_artifact
end
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts) }
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::Store::LOCAL }
before do
create(:ci_job_artifact, :archive, file_store: store, job: job)
end
context 'when using job token' do
it 'downloads artifacts' do
expect(response).to have_gitlab_http_status(200)
expect(response.headers).to include download_headers
context 'when artifacts are stored locally' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
before do
download_artifact
end
it 'downloads artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include download_headers
end
end
context 'when artifacts are stored remotely' do
let(:store) { JobArtifactUploader::Store::REMOTE }
let!(:job) { create(:ci_build) }
context 'when proxy download is being used' do
before do
download_artifact(direct_download: false)
end
it 'uses workhorse send-url' do
expect(response).to have_gitlab_http_status(200)
expect(response.headers).to include(
'Gitlab-Workhorse-Send-Data' => /send-url:/)
end
end
context 'when direct download is being used' do
before do
download_artifact(direct_download: true)
end
it 'receives a redirect for downloading artifacts' do
expect(response).to have_gitlab_http_status(302)
expect(response.headers).to include('Location')
end
end
end
end
context 'when using runners token' do
let(:token) { job.project.runners_token }
before do
download_artifact
end
it 'responds with forbidden' do
expect(response).to have_gitlab_http_status(403)
end
......@@ -1208,12 +1250,16 @@ describe API::Runner do
context 'when job does not have artifacts' do
it 'responds with not found' do
download_artifact
expect(response).to have_gitlab_http_status(404)
end
end
def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token)
job.reload
get api("/jobs/#{job.id}/artifacts"), params, request_headers
end
end
......
......@@ -216,6 +216,7 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
......@@ -230,13 +231,24 @@ describe API::V3::Builds do
end
it 'returns specific job artifacts' do
expect(response).to have_gitlab_http_status(200)
expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
end
end
end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
it 'returns location redirect' do
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(302)
end
end
context 'unauthorized user' do
let(:api_user) { nil }
......@@ -256,6 +268,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
stub_artifacts_object_storage
build.success
end
......@@ -318,9 +331,24 @@ describe API::V3::Builds do
"attachment; filename=#{build.artifacts_file.filename}" }
end
it { expect(response).to have_gitlab_http_status(200) }
it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
before do
build.reload
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end
context 'with regular branch' do
......
......@@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do
describe 'when fetching lfs object' do
let(:project) { create(:project) }
let(:update_permissions) { }
let(:before_get) { }
before do
enable_lfs
update_permissions
before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end
......@@ -239,6 +241,21 @@ describe 'Git LFS API and storage' do
end
it_behaves_like 'responds with a file'
context 'when LFS uses object storage' do
let(:before_get) do
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
expect(response).to have_gitlab_http_status(302)
end
it 'responds with the file location' do
expect(response.location).to include(lfs_object.reload.file.path)
end
end
end
end
......@@ -978,6 +995,32 @@ describe 'Git LFS API and storage' do
end
end
context 'and workhorse requests upload finalize for a new lfs object' do
before do
lfs_object.destroy
end
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
end
context 'with object storage enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules migration of file to object storage' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
end
end
context 'invalid tempfiles' do
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('foo/bar')
......@@ -1177,7 +1220,9 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
def put_finalize(lfs_tmp = lfs_tmp_file)
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
setup_tempfile(lfs_tmp) if with_tempfile
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
end
......@@ -1185,6 +1230,13 @@ describe 'Git LFS API and storage' do
def lfs_tmp_file
"#{sample_oid}012345678"
end
def setup_tempfile(lfs_tmp)
upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
end
end
def enable_lfs
......
......@@ -117,6 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0)
end
......
......@@ -28,7 +28,8 @@ describe Ci::RetryBuildService do
%i[type lock_version target_url base_tags trace_sections
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason].freeze
user_id auto_canceled_by_id retried failure_reason
artifacts_file_store artifacts_metadata_store].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
......
......@@ -6,7 +6,7 @@ describe Issues::MoveService do
let(:title) { 'Some issue' }
let(:description) { 'Some issue description' }
let(:old_project) { create(:project) }
let(:new_project) { create(:project, group: create(:group)) }
let(:new_project) { create(:project) }
let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') }
let(:old_issue) do
......
module HttpIOHelpers
def stub_remote_trace_206
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 206) }
end
def stub_remote_trace_200
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 200) }
end
def stub_remote_trace_500
WebMock.stub_request(:get, remote_trace_url)
.to_return(status: [500, "Internal Server Error"])
end
def remote_trace_url
"http://trace.com/trace"
end
def remote_trace_response(request, response_status)
range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
{
status: response_status,
headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
body: range_trace_body(range[1].to_i, range[2].to_i)
}
end
def remote_trace_response_headers(response_status, from, to)
headers = { 'Content-Type' => 'text/plain' }
if response_status == 206
# use merge! so the Content-Range header is actually kept (merge returns a copy)
headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
end
headers
end
def range_trace_body(from, to)
remote_trace_body[from..to]
end
def remote_trace_body
@remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
end
def remote_trace_size
remote_trace_body.length
end
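# The helpers below stub Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE relative to a
# file size, rounded to whole 128-byte blocks. For example,
# set_smaller_buffer_size_than(1000) stubs BUFFER_SIZE to 384.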
def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks / 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks * 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
end
shared_context 'with storage' do |store, **stub_params|
before do
subject.object_store = store
end
end
shared_examples "migrates" do |to_store:, from_store: nil|
let(:to) { to_store }
let(:from) { from_store || subject.object_store }
def migrate(to)
subject.migrate!(to)
end
def checksum
Digest::SHA256.hexdigest(subject.read)
end
before do
migrate(from)
end
it 'returns corresponding file type' do
expect(subject).to be_an(CarrierWave::Uploader::Base)
expect(subject).to be_a(ObjectStorage::Concern)
if from == described_class::Store::REMOTE
expect(subject.file).to be_a(CarrierWave::Storage::Fog::File)
elsif from == described_class::Store::LOCAL
expect(subject.file).to be_a(CarrierWave::SanitizedFile)
else
raise 'Unexpected file type'
end
end
it 'does nothing when migrating to the current store' do
expect { migrate(from) }.not_to change { subject.object_store }.from(from)
end
it 'migrates to the specified store' do
from_checksum = checksum
expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
expect(checksum).to eq(from_checksum)
end
it 'removes the original file after the migration' do
original_file = subject.file.path
migrate(to)
expect(File.exist?(original_file)).to be_falsey
end
it 'can access the original file during migration' do
file = subject.file
allow(subject).to receive(:delete_migrated_file) { } # Remove as a callback of :migrate
allow(subject).to receive(:record_upload) { } # Remove as a callback of :store (:record_upload)
expect(file.exists?).to be_truthy
expect { migrate(to) }.not_to change { file.exists? }
end
context 'when migrate! is not occupied by another process' do
it 'executes migrate!' do
expect(subject).to receive(:object_store=).at_least(1)
migrate(to)
end
end
context 'when migrate! is occupied by another process' do
let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" }
before do
@uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
end
it 'does not execute migrate!' do
expect(subject).not_to receive(:unsafe_migrate!)
expect { migrate(to) }.to raise_error('Already running')
end
after do
Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid)
end
end
context 'migration is unsuccessful' do
shared_examples "handles gracefully" do |error:|
it 'does not update the object_store' do
expect { migrate(to) }.to raise_error(error)
expect(subject.object_store).to eq(from)
end
it 'does not delete the original file' do
expect { migrate(to) }.to raise_error(error)
expect(subject.exists?).to be_truthy
end
end
context 'when the store is not supported' do
let(:to) { -1 } # not a valid store
include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
end
context 'upon a fog failure' do
before do
storage_class = subject.send(:storage_for, to).class
expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
end
include_examples "handles gracefully", error: "Store failure."
end
context 'upon a database failure' do
before do
expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
end
include_examples "handles gracefully", error: "ActiveRecord failure."
end
end
end
module StubConfiguration
def stub_object_storage_uploader(
config:, uploader:, remote_directory:,
enabled: true,
proxy_download: false,
background_upload: false)
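# Fog.mock! swaps in Fog's in-memory backend so no real object storage is contacted.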
Fog.mock!
allow(config).to receive(:enabled) { enabled }
allow(config).to receive(:proxy_download) { proxy_download }
allow(config).to receive(:background_upload) { background_upload }
return unless enabled
::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
begin
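# Create the remote test directory; a Conflict error just means it already exists.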
connection.directories.create(key: remote_directory)
rescue Excon::Error::Conflict
end
end
end
def stub_artifacts_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
uploader: JobArtifactUploader,
remote_directory: 'artifacts',
**params)
end
def stub_lfs_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
uploader: LfsObjectUploader,
remote_directory: 'lfs-objects',
**params)
end
def stub_uploads_object_storage(uploader = described_class, **params)
stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
uploader: uploader,
remote_directory: 'uploads',
**params)
end
end
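# A minimal usage sketch (hypothetical spec, not part of this change): a spec
# that includes StubConfiguration can flip the storage flags per example, e.g.
# to exercise the proxy-download code path:
#
#   before do
#     stub_artifacts_object_storage(proxy_download: true)
#   end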
require 'rake_helper'
describe 'gitlab:artifacts namespace rake task' do
before(:context) do
Rake.application.rake_require 'tasks/gitlab/artifacts/migrate'
end
let(:object_storage_enabled) { false }
before do
stub_artifacts_object_storage(enabled: object_storage_enabled)
end
subject { run_rake_task('gitlab:artifacts:migrate') }
context 'legacy artifacts' do
describe 'migrate' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
let(:store) { nil }
it "migrates file to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is defined' do
let(:object_storage_enabled) { true }
it "migrates file to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is not defined' do
it "fails to migrate to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
end
context 'job artifacts' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
let(:store) { nil }
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is defined' do
let(:object_storage_enabled) { true }
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
context 'and remote storage is not defined' do
it "fails to migrate to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
end
require 'rake_helper'
describe 'gitlab:lfs namespace rake task' do
before :all do
Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
end
describe 'migrate' do
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate
run_rake_task('gitlab:lfs:migrate')
end
context 'object storage disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
end
end
context 'object storage enabled' do
before do
stub_lfs_object_storage
end
it 'migrates local file to object storage' do
expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
end
end
end
require 'rake_helper'
describe 'gitlab:uploads:migrate rake tasks' do
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:model_class) { Project }
let(:uploader_class) { AvatarUploader }
let(:mounted_as) { :avatar }
let(:batch_size) { 3 }
before do
stub_env('BATCH', batch_size.to_s)
stub_uploads_object_storage(uploader_class)
Rake.application.rake_require 'tasks/gitlab/uploads/migrate'
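# Stub the worker so nothing is actually migrated; the example below only
# asserts on how jobs are enqueued.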
allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
end
def run
args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
run_rake_task("gitlab:uploads:migrate", *args)
end
it 'enqueues jobs in batches' do
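# 10 avatar uploads with BATCH=3 => ceil(10/3) = 4 batches enqueued.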
expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times
run
end
end
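# Hypothetical shell invocation of the task exercised above; BATCH controls the
# batch size and the bracketed arguments mirror the spec's
# [uploader, model, mounted_as] triple:
#
#   BATCH=3 bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, avatar]"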
......@@ -11,4 +11,26 @@ describe AttachmentUploader do
store_dir: %r[uploads/-/system/note/attachment/],
upload_path: %r[uploads/-/system/note/attachment/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[note/attachment/],
upload_path: %r[note/attachment/]
end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
require 'spec_helper'
describe AvatarUploader do
let(:model) { create(:user, :with_avatar) }
let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) }
......@@ -12,15 +12,28 @@ describe AvatarUploader do
upload_path: %r[uploads/-/system/user/avatar/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
describe '#move_to_cache' do
it 'is false' do
expect(uploader.move_to_cache).to eq(false)
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[user/avatar/],
upload_path: %r[user/avatar/]
end
describe '#move_to_store' do
it 'is false' do
expect(uploader.move_to_store).to eq(false)
context "with a file" do
let(:project) { create(:project, :with_avatar) }
let(:uploader) { project.avatar }
let(:upload) { uploader.upload }
before do
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
......@@ -36,6 +36,12 @@ describe FileMover do
it 'creates a new update record' do
expect { subject }.to change { Upload.count }.by(1)
end
it 'schedules a background migration' do
expect_any_instance_of(PersonalFileUploader).to receive(:schedule_background_upload).once
subject
end
end
context 'when update_markdown fails' do
......
......@@ -11,32 +11,41 @@ describe FileUploader do
shared_examples 'builds correct legacy storage paths' do
include_examples 'builds correct paths',
store_dir: %r{awesome/project/\h+},
upload_path: %r{\h+/<filename>},
absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
shared_examples 'uses hashed storage' do
context 'when rolled out attachments' do
let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
context 'legacy storage' do
it_behaves_like 'builds correct legacy storage paths'
before do
allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
end
context 'uses hashed storage' do
context 'when rolled out attachments' do
let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
it_behaves_like 'builds correct paths',
store_dir: %r{ca/fe/fe/ed/\h+},
absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
end
include_examples 'builds correct paths',
store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
upload_path: %r{\h+/<filename>}
end
context 'when only repositories are rolled out' do
let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
context 'when only repositories are rolled out' do
let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
it_behaves_like 'builds correct legacy storage paths'
it_behaves_like 'builds correct legacy storage paths'
end
end
end
context 'legacy storage' do
it_behaves_like 'builds correct legacy storage paths'
include_examples 'uses hashed storage'
context 'object store is remote' do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
# always use hashed storage path for remote uploads
it_behaves_like 'builds correct paths',
store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/<filename>}
end
describe 'initialize' do
......@@ -78,6 +87,16 @@ describe FileUploader do
end
end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
describe '#upload=' do
let(:secret) { SecureRandom.hex }
let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') }
......@@ -93,15 +112,5 @@ describe FileUploader do
uploader.upload = upload
end
context 'uploader_context is empty' do
it 'falls back to regex-based extraction' do
expect(upload).to receive(:uploader_context).and_return({})
uploader.upload = upload
expect(uploader.secret).to eq(secret)
expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt')
end
end
end
end
require 'spec_helper'
describe JobArtifactUploader do
let(:job_artifact) { create(:ci_job_artifact) }
let(:store) { described_class::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
subject { uploader }
......@@ -11,6 +12,17 @@ describe JobArtifactUploader do
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
context "object store is REMOTE" do
before do
stub_artifacts_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
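# Remote store_dir layout, as matched below: two 2-hex-char prefixes, a
# 64-hex-char hash, a yyyy_m_d date, then two numeric ids (job and artifact).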
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end
describe '#open' do
subject { uploader.open }
......@@ -36,6 +48,17 @@ describe JobArtifactUploader do
end
end
end
context 'when trace is stored in Object storage' do
before do
allow(uploader).to receive(:file_storage?) { false }
allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns an HTTP IO stream' do
is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
end
end
end
context 'file is stored in valid local_path' do
......@@ -55,4 +78,14 @@ describe JobArtifactUploader do
it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace')))
stub_artifacts_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
require 'rails_helper'
describe LegacyArtifactUploader do
let(:job) { create(:ci_build) }
let(:store) { described_class::Store::LOCAL }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { described_class.root }
......@@ -20,6 +21,17 @@ describe LegacyArtifactUploader do
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
context 'object store is remote' do
before do
stub_artifacts_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like "builds correct paths",
store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
end
describe '#filename' do
# we need to use the uploader here, as this goes through the mounter,
# which initialises the uploader.file object
......
......@@ -11,4 +11,62 @@ describe LfsObjectUploader do
store_dir: %r[\h{2}/\h{2}],
cache_dir: %r[/lfs-objects/tmp/cache],
work_dir: %r[/lfs-objects/tmp/work]
context "object store is REMOTE" do
before do
stub_lfs_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}]
end
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
end
context 'with object storage enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'is scheduled to run after creation' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object
end
end
end
describe 'remote file' do
let(:remote) { described_class::Store::REMOTE }
let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do
before do
stub_lfs_object_storage
end
it 'can store file remotely' do
allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
store_file(lfs_object)
expect(lfs_object.file_store).to eq remote
expect(lfs_object.file.path).not_to be_blank
end
end
end
def store_file(lfs_object)
lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png")
lfs_object.save!
end
end