Commit 594e6a0a authored by Micaël Bergeron's avatar Micaël Bergeron

Refactor the uploaders

I've demoted the ObjectStoreUploader to a concern that is mixed in
the concrete uploader classes that need to store files in a remote
object store.

I've been working on making the local -> remote migration work
first, which has been trivial compared to the remote -> local one.

The current implementation is heavily based on side effects, which
makes the code brittle and hard to reason about.

The current approach is to store the `store` field in the correct
`Upload` model once a migration has been done. To retrieve the field
I use the `has_many :uploads` relationship, querying with all the paths
that a certain file may have: `uploads.where(path: paths).last`. This has
the drawback of adding a database query for every upload lookup, but
I feel that generalizing this behavior is worth it. We should be able
to optimize this down the road quite easily.
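
As a sketch, the lookup looks roughly like this (hedged; `identifier` and
the exact call sites are illustrative):

```ruby
# Collect every path the file may live at (one candidate per store),
# then pick the matching Upload record and adopt its store.
paths = uploader.store_dirs.map { |_store, dir| File.join(dir, identifier) }
upload = model.uploads.where(path: paths).last
uploader.object_store = upload&.store # the setter falls back to LOCAL when nil
```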
parent bbcaf4ae
...@@ -2,6 +2,7 @@ module UploadsActions ...@@ -2,6 +2,7 @@ module UploadsActions
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
def create def create
# TODO: why not pass a GitlabUploader instance?
link_to_file = UploadService.new(model, params[:file], uploader_class).execute link_to_file = UploadService.new(model, params[:file], uploader_class).execute
respond_to do |format| respond_to do |format|
...@@ -17,34 +18,53 @@ module UploadsActions ...@@ -17,34 +18,53 @@ module UploadsActions
end end
end end
# This should either redirect to the file's remote URL, or send the file itself
def show def show
return render_404 unless uploader.exists? return render_404 unless uploader.exists?
disposition = uploader.image_or_video? ? 'inline' : 'attachment' # send to the remote URL
redirect_to uploader.url unless uploader.file_storage?
# or send the file
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
expires_in 0.seconds, must_revalidate: true, private: true expires_in 0.seconds, must_revalidate: true, private: true
send_file uploader.file.path, disposition: disposition send_file uploader.file.path, disposition: disposition
end end
private private
def uploader_class
uploader.class
end
def upload_mount
mounted_as = params[:mounted_as]
upload_mounts = %w(avatar attachment file logo header_logo)
mounted_as if upload_mounts.include? mounted_as
end
# TODO: this method is too complex
#
def uploader def uploader
strong_memoize(:uploader) do @uploader ||= if upload_model_class < CarrierWave::Mount::Extension && upload_mount
return if show_model.nil? model.public_send(upload_mount)
elsif upload_model_class == PersonalSnippet
find_upload(PersonalFileUploader)&.build_uploader || PersonalFileUploader.new(model)
else
find_upload(FileUploader)&.build_uploader || FileUploader.new(model)
end
end
file_uploader = FileUploader.new(show_model, params[:secret]) def find_upload(uploader_class)
file_uploader.retrieve_from_store!(params[:filename]) return nil unless params[:secret] && params[:filename]
file_uploader upload_path = uploader_class.upload_path(params[:secret], params[:filename])
end Upload.where(uploader: uploader_class.to_s, path: upload_path)&.last
end end
def image_or_video? def image_or_video?
uploader && uploader.exists? && uploader.image_or_video? uploader && uploader.exists? && uploader.image_or_video?
end end
def uploader_class
FileUploader
end
end end
# Used for out-of-context uploads
# see #upload_model_class
#
class UploadsController < ApplicationController class UploadsController < ApplicationController
include UploadsActions include UploadsActions
UnknownUploadModelError = Class.new(StandardError)
rescue_from UnknownUploadModelError, with: :render_404
skip_before_action :authenticate_user! skip_before_action :authenticate_user!
before_action :upload_mount_satisfied?
before_action :find_model before_action :find_model
before_action :authorize_access!, only: [:show] before_action :authorize_access!, only: [:show]
before_action :authorize_create_access!, only: [:create] before_action :authorize_create_access!, only: [:create]
private
def find_model def find_model
return nil unless params[:id] return nil unless params[:id]
@model = upload_model_class.find(params[:id])
return render_404 unless upload_model && upload_mount
@model = upload_model.find(params[:id])
end end
def authorize_access! def authorize_access!
...@@ -53,8 +56,8 @@ class UploadsController < ApplicationController ...@@ -53,8 +56,8 @@ class UploadsController < ApplicationController
end end
end end
def upload_model def upload_model_class
upload_models = { model_classes = {
"user" => User, "user" => User,
"project" => Project, "project" => Project,
"note" => Note, "note" => Note,
...@@ -63,42 +66,17 @@ class UploadsController < ApplicationController ...@@ -63,42 +66,17 @@ class UploadsController < ApplicationController
"personal_snippet" => PersonalSnippet "personal_snippet" => PersonalSnippet
} }
upload_models[params[:model]] raise UnknownUploadModelError unless cls = model_classes[params[:model]]
cls
end end
def upload_mount def upload_model_class_has_mounts?
return true unless params[:mounted_as] upload_model_class < CarrierWave::Mount::Extension
upload_mounts = %w(avatar attachment file logo header_logo)
if upload_mounts.include?(params[:mounted_as])
params[:mounted_as]
end
end
def uploader
return @uploader if defined?(@uploader)
case model
when nil
@uploader = PersonalFileUploader.new(nil, params[:secret])
@uploader.retrieve_from_store!(params[:filename])
when PersonalSnippet
@uploader = PersonalFileUploader.new(model, params[:secret])
@uploader.retrieve_from_store!(params[:filename])
else
@uploader = @model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
redirect_to @uploader.url unless @uploader.file_storage?
end
@uploader
end end
def uploader_class def upload_mount_satisfied?
PersonalFileUploader return true unless upload_model_class_has_mounts?
upload_model_class.uploader_options.has_key?(upload_mount)
end end
def model def model
......
...@@ -46,7 +46,7 @@ module Ci ...@@ -46,7 +46,7 @@ module Ci
end end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) } scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) } scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) } scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) } scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) } scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) } scope :ref_protected, -> { where(protected: true) }
......
...@@ -7,7 +7,7 @@ class LfsObject < ActiveRecord::Base ...@@ -7,7 +7,7 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true validates :oid, presence: true, uniqueness: true
scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) } scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
mount_uploader :file, LfsObjectUploader mount_uploader :file, LfsObjectUploader
......
...@@ -90,7 +90,9 @@ class Note < ActiveRecord::Base ...@@ -90,7 +90,9 @@ class Note < ActiveRecord::Base
end end
end end
# @deprecated attachments are handled by the MarkdownUploader
mount_uploader :attachment, AttachmentUploader mount_uploader :attachment, AttachmentUploader
deprecate :attachment => 'Use the Markdown uploader instead'
# Scopes # Scopes
scope :searchable, -> { where(system: false) } scope :searchable, -> { where(system: false) }
......
...@@ -931,6 +931,14 @@ class Project < ActiveRecord::Base ...@@ -931,6 +931,14 @@ class Project < ActiveRecord::Base
end end
end end
def avatar_uploader(uploader)
return uploader unless avatar_identifier
paths = uploader.store_dirs.map {|store, path| File.join(path, avatar_identifier) }
uploader.upload = uploads.where(uploader: 'AvatarUploader', path: paths)&.last
uploader.object_store = uploader.upload&.store # TODO: move this to RecordsUploads
end
def avatar_in_git def avatar_in_git
repository.avatar repository.avatar
end end
......
...@@ -17,13 +17,15 @@ class Upload < ActiveRecord::Base ...@@ -17,13 +17,15 @@ class Upload < ActiveRecord::Base
end end
def self.record(uploader) def self.record(uploader)
remove_path(uploader.relative_path) upload = uploader.upload || new
create(
upload.update_attributes(
size: uploader.file.size, size: uploader.file.size,
path: uploader.relative_path, path: uploader.dynamic_path,
model: uploader.model, model: uploader.model,
uploader: uploader.class.to_s uploader: uploader.class.to_s,
store: uploader.try(:object_store) || ObjectStorage::Store::LOCAL
) )
end end
...@@ -49,7 +51,15 @@ class Upload < ActiveRecord::Base ...@@ -49,7 +51,15 @@ class Upload < ActiveRecord::Base
File.exist?(absolute_path) File.exist?(absolute_path)
end end
private def build_uploader(from = nil)
uploader = from || uploader_class.new(model)
uploader.upload = self
uploader.object_store = store
uploader
end
private
def foreground_checksum? def foreground_checksum?
size <= CHECKSUM_THRESHOLD size <= CHECKSUM_THRESHOLD
......
...@@ -25,7 +25,7 @@ module Geo ...@@ -25,7 +25,7 @@ module Geo
end end
def local_store_path def local_store_path
Pathname.new(LfsObjectUploader.local_store_path) Pathname.new(LfsObjectUploader.workhorse_upload_path)
end end
def relative_file_path def relative_file_path
......
...@@ -16,9 +16,9 @@ module Projects ...@@ -16,9 +16,9 @@ module Projects
@old_path = project.full_path @old_path = project.full_path
@new_path = project.disk_path @new_path = project.disk_path
origin = FileUploader.dynamic_path_segment(project) origin = FileUploader.model_path_segment(project)
project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments] project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
target = FileUploader.dynamic_path_segment(project) target = FileUploader.model_path_segment(project)
result = move_folder!(origin, target) result = move_folder!(origin, target)
project.save! project.save!
......
class AttachmentUploader < GitlabUploader class AttachmentUploader < GitlabUploader
include RecordsUploads include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
include UploaderHelper include UploaderHelper
storage :file storage_options Gitlab.config.uploads
def store_dir private
"#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
def dynamic_segment
File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
end end
end end
class AvatarUploader < GitlabUploader class AvatarUploader < GitlabUploader
include RecordsUploads
include UploaderHelper include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
storage :file storage_options Gitlab.config.uploads
def store_dir
"#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
end
def exists? def exists?
model.avatar.file && model.avatar.file.present? model.avatar.file && model.avatar.file.present?
...@@ -22,4 +20,10 @@ class AvatarUploader < GitlabUploader ...@@ -22,4 +20,10 @@ class AvatarUploader < GitlabUploader
def move_to_cache def move_to_cache
false false
end end
private
def dynamic_segment
File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
end
end end
...@@ -21,13 +21,12 @@ class FileMover ...@@ -21,13 +21,12 @@ class FileMover
end end
def update_markdown def update_markdown
updated_text = model.read_attribute(update_field).gsub(temp_file_uploader.to_markdown, uploader.to_markdown)
updated_text = model.read_attribute(update_field)
.gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text) model.update_attribute(update_field, updated_text)
true
rescue rescue
revert revert
false false
end end
......
# This class breaks the actual CarrierWave concept.
# Every uploader should use a base_dir that is model agnostic so we can build
# back URLs from base_dir-relative paths saved in the `Upload` model.
#
# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path)
# there is no way to build back the correct file path without the model, which defies the
# CarrierWave way of storing files.
#
class FileUploader < GitlabUploader class FileUploader < GitlabUploader
include RecordsUploads
include UploaderHelper include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)} MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
storage :file attr_accessor :model
attr_reader :secret
# TODO: remove this, FileUploader should not have storage_options, this class
# should be abstract, or even a Concern that simply adds the secret
#
# Then create a new AdhocUploader that implements the base_dir logic of this class,
# which is wrong anyway.
storage_options Gitlab.config.uploads
def self.absolute_path(upload_record) def self.root
storage_options&.storage_path
end
def self.absolute_path(upload)
File.join( File.join(
self.dynamic_path_segment(upload_record.model), root,
upload_record.path base_dir(upload.model),
upload.path # this already contain the dynamic_segment, see #upload_path
) )
end end
# Not using `GitlabUploader.base_dir` because all project namespaces are in def self.base_dir(model)
# the `public/uploads` dir. model_path_segment(model)
#
def self.base_dir
root_dir
end end
# Returns the part of `store_dir` that can change based on the model's current # Returns the part of `store_dir` that can change based on the model's current
...@@ -29,59 +50,102 @@ class FileUploader < GitlabUploader ...@@ -29,59 +50,102 @@ class FileUploader < GitlabUploader
# model - Object that responds to `full_path` and `disk_path` # model - Object that responds to `full_path` and `disk_path`
# #
# Returns a String without a trailing slash # Returns a String without a trailing slash
def self.dynamic_path_segment(model) def self.model_path_segment(model)
if model.hashed_storage?(:attachments) if model.hashed_storage?(:attachments)
dynamic_path_builder(model.disk_path) model.disk_path
else else
dynamic_path_builder(model.full_path) model.full_path
end end
end end
# Auxiliary method to build dynamic path segment when not using a project model # Auxiliary method to build dynamic path segment when not using a project model
# #
# Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic # Prefer to use the `.model_path_segment` as it includes Hashed Storage specific logic
#
# TODO: review this path?
# TODO: remove me, this makes no sense
def self.dynamic_path_builder(path) def self.dynamic_path_builder(path)
File.join(CarrierWave.root, base_dir, path) File.join(root, path)
end end
attr_accessor :model def self.upload_path(secret, identifier)
attr_reader :secret File.join(secret, identifier)
end
def initialize(model, secret = nil) def initialize(model, secret = nil)
@model = model @model = model
@secret = secret || generate_secret @secret = secret
end end
def store_dir def base_dir
File.join(dynamic_path_segment, @secret) self.class.base_dir(@model)
end end
def relative_path # we don't need to know the actual path, an uploader instance should be
self.file.path.sub("#{dynamic_path_segment}/", '') # able to yield the file content on demand, so we should build the digest
def absolute_path
self.class.absolute_path(@upload)
end end
def to_markdown def upload_path
to_h[:markdown] self.class.upload_path(dynamic_segment, identifier)
end end
def to_h def model_path_segment
filename = image_or_video? ? self.file.basename : self.file.filename self.class.model_path_segment(@model)
escaped_filename = filename.gsub("]", "\\]") end
def store_dir
File.join(base_dir, dynamic_segment)
end
markdown = "[#{escaped_filename}](#{secure_url})" def markdown_link
markdown = "[#{markdown_name}](#{secure_url})"
markdown.prepend("!") if image_or_video? || dangerous? markdown.prepend("!") if image_or_video? || dangerous?
markdown
end
def to_h
{ {
alt: filename, alt: markdown_name,
url: secure_url, url: secure_url,
markdown: markdown markdown: markdown_link
} }
end end
def filename
self.file.filename
end
# This is weird: the upload does not hold the secret, but holds the path,
# so we need to extract the secret from the path
def upload=(value)
if matches = DYNAMIC_PATH_PATTERN.match(value.path)
@secret = matches[:secret]
@identifier = matches[:identifier]
retrieve_from_store!(@identifier)
end
super
end
def secret
@secret ||= generate_secret
end
private private
def dynamic_path_segment def markdown_name
self.class.dynamic_path_segment(model) (image_or_video? ? File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]")
end
def identifier
@identifier ||= filename
end
def dynamic_segment
secret
end end
def generate_secret def generate_secret
......
class GitlabUploader < CarrierWave::Uploader::Base class GitlabUploader < CarrierWave::Uploader::Base
def self.absolute_path(upload_record) class << self
File.join(CarrierWave.root, upload_record.path) # DSL setter
end def storage_options(options = nil)
@storage_options = options if options
@storage_options
end
def self.root_dir def root
'uploads' storage_options&.storage_path
end end
# When object storage is used, keep the `root_dir` as `base_dir`. # represents the directory namespacing at the class level
# The files aren't really in folders there, they just have a name. def base_dir
# The files that contain user input in their name, also contain a hash, so storage_options&.base_dir || ""
# the names are still unique end
#
# This method is overridden in the `FileUploader`
def self.base_dir
return root_dir unless file_storage?
File.join(root_dir, '-', 'system') def file_storage?
end storage == CarrierWave::Storage::File
end
def self.file_storage? def absolute_path(upload_record)
self.storage == CarrierWave::Storage::File File.join(CarrierWave.root, upload_record.path)
end
end end
delegate :base_dir, :file_storage?, to: :class delegate :base_dir, :file_storage?, to: :class
...@@ -39,17 +40,6 @@ class GitlabUploader < CarrierWave::Uploader::Base ...@@ -39,17 +40,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
true true
end end
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def relative_path
self.file.path.sub("#{root}/", '')
end
def exists? def exists?
file.present? file.present?
end end
...@@ -67,6 +57,17 @@ class GitlabUploader < CarrierWave::Uploader::Base ...@@ -67,6 +57,17 @@ class GitlabUploader < CarrierWave::Uploader::Base
private private
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
# To prevent files from moving across filesystems, override the default # To prevent files from moving across filesystems, override the default
# implementation: # implementation:
# http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183 # http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183
......
class JobArtifactUploader < ObjectStoreUploader class JobArtifactUploader < GitlabUploader
storage_options Gitlab.config.artifacts extend Workhorse::UploadPath
include ObjectStorage::Concern
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path storage_options Gitlab.config.artifacts
File.join(self.local_store_path, 'tmp/uploads/')
end
def size def size
return super if model.size.nil? return super if model.size.nil?
...@@ -17,7 +12,7 @@ class JobArtifactUploader < ObjectStoreUploader ...@@ -17,7 +12,7 @@ class JobArtifactUploader < ObjectStoreUploader
private private
def default_path def dynamic_segment
creation_date = model.created_at.utc.strftime('%Y_%m_%d') creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
......
class LegacyArtifactUploader < ObjectStoreUploader class LegacyArtifactUploader < GitlabUploader
storage_options Gitlab.config.artifacts extend Workhorse::UploadPath
include ObjectStorage::Concern
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path storage_options Gitlab.config.artifacts
File.join(self.local_store_path, 'tmp/uploads/')
end
private private
def default_path def dynamic_segment
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s) File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end end
end end
class LfsObjectUploader < ObjectStoreUploader class LfsObjectUploader < GitlabUploader
storage_options Gitlab.config.lfs extend Workhorse::UploadPath
include ObjectStorage::Concern
def self.local_store_path storage_options Gitlab.config.lfs
Gitlab.config.lfs.storage_path
end
def filename def filename
model.oid[4..-1] model.oid[4..-1]
...@@ -11,7 +10,7 @@ class LfsObjectUploader < ObjectStoreUploader ...@@ -11,7 +10,7 @@ class LfsObjectUploader < ObjectStoreUploader
private private
def default_path def dynamic_segment
"#{model.oid[0, 2]}/#{model.oid[2, 2]}" File.join(model.oid[0, 2], model.oid[2, 2])
end end
end end
class NamespaceFileUploader < FileUploader class NamespaceFileUploader < FileUploader
def self.base_dir storage_options Gitlab.config.uploads
File.join(root_dir, '-', 'system', 'namespace')
def self.base_dir(model)
File.join(storage_options&.base_dir, 'namespace', model_path_segment(model))
end end
def self.dynamic_path_segment(model) def self.model_path_segment(model)
dynamic_path_builder(model.id.to_s) File.join(model.id.to_s)
end end
private # Re-Override
def store_dir
store_dirs[object_store]
end
def secure_url def store_dirs
File.join('/uploads', @secret, file.filename) {
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join('namespace', model_path_segment, dynamic_segment)
}
end end
end end
class PersonalFileUploader < FileUploader class PersonalFileUploader < FileUploader
def self.dynamic_path_segment(model) storage_options Gitlab.config.uploads
File.join(CarrierWave.root, model_path(model))
def self.base_dir(model)
File.join(storage_options&.base_dir, model_path_segment(model))
end end
def self.base_dir def self.model_path_segment(model)
File.join(root_dir, '-', 'system') return 'temp/' unless model
File.join(model.class.to_s.underscore, model.id.to_s)
end end
private def object_store
return Store::LOCAL unless model
def secure_url super
File.join(self.class.model_path(model), secret, file.filename) end
# Revert-Override
def store_dir
store_dirs[object_store]
end end
def self.model_path(model) def store_dirs
if model {
File.join("/#{base_dir}", model.class.to_s.underscore, model.id.to_s) Store::LOCAL => File.join(base_dir, dynamic_segment),
else Store::REMOTE => File.join(model_path_segment, dynamic_segment)
File.join("/#{base_dir}", 'temp') }
end end
private
def secure_url
File.join('/', base_dir, secret, file.filename)
end end
end end
module RecordsUploads module RecordsUploads
extend ActiveSupport::Concern module Concern
extend ActiveSupport::Concern
included do attr_accessor :upload
after :store, :record_upload
before :remove, :destroy_upload
end
# After storing an attachment, create a corresponding Upload record included do
# before :store, :destroy_upload
# NOTE: We're ignoring the argument passed to this callback because we want after :store, :record_upload
# the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the before :remove, :destroy_upload
# `Tempfile` object the callback gets. end
#
# Called `after :store` # After storing an attachment, create a corresponding Upload record
def record_upload(_tempfile = nil) #
return unless model # NOTE: We're ignoring the argument passed to this callback because we want
return unless file_storage? # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
return unless file.exists? # `Tempfile` object the callback gets.
#
Upload.record(self) # Called `after :store`
end def record_upload(_tempfile = nil)
return unless model
return unless file && file.exists?
Upload.record(self)
end
def upload_path
File.join(store_dir, filename.to_s)
end
private private
# Before removing an attachment, destroy any Upload records at the same path # Before removing an attachment, destroy any Upload records at the same path
# #
# Called `before :remove` # Called `before :remove`
def destroy_upload(*args) def destroy_upload(*args)
return unless file_storage? return unless file && file.exists?
return unless file
Upload.remove_path(relative_path) # that should be the old path?
Upload.remove_path(upload_path)
end
end end
end end
...@@ -32,14 +32,7 @@ module UploaderHelper ...@@ -32,14 +32,7 @@ module UploaderHelper
def extension_match?(extensions) def extension_match?(extensions)
return false unless file return false unless file
extension = extension = file.try(:extension) || File.extname(file.path).delete('.')
if file.respond_to?(:extension)
file.extension
else
# Not all CarrierWave storages respond to :extension
File.extname(file.path).delete('.')
end
extensions.include?(extension.downcase) extensions.include?(extension.downcase)
end end
end end
module Workhorse
module UploadPath
def workhorse_upload_path
File.join(root, base_dir, 'tmp/uploads/')
end
end
end
...@@ -3,7 +3,7 @@ class UploadChecksumWorker ...@@ -3,7 +3,7 @@ class UploadChecksumWorker
def perform(upload_id) def perform(upload_id)
upload = Upload.find(upload_id) upload = Upload.find(upload_id)
upload.calculate_checksum upload.calculate_checksum!
upload.save! upload.save!
rescue ActiveRecord::RecordNotFound rescue ActiveRecord::RecordNotFound
Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping") Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping")
......
...@@ -174,6 +174,25 @@ production: &base ...@@ -174,6 +174,25 @@ production: &base
# endpoint: 'http://127.0.0.1:9000' # default: nil # endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object' # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## Uploads (attachments, avatars, etc...)
uploads:
# The location where uploads are stored (default: public/).
# storage_path: public/
# base_dir: uploads/-/system
object_store:
enabled: true
remote_directory: uploads # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
# Use the following options to configure an AWS compatible host
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages ## GitLab Pages
pages: pages:
enabled: false enabled: false
...@@ -777,6 +796,11 @@ test: ...@@ -777,6 +796,11 @@ test:
aws_access_key_id: AWS_ACCESS_KEY_ID aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1 region: eu-central-1
uploads:
storage_path: tmp/tests/public/
enabled: true
object_store:
enabled: false
gitlab: gitlab:
host: localhost host: localhost
port: 80 port: 80
......
...@@ -334,20 +334,6 @@ Settings.gitlab_ci['url'] ||= Settings.__send__(:build_gitlab_ci ...@@ -334,20 +334,6 @@ Settings.gitlab_ci['url'] ||= Settings.__send__(:build_gitlab_ci
Settings['incoming_email'] ||= Settingslogic.new({}) Settings['incoming_email'] ||= Settingslogic.new({})
Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil? Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil?
#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
# #
# Registry # Registry
...@@ -382,19 +368,50 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa ...@@ -382,19 +368,50 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
# #
Settings.gitlab['geo_status_timeout'] ||= 10 Settings.gitlab['geo_status_timeout'] ||= 10
#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
# DEPRECATED: use `storage_path` instead of `path`
Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
# #
# Git LFS # Git LFS
# #
Settings['lfs'] ||= Settingslogic.new({}) Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil? Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects")) Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] ||= Settingslogic.new({}).tap do |object_store|
object_store['enabled'] ||= false
object_store['remote_directory'] ||= nil
object_store['background_upload'] = true if object_store['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
object_store['connection']&.deep_stringify_keys!
end
Settings.lfs['object_store'] ||= Settingslogic.new({}) #
Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil? # Uploads
Settings.lfs['object_store']['remote_directory'] ||= nil #
Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil? Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
Settings.uploads['object_store'] ||= Settingslogic.new({})
Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
Settings.uploads['object_store']['remote_directory'] ||= nil
Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy # Convert upload connection settings to use string keys, to make Fog happy
Settings.lfs['object_store']['connection']&.deep_stringify_keys! Settings.uploads['object_store']['connection']&.deep_stringify_keys!
# #
# Mattermost # Mattermost
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddStoreColumnToUploads < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :uploads, :store, :integer
end
end
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20171206221519) do ActiveRecord::Schema.define(version: 20171214144320) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "plpgsql" enable_extension "plpgsql"
...@@ -173,11 +173,11 @@ ActiveRecord::Schema.define(version: 20171206221519) do ...@@ -173,11 +173,11 @@ ActiveRecord::Schema.define(version: 20171206221519) do
t.boolean "throttle_authenticated_web_enabled", default: false, null: false t.boolean "throttle_authenticated_web_enabled", default: false, null: false
t.integer "throttle_authenticated_web_requests_per_period", default: 7200, null: false t.integer "throttle_authenticated_web_requests_per_period", default: 7200, null: false
t.integer "throttle_authenticated_web_period_in_seconds", default: 3600, null: false t.integer "throttle_authenticated_web_period_in_seconds", default: 3600, null: false
t.boolean "password_authentication_enabled_for_web"
t.boolean "password_authentication_enabled_for_git", default: true
t.integer "gitaly_timeout_default", default: 55, null: false t.integer "gitaly_timeout_default", default: 55, null: false
t.integer "gitaly_timeout_medium", default: 30, null: false t.integer "gitaly_timeout_medium", default: 30, null: false
t.integer "gitaly_timeout_fast", default: 10, null: false t.integer "gitaly_timeout_fast", default: 10, null: false
t.boolean "password_authentication_enabled_for_web"
t.boolean "password_authentication_enabled_for_git", default: true, null: false
t.boolean "mirror_available", default: true, null: false t.boolean "mirror_available", default: true, null: false
end end
...@@ -402,12 +402,12 @@ ActiveRecord::Schema.define(version: 20171206221519) do ...@@ -402,12 +402,12 @@ ActiveRecord::Schema.define(version: 20171206221519) do
t.integer "project_id", null: false t.integer "project_id", null: false
t.integer "job_id", null: false t.integer "job_id", null: false
t.integer "file_type", null: false t.integer "file_type", null: false
t.integer "file_store"
t.integer "size", limit: 8 t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false t.datetime "created_at", null: false
t.datetime_with_timezone "updated_at", null: false t.datetime "updated_at", null: false
t.datetime_with_timezone "expire_at" t.datetime "expire_at"
t.string "file" t.string "file"
t.integer "file_store"
end end
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
...@@ -2223,6 +2223,7 @@ ActiveRecord::Schema.define(version: 20171206221519) do ...@@ -2223,6 +2223,7 @@ ActiveRecord::Schema.define(version: 20171206221519) do
t.string "model_type" t.string "model_type"
t.string "uploader", null: false t.string "uploader", null: false
t.datetime "created_at", null: false t.datetime "created_at", null: false
t.integer "store"
end end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
......
...@@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts: ...@@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts:
- User snippet attachments - User snippet attachments
* Project * Project
- Project avatars - Project avatars
- Issues/MR Markdown attachments - Issues/MR/Notes Markdown attachments
- Issues/MR Legacy Markdown attachments - Issues/MR/Notes Legacy Markdown attachments
- CI Build Artifacts - CI Build Artifacts
- LFS Objects - LFS Objects
...@@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts: ...@@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts:
GitLab started saving everything on local disk. While directory location changed from previous versions, GitLab started saving everything on local disk. While directory location changed from previous versions,
they are still not 100% standardized. You can see them below: they are still not 100% standardized. You can see them below:
| Description | In DB? | Relative path | Uploader class | model_type | | Description | In DB? | Relative path (from CarrierWave.root) | Uploader class | model_type |
| ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- | | ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
| Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance | | Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance |
| Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance | | Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance |
...@@ -33,17 +33,105 @@ they are still not 100% standardized. You can see them below: ...@@ -33,17 +33,105 @@ they are still not 100% standardized. You can see them below:
| User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User | | User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User |
| User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet | | User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet |
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project | | Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project | | Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note | | Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build | | CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject | | LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader` CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
while in EE they inherit the `ObjectStoreUploader` and store files in an S3 API compatible object store. while in EE they include the `ObjectStorage` concern and store files in an S3 API compatible object store.
In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout, In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage] layout,
instead of basing the path on a mutable variable `:project_path_with_namespace`, it's possible to use the instead of basing the path on a mutable variable `:project_path_with_namespace`, it's possible to use the
hash of the project ID instead, if the project migrates to the new approach (introduced in 10.2). hash of the project ID instead, if the project migrates to the new approach (introduced in 10.2).
### Path segments
Files are stored at multiple locations and use different path schemes.
All the `GitlabUploader` derived classes should comply with this path segment schema:
```
| GitlabUploader
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `<gitlab_root>/public/` | `uploads/-/system/` | `user/avatar/:id/` | `:filename` |
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | `CarrierWave::Uploader#store_dir` | |
| FileUploader
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `<gitlab_root>/shared/` | `artifacts/` | `:year_:month/:id` | `:filename` |
| `<gitlab_root>/shared/` | `snippets/` | `:secret/` | `:filename` |
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | `CarrierWave::Uploader#store_dir` | |
| | | `FileUploader#upload_path` |
| ObjectStore::Concern (store = remote)
| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
| `<bucket_name>` | <ignored> | `user/avatar/:id/` | `:filename` |
| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
| `#fog_dir` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | | `ObjectStorage::Concern#store_dir` | |
| | | `ObjectStorage::Concern#upload_path` |
```
The `RecordsUploads::Concern` will create an `Upload` entry for every file stored by a `GitlabUploader`, persisting the dynamic parts of the path using
`GitlabUploader#dynamic_path`. You may then use the `Upload#build_uploader` method to manipulate the file.
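
For example, a hedged sketch of rebuilding an uploader from a recorded `Upload` (it mirrors `Upload#build_uploader` from this change; `upload_path` here is an illustrative variable):

```ruby
upload = Upload.where(uploader: 'FileUploader', path: upload_path).last
uploader = upload.build_uploader # sets uploader.upload and uploader.object_store
uploader.exists? # the file can now be served from the correct store
```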
## Object Storage
By including the `ObjectStorage::Concern` in a `GitlabUploader`-derived class, you may enable object storage for that uploader. To do so,
you need to either 1) include `RecordsUploads::Concern`, or 2) mount the uploader and create a new field named `<mount>_store`.
The `CarrierWave::Uploader#store_dir` is overridden to (see the sketch after this list):
- `GitlabUploader.base_dir` + `GitlabUploader.dynamic_segment` when the store is LOCAL
- `GitlabUploader.dynamic_segment` when the store is REMOTE (the bucket name is used to namespace)
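
In code, the override looks roughly like this (mirroring `ObjectStorage::Concern#store_dir` and `#store_dirs` from this change):

```ruby
def store_dir(store = nil)
  store_dirs[store || object_store]
end

def store_dirs
  {
    Store::LOCAL => File.join(base_dir, dynamic_segment),
    Store::REMOTE => File.join(dynamic_segment) # the bucket name namespaces remote files
  }
end
```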
### Using `RecordsUploads::Concern`
The `ObjectStorage::Concern` uploader will search for the correct `Upload` model in the `RecordsUploads::Concern#uploads` relationship to select the correct object store.
`Upload` is mapped using the `CarrierWave::Uploader#upload_path` for each store (LOCAL/REMOTE).
```ruby
class SongUploader < GitlabUploader
include ObjectStorage::Concern
include RecordsUploads::Concern
...
end
class Thing < ActiveRecord::Base
mount_uploader :theme, SongUploader # we have a great theme song!
...
end
```
### Using a mounted uploader
The `ObjectStorage::Concern` will query the `model.<mount>_store` attribute to select the correct object store.
```ruby
class SongUploader < GitlabUploader
include ObjectStorage::Concern
...
end
class Thing < ActiveRecord::Base
# theme_store is an ActiveRecord attribute (an integer column on the table)
mount_uploader :theme, SongUploader # we have a great theme song!
def theme_store
super || ObjectStorage::Store::REMOTE # send new files to object store
end
...
end
```
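
With either approach, an existing file can be moved between stores through `ObjectStorage::Concern#migrate!`; a hedged usage sketch (the model and mount names are illustrative):

```ruby
thing = Thing.find(42)
uploader = thing.theme # the mounted SongUploader
uploader.migrate!(ObjectStorage::Store::REMOTE) # moves the file and persists theme_store
```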
[CarrierWave]: https://github.com/carrierwaveuploader/carrierwave [CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
[Hashed Storage]: ../administration/repository_storage_types.md [Hashed Storage]: ../administration/repository_storage_types.md
...@@ -73,7 +73,7 @@ module Geo ...@@ -73,7 +73,7 @@ module Geo
Geo::Fdw::LfsObject.joins("LEFT OUTER JOIN file_registry Geo::Fdw::LfsObject.joins("LEFT OUTER JOIN file_registry
ON file_registry.file_id = #{fdw_table}.id ON file_registry.file_id = #{fdw_table}.id
AND file_registry.file_type = 'lfs'") AND file_registry.file_type = 'lfs'")
.where("#{fdw_table}.file_store IS NULL OR #{fdw_table}.file_store = #{LfsObjectUploader::LOCAL_STORE}") .where("#{fdw_table}.file_store IS NULL OR #{fdw_table}.file_store = #{LfsObjectUploader::Store::LOCAL}")
.where('file_registry.file_id IS NULL') .where('file_registry.file_id IS NULL')
end end
......
...@@ -11,7 +11,7 @@ module EE ...@@ -11,7 +11,7 @@ module EE
end end
def local_store? def local_store?
[nil, LfsObjectUploader::LOCAL_STORE].include?(self.file_store) [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
end end
private private
......
require 'fog/aws'
require 'carrierwave/storage/fog'
#
# This concern should add object storage support
# to the GitlabUploader class
#
module ObjectStorage
RemoteStoreError = Class.new(StandardError)
UnknownStoreError = Class.new(StandardError)
ObjectStoreUnavailable = Class.new(StandardError)
module Store
LOCAL = 1
REMOTE = 2
end
module Extension
# this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
module RecordsUploads
extend ActiveSupport::Concern
included do |base|
raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(::RecordsUploads::Concern)
end
def upload=(upload)
return unless upload
self.object_store = upload.store
super
end
end
end
module Concern
extend ActiveSupport::Concern
included do |base|
base.include(ObjectStorage)
before :store, :verify_license!
end
attr_reader :object_store
def initialize(model=nil, mounted_as=nil)
super
self.upload = model&.try(:"#{mounted_as}_upload", self)
end
class_methods do
def object_store_options
storage_options&.object_store
end
def object_store_enabled?
object_store_options&.enabled
end
def background_upload_enabled?
object_store_options&.background_upload
end
def object_store_credentials
object_store_options&.connection&.to_hash&.deep_symbolize_keys
end
def remote_store_path
object_store_options&.remote_directory
end
def licensed?
License.feature_available?(:object_storage)
end
end
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
def object_store
@object_store ||= model.try(store_serialization_column) || Store::LOCAL
end
def object_store=(value)
@object_store = value || Store::LOCAL
@storage = storage_for(@object_store)
end
# Return true if the current file is part of the model (i.e. is mounted in the model)
#
def persist_object_store?
model.respond_to?(:"#{store_serialization_column}=")
end
# Save the current @object_store to the model <mounted_as>_store column
def persist_object_store!
return unless persist_object_store?
updated = model.update_column(store_serialization_column, @object_store)
raise ActiveRecord::ActiveRecordError unless updated
end
def use_file
if file_storage?
return yield path
end
begin
cache_stored_file!
yield cache_path
ensure
cache_storage.delete_dir!(cache_path(nil))
end
end
def filename
super || file&.filename
end
#
# Move the file to another store
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
#
def migrate!(new_store)
return unless object_store != new_store
return unless file
file_to_delete = file
self.object_store = new_store # this changes the storage and file
cache_stored_file! if file_storage?
with_callbacks(:store, file_to_delete) do # for #store_versions!
storage.store!(file).tap do |new_file|
@file = new_file
begin
# Triggering a model.save! will cause the new_file to be deleted.
# I still need to investigate exactly why, but this seems like a weird interaction
# between ActiveRecord and CarrierWave
persist_object_store!
file_to_delete.delete if new_file.exists?
rescue => e
# in case of failure, delete the new file
new_file.delete
raise e
end
end
end
file
end
def schedule_migration_to_object_storage(*args)
return unless self.class.object_store_enabled?
return unless self.class.background_upload_enabled?
return unless self.class.licensed?
return unless self.file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
end
def fog_directory
self.class.remote_store_path
end
def fog_credentials
self.class.object_store_credentials
end
def fog_public
false
end
def move_to_store
return true if object_store == Store::LOCAL
file.try(:storage) == storage
end
def move_to_cache
return true if object_store == Store::LOCAL
file.try(:storage) == cache_storage
end
def verify_license!(_file)
return if file_storage?
raise 'Object Storage feature is missing' unless self.class.licensed?
end
def exists?
file.present?
end
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
# Override this if you don't want to save local files by default to the Rails.root directory
def work_dir
# Default path set by CarrierWave:
# https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
# CarrierWave.tmp_path
File.join(root, base_dir, 'tmp/work')
end
def store_dir(store = nil)
store_dirs[store || object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(dynamic_segment)
}
end
private
def serialization_column
model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
end
# Returns the column where the 'store' is saved
# defaults to 'store'
def store_serialization_column
[serialization_column, 'store'].compact.join('_').to_sym
end
def storage
@storage ||= storage_for(object_store)
end
def storage_for(store)
case store
when Store::REMOTE
raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
when Store::LOCAL
CarrierWave::Storage::File.new(self)
else
raise UnknownStoreError
end
end
# To prevent files in local storage from moving across filesystems, override
# the default implementation:
# http://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L181-L183
def workfile_path(for_file = original_filename)
# To be safe, keep this directory outside of the cache directory
# because calling CarrierWave.clean_cache_files! will remove any files in
# the cache directory.
File.join(work_dir, @cache_id, version_name.to_s, for_file)
end
end
end
require 'fog/aws'
require 'carrierwave/storage/fog'
class ObjectStoreUploader < CarrierWave::Uploader::Base
before :store, :set_default_local_store
before :store, :verify_license!
LOCAL_STORE = 1
REMOTE_STORE = 2
class << self
def storage_options(options)
@storage_options = options
end
def object_store_options
@storage_options&.object_store
end
def object_store_enabled?
object_store_options&.enabled
end
def background_upload_enabled?
object_store_options&.background_upload
end
def object_store_credentials
@object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
end
def object_store_directory
object_store_options&.remote_directory
end
def local_store_path
raise NotImplementedError
end
end
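# Typical wiring in a concrete subclass (sketch; configuration keys assumed):
#
#   class LfsObjectUploader < ObjectStoreUploader
#     storage_options Gitlab.config.lfs
#
#     def self.local_store_path
#       Gitlab.config.lfs.storage_path
#     end
#   end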
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
def real_object_store
model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
end
def object_store
real_object_store || LOCAL_STORE
end
def object_store=(value)
@storage = nil
model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
end
def store_dir
if file_storage?
default_local_path
else
default_path
end
end
def use_file
if file_storage?
return yield path
end
begin
cache_stored_file!
yield cache_path
ensure
cache_storage.delete_dir!(cache_path(nil))
end
end
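# Usage sketch: #use_file yields a local filesystem path regardless of the
# store; remote files are cached locally for the duration of the block.
#
#   uploader.use_file do |path|
#     Digest::SHA256.file(path).hexdigest # any consumer needing a local path
#   end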
def filename
super || file&.filename
end
def migrate!(new_store)
raise 'Undefined new store' unless new_store
return unless object_store != new_store
return unless file
old_file = file
old_store = object_store
# for moving remote file we need to first store it locally
cache_stored_file! unless file_storage?
# change storage
self.object_store = new_store
with_callbacks(:store, file) do
storage.store!(file).tap do |new_file|
# since we change storage store the new storage
# in case of failure delete new file
begin
model.save!
rescue => e
new_file.delete
self.object_store = old_store
raise e
end
old_file.delete
end
end
end
def schedule_migration_to_object_storage(*args)
return unless self.class.object_store_enabled?
return unless self.class.background_upload_enabled?
return unless self.licensed?
return unless self.file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
end
def fog_directory
self.class.object_store_directory
end
def fog_credentials
self.class.object_store_credentials
end
def fog_public
false
end
def move_to_store
return true if object_store == LOCAL_STORE
file.try(:storage) == storage
end
def move_to_cache
return true if object_store == LOCAL_STORE
file.try(:storage) == cache_storage
end
# We block storing artifacts on Object Storage, not receiving them
def verify_license!(new_file)
return if file_storage?
raise 'Object Storage feature is missing' unless licensed?
end
def exists?
file.present?
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
# Override this if local files should not be saved under the Rails.root directory by default
def work_dir
# Default path set by CarrierWave:
# https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
# CarrierWave.tmp_path
File.join(self.class.local_store_path, 'tmp/work')
end
def licensed?
License.feature_available?(:object_storage)
end
private
def set_default_local_store(new_file)
self.object_store = LOCAL_STORE unless self.real_object_store
end
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
raise NotImplementedError
end
def serialization_column
model.class.uploader_option(mounted_as, :mount_on) || mounted_as
end
def store_serialization_column
:"#{serialization_column}_store"
end
def storage
@storage ||=
if object_store == REMOTE_STORE
remote_storage
else
local_storage
end
end
def remote_storage
raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
end
def local_storage
CarrierWave::Storage::File.new(self)
end
# To prevent files in local storage from moving across filesystems, override
# the default implementation:
# http://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L181-L183
def workfile_path(for_file = original_filename)
# To be safe, keep this directory outside of the cache directory
# because calling CarrierWave.clean_cache_files! will remove any files in
# the cache directory.
File.join(work_dir, @cache_id, version_name.to_s, for_file)
end
end
...@@ -7,16 +7,16 @@ class ObjectStorageUploadWorker ...@@ -7,16 +7,16 @@ class ObjectStorageUploadWorker
uploader_class = uploader_class_name.constantize uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled? return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled? return unless uploader_class.background_upload_enabled?
subject = subject_class.find_by(id: subject_id) subject = subject_class.find(subject_id)
return unless subject uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend rescue RecordNotFound
# do not retry when the record does not exist
return unless file.licensed? Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
file.migrate!(uploader_class::REMOTE_STORE)
end end
end end
...@@ -215,7 +215,7 @@ module API ...@@ -215,7 +215,7 @@ module API
job = authenticate_job! job = authenticate_job!
forbidden!('Job is not running!') unless job.running? forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = JobArtifactUploader.artifacts_upload_path artifacts_upload_path = JobArtifactUploader.workhorse_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path) artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path) metadata = uploaded_file(:metadata, artifacts_upload_path)
......
...@@ -3,7 +3,7 @@ require 'backup/files' ...@@ -3,7 +3,7 @@ require 'backup/files'
module Backup module Backup
class Artifacts < Files class Artifacts < Files
def initialize def initialize
super('artifacts', LegacyArtifactUploader.local_store_path) super('artifacts', LegacyArtifactUploader.workhorse_upload_path)
end end
def create_files_dir def create_files_dir
......
...@@ -2,12 +2,12 @@ module Gitlab ...@@ -2,12 +2,12 @@ module Gitlab
module Geo module Geo
class FileTransfer < Transfer class FileTransfer < Transfer
def initialize(file_type, upload) def initialize(file_type, upload)
uploader = upload.uploader.constantize
@file_type = file_type @file_type = file_type
@file_id = upload.id @file_id = upload.id
@filename = uploader.absolute_path(upload) @filename = upload.absolute_path
@request_data = build_request_data(upload) @request_data = build_request_data(upload)
rescue ObjectStorage::RemoteStoreError
Rails.logger.warn "Cannot transfer a remote object."
end end
private private
......
...@@ -201,7 +201,7 @@ module Gitlab ...@@ -201,7 +201,7 @@ module Gitlab
end end
def handle_lfs_object_deleted_event(event, created_at) def handle_lfs_object_deleted_event(event, created_at)
file_path = File.join(LfsObjectUploader.local_store_path, event.file_path) file_path = File.join(LfsObjectUploader.workhorse_upload_path, event.file_path)
job_id = ::Geo::FileRemovalWorker.perform_async(file_path) job_id = ::Geo::FileRemovalWorker.perform_async(file_path)
......
...@@ -27,7 +27,7 @@ module Gitlab ...@@ -27,7 +27,7 @@ module Gitlab
with_link_in_tmp_dir(file.file) do |open_tmp_file| with_link_in_tmp_dir(file.file) do |open_tmp_file|
new_uploader.store!(open_tmp_file) new_uploader.store!(open_tmp_file)
end end
new_uploader.to_markdown new_uploader.markdown_link
end end
end end
......
...@@ -23,8 +23,9 @@ module Gitlab ...@@ -23,8 +23,9 @@ module Gitlab
File.join(@shared.export_path, 'uploads') File.join(@shared.export_path, 'uploads')
end end
# NOTE: this does not cover all uploads
def uploads_path def uploads_path
FileUploader.dynamic_path_segment(@project) FileUploader.new(@project).store_dir
end end
end end
end end
......
module Gitlab module Gitlab
class UploadsTransfer < ProjectTransfer class UploadsTransfer < ProjectTransfer
def root_dir def root_dir
File.join(CarrierWave.root, FileUploader.base_dir) File.join(*Gitlab.config.uploads.values_at('storage_path', 'base_dir'))
end end
end end
end end
...@@ -51,14 +51,14 @@ module Gitlab ...@@ -51,14 +51,14 @@ module Gitlab
def lfs_upload_ok(oid, size) def lfs_upload_ok(oid, size)
{ {
StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload", StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
LfsOid: oid, LfsOid: oid,
LfsSize: size LfsSize: size
} }
end end
def artifact_upload_ok def artifact_upload_ok
{ TempPath: JobArtifactUploader.artifacts_upload_path } { TempPath: JobArtifactUploader.workhorse_upload_path }
end end
def send_git_blob(repository, blob) def send_git_blob(repository, blob)
......
...@@ -12,8 +12,8 @@ namespace :gitlab do ...@@ -12,8 +12,8 @@ namespace :gitlab do
.with_artifacts_stored_locally .with_artifacts_stored_locally
.find_each(batch_size: 10) do |build| .find_each(batch_size: 10) do |build|
begin begin
build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE) build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE) build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage") logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
rescue => e rescue => e
......
...@@ -10,7 +10,7 @@ namespace :gitlab do ...@@ -10,7 +10,7 @@ namespace :gitlab do
LfsObject.with_files_stored_locally LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object| .find_each(batch_size: 10) do |lfs_object|
begin begin
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE) lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage") logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
rescue => e rescue => e
......
{"version":"1","format":"fs","fs":{"version":"1"}}
\ No newline at end of file
...@@ -145,8 +145,8 @@ describe Projects::ArtifactsController do ...@@ -145,8 +145,8 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do context 'when using local file storage' do
it_behaves_like 'a valid file' do it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let(:store) { ObjectStoreUploader::LOCAL_STORE } let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.local_store_path } let(:archive_path) { JobArtifactUploader.workhorse_upload_path }
end end
end end
...@@ -158,7 +158,7 @@ describe Projects::ArtifactsController do ...@@ -158,7 +158,7 @@ describe Projects::ArtifactsController do
it_behaves_like 'a valid file' do it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) } let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:store) { ObjectStoreUploader::REMOTE_STORE } let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' } let(:archive_path) { 'https://' }
end end
end end
......
...@@ -58,7 +58,7 @@ describe Projects::RawController do ...@@ -58,7 +58,7 @@ describe Projects::RawController do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png") lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
lfs_object.save! lfs_object.save!
stub_lfs_object_storage stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE) lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end end
it 'responds with redirect to file' do it 'responds with redirect to file' do
......
...@@ -65,6 +65,7 @@ describe UploadsController do ...@@ -65,6 +65,7 @@ describe UploadsController do
it 'creates a corresponding Upload record' do it 'creates a corresponding Upload record' do
upload = Upload.last upload = Upload.last
aggregate_failures do aggregate_failures do
expect(upload).to exist expect(upload).to exist
...@@ -212,6 +213,7 @@ describe UploadsController do ...@@ -212,6 +213,7 @@ describe UploadsController do
context "when not signed in" do context "when not signed in" do
it "responds with status 200" do it "responds with status 200" do
get :show, model: "project", mounted_as: "avatar", id: project.id, filename: "image.png" get :show, model: "project", mounted_as: "avatar", id: project.id, filename: "image.png"
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
......
...@@ -18,7 +18,7 @@ describe Geo::AttachmentRegistryFinder, :geo, :truncate do ...@@ -18,7 +18,7 @@ describe Geo::AttachmentRegistryFinder, :geo, :truncate do
let(:upload_3) { create(:upload, :issuable_upload, model: synced_project) } let(:upload_3) { create(:upload, :issuable_upload, model: synced_project) }
let(:upload_4) { create(:upload, model: unsynced_project) } let(:upload_4) { create(:upload, model: unsynced_project) }
let(:upload_5) { create(:upload, model: synced_project) } let(:upload_5) { create(:upload, model: synced_project) }
let(:upload_6) { create(:upload, :personal_snippet) } let(:upload_6) { create(:upload, :personal_snippet_upload) }
let(:upload_7) { create(:upload, model: synced_subgroup) } let(:upload_7) { create(:upload, model: synced_subgroup) }
let(:lfs_object) { create(:lfs_object) } let(:lfs_object) { create(:lfs_object) }
......
...@@ -8,14 +8,14 @@ describe LfsObject do ...@@ -8,14 +8,14 @@ describe LfsObject do
expect(subject.local_store?).to eq true expect(subject.local_store?).to eq true
end end
it 'returns true when file_store is equal to LfsObjectUploader::LOCAL_STORE' do it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::LOCAL_STORE subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true expect(subject.local_store?).to eq true
end end
it 'returns false when file_store is equal to LfsObjectUploader::REMOTE_STORE' do it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::REMOTE_STORE subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false expect(subject.local_store?).to eq false
end end
......
require 'spec_helper' require 'spec_helper'
describe ObjectStorageUploadWorker do describe ObjectStorageUploadWorker do
let(:local) { ObjectStoreUploader::LOCAL_STORE } let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStoreUploader::REMOTE_STORE } let(:remote) { ObjectStorage::Store::REMOTE }
def perform def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id) described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
......
...@@ -6,7 +6,7 @@ FactoryGirl.define do ...@@ -6,7 +6,7 @@ FactoryGirl.define do
file_type :archive file_type :archive
trait :remote_store do trait :remote_store do
file_store JobArtifactUploader::REMOTE_STORE file_store JobArtifactUploader::Store::REMOTE
end end
after :build do |artifact| after :build do |artifact|
......
...@@ -116,11 +116,11 @@ FactoryGirl.define do ...@@ -116,11 +116,11 @@ FactoryGirl.define do
end end
trait :with_attachment do trait :with_attachment do
attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") } attachment { fixture_file_upload(Rails.root.join( "spec/fixtures/dk.png"), "image/png") }
end end
trait :with_svg_attachment do trait :with_svg_attachment do
attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") } attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
end end
transient do transient do
......
FactoryGirl.define do FactoryGirl.define do
factory :upload do factory :upload do
model { build(:project) } model { build(:project) }
path { "uploads/-/system/project/avatar/avatar.jpg" }
size 100.kilobytes size 100.kilobytes
uploader "AvatarUploader" uploader "AvatarUploader"
store ObjectStorage::Store::LOCAL
trait :personal_snippet do # we should build a mount agnostic upload by default
transient do
mounted_as :avatar
secret SecureRandom.hex
end
# this needs to comply with RecordsUploads::Concern#upload_path
path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') }
trait :personal_snippet_upload do
model { build(:personal_snippet) } model { build(:personal_snippet) }
path { File.join(secret, 'myfile.jpg') }
uploader "PersonalFileUploader" uploader "PersonalFileUploader"
end end
trait :issuable_upload do trait :issuable_upload do
path { "#{SecureRandom.hex}/myfile.jpg" } path { File.join(secret, 'myfile.jpg') }
uploader "FileUploader" uploader "FileUploader"
end end
trait :namespace_upload do trait :namespace_upload do
path { "#{SecureRandom.hex}/myfile.jpg" }
model { build(:group) } model { build(:group) }
path { File.join(secret, 'myfile.jpg') }
uploader "NamespaceFileUploader" uploader "NamespaceFileUploader"
end end
trait :attachment_upload do
transient do
mounted_as :attachment
end
model { build(:note) }
uploader "AttachmentUploader"
end
end end
end end
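# Usage sketch in a spec (trait names as defined above):
#
#   create(:upload)                           # avatar-style upload (default)
#   create(:upload, :personal_snippet_upload) # PersonalFileUploader path
#   create(:upload, :namespace_upload)        # NamespaceFileUploader path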
...@@ -11,7 +11,7 @@ describe Gitlab::Geo::FileTransfer do ...@@ -11,7 +11,7 @@ describe Gitlab::Geo::FileTransfer do
it 'sets an absolute path' do it 'sets an absolute path' do
expect(subject.file_type).to eq(:file) expect(subject.file_type).to eq(:file)
expect(subject.file_id).to eq(upload.id) expect(subject.file_id).to eq(upload.id)
expect(subject.filename).to eq(AvatarUploader.absolute_path(upload)) expect(subject.filename).to eq(upload.absolute_path)
expect(Pathname.new(subject.filename).absolute?).to be_truthy expect(Pathname.new(subject.filename).absolute?).to be_truthy
expect(subject.request_data).to eq({ id: upload.id, expect(subject.request_data).to eq({ id: upload.id,
type: 'User', type: 'User',
......
...@@ -285,7 +285,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared ...@@ -285,7 +285,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared
end end
it 'schedules a Geo::FileRemovalWorker' do it 'schedules a Geo::FileRemovalWorker' do
file_path = File.join(LfsObjectUploader.local_store_path, file_path = File.join(LfsObjectUploader.workhorse_upload_path,
lfs_object_deleted_event.file_path) lfs_object_deleted_event.file_path)
expect(::Geo::FileRemovalWorker).to receive(:perform_async) expect(::Geo::FileRemovalWorker).to receive(:perform_async)
......
...@@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do ...@@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do
end end
let(:text) do let(:text) do
"Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}" "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end end
describe '#rewrite' do describe '#rewrite' do
......
...@@ -4,7 +4,7 @@ describe Gitlab::ImportExport::UploadsRestorer do ...@@ -4,7 +4,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
describe 'bundle a project Git repo' do describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" } let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) } let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
let(:uploads_path) { FileUploader.dynamic_path_segment(project) } let(:uploads_path) { FileUploader.model_path_segment(project) }
before do before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
......
...@@ -21,7 +21,7 @@ describe Upload do ...@@ -21,7 +21,7 @@ describe Upload do
path: __FILE__, path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD + 1.kilobyte, size: described_class::CHECKSUM_THRESHOLD + 1.kilobyte,
model: build_stubbed(:user), model: build_stubbed(:user),
uploader: double('ExampleUploader') uploader: double('ExampleUploader'),
) )
expect(UploadChecksumWorker) expect(UploadChecksumWorker)
...@@ -35,7 +35,7 @@ describe Upload do ...@@ -35,7 +35,7 @@ describe Upload do
path: __FILE__, path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD, size: described_class::CHECKSUM_THRESHOLD,
model: build_stubbed(:user), model: build_stubbed(:user),
uploader: double('ExampleUploader') uploader: double('ExampleUploader'),
) )
expect { upload.save } expect { upload.save }
...@@ -51,7 +51,7 @@ describe Upload do ...@@ -51,7 +51,7 @@ describe Upload do
size: File.size(__FILE__), size: File.size(__FILE__),
path: __FILE__, path: __FILE__,
model: build_stubbed(:user), model: build_stubbed(:user),
uploader: 'AvatarUploader' uploader: 'AvatarUploader',
) )
expect { described_class.remove_path(__FILE__) } expect { described_class.remove_path(__FILE__) }
...@@ -63,26 +63,20 @@ describe Upload do ...@@ -63,26 +63,20 @@ describe Upload do
let(:fake_uploader) do let(:fake_uploader) do
double( double(
file: double(size: 12_345), file: double(size: 12_345),
relative_path: 'foo/bar.jpg', upload_path: 'foo/bar.jpg',
model: build_stubbed(:user), model: build_stubbed(:user),
class: 'AvatarUploader' class: 'AvatarUploader',
upload: nil
) )
end end
it 'removes existing paths before creation' do
expect(described_class).to receive(:remove_path)
.with(fake_uploader.relative_path)
described_class.record(fake_uploader)
end
it 'creates a new record and assigns size, path, model, and uploader' do it 'creates a new record and assigns size, path, model, and uploader' do
upload = described_class.record(fake_uploader) upload = described_class.record(fake_uploader)
aggregate_failures do aggregate_failures do
expect(upload).to be_persisted expect(upload).to be_persisted
expect(upload.size).to eq fake_uploader.file.size expect(upload.size).to eq fake_uploader.file.size
expect(upload.path).to eq fake_uploader.relative_path expect(upload.path).to eq fake_uploader.upload_path
expect(upload.model_id).to eq fake_uploader.model.id expect(upload.model_id).to eq fake_uploader.model.id
expect(upload.model_type).to eq fake_uploader.model.class.to_s expect(upload.model_type).to eq fake_uploader.model.class.to_s
expect(upload.uploader).to eq fake_uploader.class expect(upload.uploader).to eq fake_uploader.class
...@@ -90,18 +84,6 @@ describe Upload do ...@@ -90,18 +84,6 @@ describe Upload do
end end
end end
describe '.hexdigest' do
it 'calculates the SHA256 sum' do
expected = Digest::SHA256.file(__FILE__).hexdigest
expect(described_class.hexdigest(__FILE__)).to eq expected
end
it 'returns nil for a non-existent file' do
expect(described_class.hexdigest("#{__FILE__}-nope")).to be_nil
end
end
describe '#absolute_path' do describe '#absolute_path' do
it 'returns the path directly when already absolute' do it 'returns the path directly when already absolute' do
path = '/path/to/namespace/project/secret/file.jpg' path = '/path/to/namespace/project/secret/file.jpg'
...@@ -123,27 +105,27 @@ describe Upload do ...@@ -123,27 +105,27 @@ describe Upload do
end end
end end
describe '#calculate_checksum' do describe '#calculate_checksum!' do
it 'calculates the SHA256 sum' do let(:upload) do
upload = described_class.new( described_class.new(path: __FILE__,
path: __FILE__, size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
size: described_class::CHECKSUM_THRESHOLD - 1.megabyte end
)
it 'sets `checksum` to SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest expected = Digest::SHA256.file(__FILE__).hexdigest
expect { upload.calculate_checksum } expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(nil).to(expected) .to change { upload.checksum }.from(nil).to(expected)
end end
it 'returns nil for a non-existent file' do it 'sets `checksum` to nil for a non-existent file' do
upload = described_class.new(
path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
)
expect(upload).to receive(:exist?).and_return(false) expect(upload).to receive(:exist?).and_return(false)
expect(upload.calculate_checksum).to be_nil checksum = Digest::SHA256.file(__FILE__).hexdigest
upload.checksum = checksum
expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(checksum).to(nil)
end end
end end
......
...@@ -1154,7 +1154,7 @@ describe API::Runner do ...@@ -1154,7 +1154,7 @@ describe API::Runner do
context 'when job has artifacts' do context 'when job has artifacts' do
let(:job) { create(:ci_build) } let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::LOCAL_STORE } let(:store) { JobArtifactUploader::Store::LOCAL }
before do before do
create(:ci_job_artifact, :archive, file_store: store, job: job) create(:ci_job_artifact, :archive, file_store: store, job: job)
...@@ -1176,7 +1176,7 @@ describe API::Runner do ...@@ -1176,7 +1176,7 @@ describe API::Runner do
end end
context 'when artifacts are stored remotely' do context 'when artifacts are stored remotely' do
let(:store) { JobArtifactUploader::REMOTE_STORE } let(:store) { JobArtifactUploader::Store::REMOTE }
let!(:job) { create(:ci_build) } let!(:job) { create(:ci_build) }
it 'download artifacts' do it 'download artifacts' do
......
...@@ -245,7 +245,7 @@ describe 'Git LFS API and storage' do ...@@ -245,7 +245,7 @@ describe 'Git LFS API and storage' do
context 'when LFS uses object storage' do context 'when LFS uses object storage' do
let(:before_get) do let(:before_get) do
stub_lfs_object_storage stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE) lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end end
it 'responds with redirect' do it 'responds with redirect' do
......
...@@ -98,7 +98,7 @@ describe Geo::FileDownloadService do ...@@ -98,7 +98,7 @@ describe Geo::FileDownloadService do
end end
context 'with a snippet' do context 'with a snippet' do
let(:upload) { create(:upload, :personal_snippet) } let(:upload) { create(:upload, :personal_snippet_upload) }
subject(:execute!) { described_class.new(:personal_file, upload.id).execute } subject(:execute!) { described_class.new(:personal_file, upload.id).execute }
......
...@@ -9,7 +9,7 @@ describe Geo::FilesExpireService, :geo, :truncate do ...@@ -9,7 +9,7 @@ describe Geo::FilesExpireService, :geo, :truncate do
describe '#execute' do describe '#execute' do
let(:file_uploader) { build(:file_uploader, project: project) } let(:file_uploader) { build(:file_uploader, project: project) }
let!(:upload) { Upload.find_by(path: file_uploader.relative_path) } let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) } let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) }
before do before do
......
...@@ -6,7 +6,7 @@ describe Geo::HashedStorageAttachmentsMigrationService do ...@@ -6,7 +6,7 @@ describe Geo::HashedStorageAttachmentsMigrationService do
let(:legacy_storage) { Storage::LegacyProject.new(project) } let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) } let(:hashed_storage) { Storage::HashedProject.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.relative_path) } let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) } let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) } let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) } let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
......
...@@ -244,7 +244,7 @@ describe Issues::MoveService do ...@@ -244,7 +244,7 @@ describe Issues::MoveService do
context 'issue description with uploads' do context 'issue description with uploads' do
let(:uploader) { build(:file_uploader, project: old_project) } let(:uploader) { build(:file_uploader, project: old_project) }
let(:description) { "Text and #{uploader.to_markdown}" } let(:description) { "Text and #{uploader.markdown_link}" }
include_context 'issue move executed' include_context 'issue move executed'
......
...@@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do ...@@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
let(:legacy_storage) { Storage::LegacyProject.new(project) } let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) } let(:hashed_storage) { Storage::HashedProject.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.relative_path) } let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) } let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) } let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) } let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
......
...@@ -30,4 +30,11 @@ module StubConfiguration ...@@ -30,4 +30,11 @@ module StubConfiguration
remote_directory: 'lfs-objects', remote_directory: 'lfs-objects',
**params) **params)
end end
def stub_uploads_object_storage(uploader = described_class, **params)
stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
uploader: uploader,
remote_directory: 'uploads',
**params)
end
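# Usage sketch in an uploader spec (extra keyword params are forwarded to
# stub_object_storage_uploader):
#
#   before do
#     stub_uploads_object_storage(FileUploader)
#   end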
end end
...@@ -236,7 +236,7 @@ module TestEnv ...@@ -236,7 +236,7 @@ module TestEnv
end end
def artifacts_path def artifacts_path
Gitlab.config.artifacts.path Gitlab.config.artifacts.storage_path
end end
# When no cached assets exist, manually hit the root path to create them # When no cached assets exist, manually hit the root path to create them
......
...@@ -18,7 +18,7 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -18,7 +18,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) } let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do context 'when local storage is used' do
let(:store) { ObjectStoreUploader::LOCAL_STORE } let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do context 'and job does not have file store defined' do
let(:object_storage_enabled) { true } let(:object_storage_enabled) { true }
...@@ -27,8 +27,8 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -27,8 +27,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do it "migrates file to remote storage" do
subject subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end end
end end
...@@ -38,8 +38,8 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -38,8 +38,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do it "migrates file to remote storage" do
subject subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end end
end end
...@@ -47,8 +47,8 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -47,8 +47,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do it "fails to migrate to remote storage" do
subject subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE) expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE) expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
end end
end end
end end
...@@ -56,13 +56,13 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -56,13 +56,13 @@ describe 'gitlab:artifacts namespace rake task' do
context 'when remote storage is used' do context 'when remote storage is used' do
let(:object_storage_enabled) { true } let(:object_storage_enabled) { true }
let(:store) { ObjectStoreUploader::REMOTE_STORE } let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do it "file stays on remote storage" do
subject subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end end
end end
end end
...@@ -72,7 +72,7 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -72,7 +72,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) } let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do context 'when local storage is used' do
let(:store) { ObjectStoreUploader::LOCAL_STORE } let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do context 'and job does not have file store defined' do
let(:object_storage_enabled) { true } let(:object_storage_enabled) { true }
...@@ -81,7 +81,7 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -81,7 +81,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do it "migrates file to remote storage" do
subject subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end end
end end
...@@ -91,7 +91,7 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -91,7 +91,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do it "migrates file to remote storage" do
subject subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end end
end end
...@@ -99,19 +99,19 @@ describe 'gitlab:artifacts namespace rake task' do ...@@ -99,19 +99,19 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do it "fails to migrate to remote storage" do
subject subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE) expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
end end
end end
end end
context 'when remote storage is used' do context 'when remote storage is used' do
let(:object_storage_enabled) { true } let(:object_storage_enabled) { true }
let(:store) { ObjectStoreUploader::REMOTE_STORE } let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do it "file stays on remote storage" do
subject subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE) expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end end
end end
end end
......
...@@ -6,8 +6,8 @@ describe 'gitlab:lfs namespace rake task' do ...@@ -6,8 +6,8 @@ describe 'gitlab:lfs namespace rake task' do
end end
describe 'migrate' do describe 'migrate' do
let(:local) { ObjectStoreUploader::LOCAL_STORE } let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStoreUploader::REMOTE_STORE } let(:remote) { ObjectStorage::Store::REMOTE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) } let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate def lfs_migrate
......
require 'spec_helper' require 'spec_helper'
describe AttachmentUploader do describe AttachmentUploader do
let(:uploader) { described_class.new(build_stubbed(:user)) } let(:uploader) { described_class.new(build_stubbed(:user), :attachment) }
let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
describe "#store_dir" do subject { uploader }
it "stores in the system dir" do
expect(uploader.store_dir).to start_with("uploads/-/system/user")
end
it "uses the old path when using object storage" do it_behaves_like 'builds correct paths',
expect(described_class).to receive(:file_storage?).and_return(false) store_dir: %r[uploads/-/system/user/attachment/],
expect(uploader.store_dir).to start_with("uploads/user") upload_path: %r[uploads/-/system/user/attachment/],
end absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/attachment/]
end
describe '#move_to_cache' do describe '#move_to_cache' do
it 'is true' do it 'is true' do
...@@ -25,4 +22,17 @@ describe AttachmentUploader do ...@@ -25,4 +22,17 @@ describe AttachmentUploader do
expect(uploader.move_to_store).to eq(true) expect(uploader.move_to_store).to eq(true)
end end
end end
# EE-specific
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[user/attachment/],
upload_path: %r[user/attachment/]
end
end end
require 'spec_helper' require 'spec_helper'
describe AvatarUploader do describe AvatarUploader do
let(:uploader) { described_class.new(build_stubbed(:user)) } let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) }
describe "#store_dir" do subject { uploader }
it "stores in the system dir" do
expect(uploader.store_dir).to start_with("uploads/-/system/user")
end
it "uses the old path when using object storage" do it_behaves_like 'builds correct paths',
expect(described_class).to receive(:file_storage?).and_return(false) store_dir: %r[uploads/-/system/user/avatar/],
expect(uploader.store_dir).to start_with("uploads/user") upload_path: %r[uploads/-/system/user/avatar/],
end absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
end
describe '#move_to_cache' do describe '#move_to_cache' do
it 'is false' do it 'is false' do
...@@ -25,4 +23,17 @@ describe AvatarUploader do ...@@ -25,4 +23,17 @@ describe AvatarUploader do
expect(uploader.move_to_store).to eq(false) expect(uploader.move_to_store).to eq(false)
end end
end end
# EE-specific
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[user/avatar/],
upload_path: %r[user/avatar/]
end
end end
...@@ -3,13 +3,13 @@ require 'spec_helper' ...@@ -3,13 +3,13 @@ require 'spec_helper'
describe FileMover do describe FileMover do
let(:filename) { 'banana_sample.gif' } let(:filename) { 'banana_sample.gif' }
let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) } let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
let(:temp_description) do let(:temp_description) do
'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\ "test ![banana_sample](/#{temp_file_path}) "\
'(/uploads/-/system/temp/secret55/banana_sample.gif)' "same ![banana_sample](/#{temp_file_path}) "
end end
let(:temp_file_path) { File.join('secret55', filename).to_s } let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
let(:snippet) { create(:personal_snippet, description: temp_description) } let(:snippet) { create(:personal_snippet, description: temp_description) }
subject { described_class.new(file_path, snippet).execute } subject { described_class.new(file_path, snippet).execute }
...@@ -24,12 +24,13 @@ describe FileMover do ...@@ -24,12 +24,13 @@ describe FileMover do
context 'when move and field update successful' do context 'when move and field update successful' do
it 'updates the description correctly' do it 'updates the description correctly' do
subject subject
expect(snippet.reload.description) expect(snippet.reload.description)
.to eq( .to eq(
"test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\ "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\
" same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)" "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "
) )
end end
...@@ -50,8 +51,8 @@ describe FileMover do ...@@ -50,8 +51,8 @@ describe FileMover do
expect(snippet.reload.description) expect(snippet.reload.description)
.to eq( .to eq(
"test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\ "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\
" same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)" "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "
) )
end end
......
require 'spec_helper' require 'spec_helper'
describe FileUploader do describe FileUploader do
let(:uploader) { described_class.new(build_stubbed(:project)) } let(:group) { create(:group, name: 'awesome') }
let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
let(:uploader) { described_class.new(project) }
let(:upload) { double(model: project, path: 'secret/foo.jpg') }
context 'legacy storage' do subject { uploader }
let(:project) { build_stubbed(:project) }
describe '.absolute_path' do
it 'returns the correct absolute path by building it dynamically' do
upload = double(model: project, path: 'secret/foo.jpg')
dynamic_segment = project.full_path
expect(described_class.absolute_path(upload)) shared_examples 'builds correct legacy storage paths' do
.to end_with("#{dynamic_segment}/secret/foo.jpg") include_examples 'builds correct paths',
end store_dir: %r{awesome/project/\h+},
end absolute_path: %r{#{CarrierWave.root}/awesome/project/secret/foo.jpg}
describe "#store_dir" do
it "stores in the namespace path" do
uploader = described_class.new(project)
expect(uploader.store_dir).to include(project.full_path)
expect(uploader.store_dir).not_to include("system")
end
end
end end
context 'hashed storage' do shared_examples 'uses hashed storage' do
context 'when rolled out attachments' do context 'when rolled out attachments' do
let(:project) { build_stubbed(:project, :hashed) } before do
expect(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
describe '.absolute_path' do
it 'returns the correct absolute path by building it dynamically' do
upload = double(model: project, path: 'secret/foo.jpg')
dynamic_segment = project.disk_path
expect(described_class.absolute_path(upload))
.to end_with("#{dynamic_segment}/secret/foo.jpg")
end
end end
describe "#store_dir" do let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
it "stores in the namespace path" do
uploader = described_class.new(project)
expect(uploader.store_dir).to include(project.disk_path) it_behaves_like 'builds correct paths',
expect(uploader.store_dir).not_to include("system") store_dir: %r{ca/fe/fe/ed/\h+},
end absolute_path: %r{#{CarrierWave.root}/ca/fe/fe/ed/secret/foo.jpg}
end
end end
context 'when only repositories are rolled out' do context 'when only repositories are rolled out' do
let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
describe '.absolute_path' do it_behaves_like 'builds correct legacy storage paths'
it 'returns the correct absolute path by building it dynamically' do end
upload = double(model: project, path: 'secret/foo.jpg') end
dynamic_segment = project.full_path context 'legacy storage' do
it_behaves_like 'builds correct legacy storage paths'
include_examples 'uses hashed storage'
end
expect(described_class.absolute_path(upload)) context 'object store is remote' do
.to end_with("#{dynamic_segment}/secret/foo.jpg") before do
end stub_uploads_object_storage
end end
describe "#store_dir" do include_context 'with storage', described_class::Store::REMOTE
it "stores in the namespace path" do
uploader = described_class.new(project)
expect(uploader.store_dir).to include(project.full_path) it_behaves_like 'builds correct legacy storage paths'
expect(uploader.store_dir).not_to include("system") include_examples 'uses hashed storage'
end
end
end
end end
describe 'initialize' do describe 'initialize' do
it 'generates a secret if none is provided' do let(:uploader) { described_class.new(double, 'secret') }
expect(SecureRandom).to receive(:hex).and_return('secret')
uploader = described_class.new(double)
expect(uploader.secret).to eq 'secret'
end
it 'accepts a secret parameter' do it 'accepts a secret parameter' do
expect(SecureRandom).not_to receive(:hex) expect(uploader).not_to receive(:generate_secret)
expect(uploader.secret).to eq('secret')
uploader = described_class.new(double, 'secret') end
end
expect(uploader.secret).to eq 'secret' describe '#secret' do
it 'generates a secret if none is provided' do
expect(uploader).to receive(:generate_secret).and_return('secret')
expect(uploader.secret).to eq('secret')
end end
end end
...@@ -106,13 +77,4 @@ describe FileUploader do ...@@ -106,13 +77,4 @@ describe FileUploader do
expect(uploader.move_to_store).to eq(true) expect(uploader.move_to_store).to eq(true)
end end
end end
describe '#relative_path' do
it 'removes the leading dynamic path segment' do
fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
uploader.store!(fixture_file_upload(fixture))
expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/)
end
end
end end
require 'spec_helper' require 'spec_helper'
describe JobArtifactUploader do describe JobArtifactUploader do
let(:store) { described_class::LOCAL_STORE } let(:store) { described_class::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) } let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) } let(:uploader) { described_class.new(job_artifact, :file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '#store_dir' do subject { uploader }
subject { uploader.store_dir }
let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" } it_behaves_like "builds correct paths",
base_dir: %r[artifacts],
store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
context 'when using local storage' do context "object store is REMOTE" do
it { is_expected.to start_with(local_path) } before do
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) } stub_artifacts_object_storage
it { is_expected.to end_with(path) }
end
context 'when using remote storage' do
let(:store) { described_class::REMOTE_STORE }
before do
stub_artifacts_object_storage
end
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
it { is_expected.to end_with(path) }
end end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do include_context 'with storage', described_class::Store::REMOTE
subject { uploader.work_dir }
it { is_expected.to start_with(local_path) } it_behaves_like "builds correct paths",
it { is_expected.to end_with('/tmp/work') } store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end end
context 'file is stored in valid local_path' do context 'file is stored in valid local_path' do
...@@ -55,7 +36,7 @@ describe JobArtifactUploader do ...@@ -55,7 +36,7 @@ describe JobArtifactUploader do
subject { uploader.file.path } subject { uploader.file.path }
it { is_expected.to start_with(local_path) } it { is_expected.to start_with("#{uploader.root}/artifacts") }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") } it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") } it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") } it { is_expected.to end_with("ci_build_artifacts.zip") }
......
require 'rails_helper' require 'rails_helper'
describe LegacyArtifactUploader do describe LegacyArtifactUploader do
let(:store) { described_class::LOCAL_STORE } let(:store) { described_class::Store::LOCAL }
let(:job) { create(:ci_build, artifacts_file_store: store) } let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) } let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { Gitlab.config.artifacts.path } let(:local_path) { described_class.root }
describe '.local_store_path' do subject { uploader }
subject { described_class.local_store_path }
it "delegate to artifacts path" do
expect(Gitlab.config.artifacts).to receive(:path)
subject
end
end
# TODO: move to Workhorse::UploadPath
describe '.artifacts_upload_path' do describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path } subject { described_class.workhorse_upload_path }
it { is_expected.to start_with(local_path) } it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') } it { is_expected.to end_with('tmp/uploads/') }
end end
describe '#store_dir' do it_behaves_like "builds correct paths",
subject { uploader.store_dir } base_dir: %r[artifacts],
store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" } cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with(path) }
end
context 'when using remote storage' do
let(:store) { described_class::REMOTE_STORE }
before do
stub_artifacts_object_storage
end
it { is_expected.to eq(path) } context 'object store is remote' do
before do
stub_artifacts_object_storage
end end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do include_context 'with storage', described_class::Store::REMOTE
subject { uploader.work_dir }
it { is_expected.to start_with(local_path) } it_behaves_like "builds correct paths",
it { is_expected.to end_with('/tmp/work') } store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
end end
describe '#filename' do describe '#filename' do
...@@ -80,7 +55,7 @@ describe LegacyArtifactUploader do ...@@ -80,7 +55,7 @@ describe LegacyArtifactUploader do
subject { uploader.file.path } subject { uploader.file.path }
it { is_expected.to start_with(local_path) } it { is_expected.to start_with("#{uploader.root}/artifacts") }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") } it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") } it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") } it { is_expected.to end_with("ci_build_artifacts.zip") }
......
...@@ -73,7 +73,7 @@ describe LfsObjectUploader do ...@@ -73,7 +73,7 @@ describe LfsObjectUploader do
end end
describe 'remote file' do describe 'remote file' do
let(:remote) { described_class::REMOTE_STORE } let(:remote) { described_class::Store::REMOTE }
let(:lfs_object) { create(:lfs_object, file_store: remote) } let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do context 'with object storage enabled' do
......
require 'spec_helper' require 'spec_helper'
IDENTIFIER = %r{\h+/\S+}
describe NamespaceFileUploader do describe NamespaceFileUploader do
let(:group) { build_stubbed(:group) } let(:group) { build_stubbed(:group) }
let(:uploader) { described_class.new(group) } let(:uploader) { described_class.new(group) }
let(:upload) { create(:upload, :namespace_upload, model: group) }
describe "#store_dir" do subject { uploader }
it "stores in the namespace id directory" do
expect(uploader.store_dir).to include(group.id.to_s)
end
end
describe ".absolute_path" do it_behaves_like 'builds correct paths',
it "stores in thecorrect directory" do store_dir: %r[uploads/-/system/namespace/\d+],
upload_record = create(:upload, :namespace_upload, model: group) upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
expect(described_class.absolute_path(upload_record)) # EE-specific
.to include("-/system/namespace/#{group.id}") context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[namespace/\d+/\h+],
upload_path: IDENTIFIER
end end
end end
require 'rails_helper' require 'rails_helper'
require 'carrierwave/storage/fog' require 'carrierwave/storage/fog'
describe ObjectStoreUploader do class Implementation < GitlabUploader
let(:uploader_class) { Class.new(described_class) } include ObjectStorage::Concern
let(:object) { double } include ::RecordsUploads::Concern
prepend ::ObjectStorage::Extension::RecordsUploads
storage_options Gitlab.config.uploads
private
# user/:id
def dynamic_segment
File.join(model.class.to_s.underscore, model.id.to_s)
end
end
describe ObjectStorage do
let(:uploader_class) { Implementation }
let(:object) { build_stubbed(:user) }
let(:uploader) { uploader_class.new(object, :file) } let(:uploader) { uploader_class.new(object, :file) }
before do before do
allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil } allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
end end
describe '#object_store' do describe '#object_store=' do
it "calls artifacts_file_store on object" do it "reload the local storage" do
expect(object).to receive(:file_store) uploader.object_store = described_class::Store::LOCAL
expect(uploader.file_storage?).to be_truthy
end
uploader.object_store it "reload the REMOTE storage" do
uploader.object_store = described_class::Store::REMOTE
expect(uploader.file_storage?).to be_falsey
end end
end
context 'when store is null' do context 'object_store is Store::LOCAL' do
before do before do
expect(object).to receive(:file_store).twice.and_return(nil) uploader.object_store = described_class::Store::LOCAL
end end
it "returns LOCAL_STORE" do describe '#store_dir' do
expect(uploader.real_object_store).to be_nil it 'is the composition of (base_dir, dynamic_segment)' do
expect(uploader.object_store).to eq(described_class::LOCAL_STORE) expect(uploader.store_dir).to start_with("uploads/-/system/user/")
end end
end end
end
context 'when value is set' do context 'object_store is Store::REMOTE' do
before do before do
expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE) uploader.object_store = described_class::Store::REMOTE
end end
it "returns given value" do describe '#store_dir' do
expect(uploader.real_object_store).not_to be_nil it 'is the composition of (dynamic_segment)' do
expect(uploader.object_store).to eq(described_class::REMOTE_STORE) expect(uploader.store_dir).to start_with("user/")
end end
end end
end end
describe '#object_store=' do describe '#object_store' do
it "calls artifacts_file_store= on object" do it "delegates to <mount>_store on model" do
expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE) expect(object).to receive(:file_store)
uploader.object_store = described_class::REMOTE_STORE uploader.object_store
end end
end
describe '#file_storage?' do context 'when store is null' do
context 'when file storage is used' do
before do before do
expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE) expect(object).to receive(:file_store).and_return(nil)
end end
it { expect(uploader).to be_file_storage } it "returns Store::LOCAL" do
expect(uploader.object_store).to eq(described_class::Store::LOCAL)
end
end end
context 'when is remote storage' do context 'when value is set' do
before do before do
uploader_class.storage_options double( expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
object_store: double(enabled: true))
expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
end end
it { expect(uploader).not_to be_file_storage } it "returns given value" do
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
end end
end end
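The storage-switching behavior these examples pin down could be implemented roughly like this. A sketch, not the verbatim concern: `storage_for` and the exact `store_dir` composition are assumptions, while the LOCAL default and the base_dir prefix come straight from the specs above.

  def object_store=(value)
    @object_store = value || Store::LOCAL
    @storage = storage_for(object_store) # reload the CarrierWave storage engine
  end

  def file_storage?
    storage.is_a?(CarrierWave::Storage::File)
  end

  def store_dir
    # LOCAL paths get the base_dir prefix ("uploads/-/system"); REMOTE paths
    # start directly at the dynamic segment, e.g. "user/42"
    file_storage? ? File.join(base_dir, dynamic_segment) : dynamic_segment
  end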
@@ -86,152 +107,185 @@ describe ObjectStoreUploader do
    end
  end
  # this means the model shall include
  #   include RecordsUpload::Concern
  #   prepend ObjectStorage::Extension::RecordsUploads
  # the object_store persistence is delegated to the `Upload` model.
  #
  # this also implies a <mounted_as>_uploader method can be implemented to
  # correctly fetch the upload.
  #
  context 'when persist_object_store? is false' do
    let(:object) { create(:project, :with_avatar) }
    let(:uploader_class) { AvatarUploader }
    let(:uploader) { uploader_class.new(object, :avatar) }
    # let(:upload) { create(:upload, model: project) }

    it { expect(object).to be_a(Avatarable) }
    it { expect(uploader.persist_object_store?).to be_falsey }

    describe 'delegates the object_store logic to the `Upload` model' do
      it 'calls the <mounted_as>_uploader hook' do
        expect(object).to receive(:avatar_uploader)

        expect(uploader).to be
      end

      it 'sets @upload to the found `upload`' do
        expect(uploader.upload).to eq(uploader.upload)
      end

      it 'sets @object_store to the `Upload` value' do
        expect(uploader.object_store).to eq(uploader.upload.store)
      end
    end
  end
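In other words, when `persist_object_store?` is false the store value is read back from the `Upload` record rather than from a column on the model. A hedged sketch of that resolution; apart from `upload.store` and the `<mounted_as>_store` delegation, the method names are assumptions:

  def object_store
    @object_store ||=
      if persist_object_store?
        model.public_send(:"#{mounted_as}_store") || Store::LOCAL # e.g. file_store
      else
        upload&.store || Store::LOCAL # delegated to the Upload record
      end
  end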
  # TODO: persist_object_store? is true
  # this means the model holds an <mounted_as>_store attribute directly
  # and does not delegate the object_store persistence to the `Upload` model.
  #
  context 'persist_object_store? is true' do
    context 'when using JobArtifactsUploader' do
      let(:store) { described_class::Store::LOCAL }
      let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
      let(:uploader) { object.file }

      context 'checking described_class' do
        it "uploader includes described_class::Concern" do
          expect(uploader).to be_a(described_class::Concern)
        end
      end

      it 'moves files locally' do
        expect(uploader.move_to_store).to be(true)
        expect(uploader.move_to_cache).to be(true)
      end
      describe '#use_file' do
        context 'when file is stored locally' do
          it "calls a regular path" do
            expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
          end
        end

        context 'when file is stored remotely' do
          let(:store) { described_class::Store::REMOTE }

          before do
            stub_artifacts_object_storage
          end

          it "calls a cache path" do
            expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
          end
        end
      end
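`#use_file` guarantees the block always receives a local path, which explains the two expectations above. A sketch of that contract, assuming CarrierWave's `cache_stored_file!` is what pulls the remote object down:

  def use_file
    return yield(path) if file_storage? # local file: yield the regular path

    cache_stored_file!                  # download the remote object into tmp/cache
    yield cache_path                    # hence the /tmp\/cache/ expectation above
  end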
      describe '#migrate!' do
        subject { uploader.migrate!(new_store) }

        shared_examples "updates the underlying <mounted>_store" do
          it do
            subject

            expect(object.file_store).to eq(new_store)
          end
        end

        context 'when using the same storage' do
          let(:new_store) { store }

          it "does not migrate the storage" do
            expect(uploader).not_to receive(:store!)

            subject

            expect(uploader.object_store).to eq(store)
          end
        end

        context 'when migrating to local storage' do
          let(:store) { described_class::Store::REMOTE }
          let(:new_store) { described_class::Store::LOCAL }

          before do
            stub_artifacts_object_storage
          end

          include_examples "updates the underlying <mounted>_store"

          it "local file does not exist" do
            expect(File.exist?(uploader.path)).to eq(false)
          end

          it "remote file exists" do
            expect(uploader.file.exists?).to be_truthy
          end

          it "does migrate the file" do
            subject

            expect(uploader.object_store).to eq(new_store)
            expect(File.exist?(uploader.path)).to eq(true)
          end
        end

        context 'when migrating to remote storage' do
          let(:new_store) { described_class::Store::REMOTE }
          let!(:current_path) { uploader.path }

          it "file does exist" do
            expect(File.exist?(current_path)).to eq(true)
          end

          context 'when storage is disabled' do
            before do
              stub_artifacts_object_storage(enabled: false)
            end

            it "raises an error" do
              expect { subject }.to raise_error(/Object Storage is not enabled/)
            end
          end

          context 'when storage is unlicensed' do
            before do
              stub_artifacts_object_storage(licensed: false)
            end

            it "raises an error" do
              expect { subject }.to raise_error(/Object Storage feature is missing/)
            end
          end

          context 'when credentials are set' do
            before do
              stub_artifacts_object_storage
            end

            include_examples "updates the underlying <mounted>_store"

            it "does migrate the file" do
              subject

              expect(uploader.object_store).to eq(new_store)
            end

            it "does delete original file" do
              subject

              expect(File.exist?(current_path)).to eq(false)
            end

            context 'when subject save fails' do
              before do
                expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
              end

              it "original file is not removed" do
                expect { subject }.to raise_error(/exception/)

                expect(File.exist?(current_path)).to eq(true)
              end
            end
          end
        end
      end
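Taken together, the `#migrate!` examples pin down an order of operations: guard clauses first, then re-upload, then persist, then delete, so that a failing `persist_object_store!` leaves the original file in place. Sketched below; `store!`, `persist_object_store!`, `object_store_enabled?` and both error messages come from the specs, the rest is an assumption about the plumbing:

  def migrate!(new_store)
    return if object_store == new_store # same storage: nothing to migrate

    unless new_store == Store::LOCAL
      raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
      raise 'Object Storage feature is missing' unless licensed? # see license specs below
    end

    file_to_delete = file
    self.object_store = new_store # swap the storage engine
    store!(file_to_delete)        # re-upload into the new backend

    persist_object_store!         # write <mounted_as>_store (or Upload#store)
    file_to_delete.delete         # only reached when persistence succeeded
  end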
@@ -243,8 +297,7 @@ describe ObjectStoreUploader do
    let(:remote_directory) { 'directory' }

    before do
      uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
    end

    subject { uploader.fog_directory }
@@ -256,8 +309,7 @@ describe ObjectStoreUploader do
    let(:connection) { Settingslogic.new("provider" => "AWS") }

    before do
      uploader_class.storage_options double(object_store: double(connection: connection))
    end

    subject { uploader.fog_credentials }
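Both fog helpers are thin readers over the class-level `storage_options`, which is why a nested double is enough to stub them. Roughly, assuming a `storage_options` reader that mirrors the writer used in these `before` blocks:

  def fog_directory
    self.class.storage_options.object_store.remote_directory
  end

  def fog_credentials
    self.class.storage_options.object_store.connection
  end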
@@ -276,7 +328,7 @@ describe ObjectStoreUploader do
  context 'when using local storage' do
    before do
      expect(object).to receive(:file_store) { described_class::Store::LOCAL }
    end

    it "does not raise an error" do
@@ -286,14 +338,13 @@ describe ObjectStoreUploader do
  context 'when using remote storage' do
    before do
      uploader_class.storage_options double(object_store: double(enabled: true))
      expect(object).to receive(:file_store) { described_class::Store::REMOTE }
    end

    context 'feature is not available' do
      before do
        expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
      end

      it "does raise an error" do
@@ -303,7 +354,7 @@ describe ObjectStoreUploader do
    context 'feature is available' do
      before do
        expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
      end

      it "does not raise an error" do
...
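The license gate exercised above reduces to a single check. Whatever helper raises here (its name is hidden by the collapsed context above), it presumably looks like this sketch, with only the error message and the `License.feature_available?(:object_storage)` call taken from the specs:

  def verify_license!
    return if file_storage? # local storage needs no object-storage license

    raise 'Object Storage feature is missing' unless License.feature_available?(:object_storage)
  end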
require 'spec_helper'

IDENTIFIER = %r{\h+/\S+}

describe PersonalFileUploader do
  let(:model) { create(:personal_snippet) }
  let(:uploader) { described_class.new(model) }
  let(:upload) { create(:upload, :personal_snippet_upload) }

  subject { uploader }

  it_behaves_like 'builds correct paths',
                  store_dir: %r[uploads/-/system/personal_snippet/\d+],
                  upload_path: IDENTIFIER,
                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]

  # EE-specific
  context "object_store is REMOTE" do
    before do
      stub_uploads_object_storage
    end

    include_context 'with storage', described_class::Store::REMOTE

    it_behaves_like 'builds correct paths',
                    store_dir: %r[\d+/\h+],
                    upload_path: IDENTIFIER
  end

  describe '#to_h' do
    before do
      subject.instance_variable_set(:@secret, 'secret')
    end

    it 'is correct' do
      allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
      expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"

      expect(uploader.to_h).to eq(
        alt: 'file_name',
...
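The `expected_url` above composes the model id, the uploader's `@secret` and the filename. The rest of the hash is truncated here, so only the `alt:` key and the URL shape are certain; hypothetically, the URL builder is something like:

  def secure_url # hypothetical name
    File.join('/uploads/-/system/personal_snippet', model.id.to_s, @secret, file.filename)
  end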
@@ -3,7 +3,7 @@ require 'rails_helper'
describe RecordsUploads do
  let!(:uploader) do
    class RecordsUploadsExampleUploader < GitlabUploader
      include RecordsUploads::Concern

      storage :file
@@ -20,29 +20,27 @@ describe RecordsUploads do
  end

  describe 'callbacks' do
    it '#record_upload after `store`' do
      expect(uploader).to receive(:record_upload).once

      uploader.store!(upload_fixture('doc_sample.txt'))
    end

    it '#destroy_upload before `store`' do
      expect(uploader).to receive(:destroy_upload).once

      uploader.store!(upload_fixture('doc_sample.txt'))
    end

    it '#destroy_upload after `remove`' do
      uploader.store!(upload_fixture('doc_sample.txt'))

      expect(uploader).to receive(:destroy_upload).once

      uploader.remove!
    end
  end

  describe '#record_upload callback' do
    it "returns early when the file doesn't exist" do
      allow(uploader).to receive(:file).and_return(double(exists?: false))

      expect(Upload).not_to receive(:record)
@@ -75,20 +73,11 @@ describe RecordsUploads do
      uploader.store!(upload_fixture('rails_sample.jpg'))

      expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
      expect(Upload.count).to eq(1)
    end
  end

  describe '#destroy_upload callback' do
    it 'returns early when file is nil' do
      expect(Upload).not_to receive(:remove_path)
...
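The callback wiring these examples describe, sketched against CarrierWave's uploader hooks. `Upload.record`, `Upload.remove_path` and the early returns come from the expectations above; `relative_path` is an assumption:

  included do
    after :store, :record_upload    # fires once per store!
    before :store, :destroy_upload  # drop a stale Upload row for the same path
    after :remove, :destroy_upload  # fires once per remove!
  end

  def record_upload(*)
    return unless file&.exists? # returns early when the file doesn't exist

    Upload.record(self)
  end

  def destroy_upload(*)
    return unless file # returns early when file is nil

    Upload.remove_path(relative_path)
  end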
@@ -54,7 +54,7 @@ describe Geo::FileDownloadDispatchWorker, :geo, :truncate do
    before do
      stub_lfs_object_storage

      lfs_object_remote_store.file.migrate!(LfsObjectUploader::Store::REMOTE)
    end

    it 'filters S3-backed files' do
@@ -78,7 +78,7 @@ describe Geo::FileDownloadDispatchWorker, :geo, :truncate do
      create_list(:lfs_object, 2, :with_file)
      create_list(:user, 2, avatar: avatar)
      create_list(:note, 2, :with_attachment)
      create_list(:upload, 2, :personal_snippet_upload)
      create(:appearance, logo: avatar, header_logo: avatar)

      expect(Geo::FileDownloadWorker).to receive(:perform_async).exactly(10).times.and_call_original
...
@@ -2,18 +2,31 @@ require 'rails_helper'
describe UploadChecksumWorker do
  describe '#perform' do
    subject { described_class.new }

    context 'without a valid record' do
      it 'rescues ActiveRecord::RecordNotFound' do
        expect { subject.perform(999_999) }.not_to raise_error
      end
    end

    context 'with a valid record' do
      let(:upload) { create(:upload) }

      before do
        expect(Upload).to receive(:find).and_return(upload)
        expect(upload).to receive(:foreground_checksum?).and_return(false)
      end

      it 'calls calculate_checksum!' do
        expect(upload).to receive(:calculate_checksum!)
        subject.perform(upload.id)
      end

      it 'calls save!' do
        expect(upload).to receive(:save!)
        subject.perform(upload.id)
      end
    end
  end
end
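Put together, the worker these examples describe is essentially the sketch below. The `foreground_checksum?` guard, `calculate_checksum!`, `save!` and the rescued exception come from the specs; the `ApplicationWorker` include and the empty rescue body are assumptions:

class UploadChecksumWorker
  include ApplicationWorker

  def perform(upload_id)
    upload = Upload.find(upload_id)
    return if upload.foreground_checksum? # checksum was computed synchronously

    upload.calculate_checksum!
    upload.save!
  rescue ActiveRecord::RecordNotFound
    # the upload was deleted before the job ran; nothing to do
  end
end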