Commit 66d28737 authored by Ian Baum

Fix conflicts with app/models/ci/job_artifact.rb and app/uploaders/file_uploader.rb

# Conflicts:
#   app/models/ci/job_artifact.rb
#   app/uploaders/file_uploader.rb
parents 4099ebae 8b200096
...@@ -6,6 +6,7 @@ class Projects::GitHttpController < Projects::GitHttpClientController
rescue_from Gitlab::GitAccess::UnauthorizedError, with: :render_403
rescue_from Gitlab::GitAccess::NotFoundError, with: :render_404
rescue_from Gitlab::GitAccess::ProjectCreationError, with: :render_422
# GET /foo/bar.git/info/refs?service=git-upload-pack (git pull)
# GET /foo/bar.git/info/refs?service=git-receive-pack (git push)
...@@ -56,8 +57,15 @@ class Projects::GitHttpController < Projects::GitHttpClientController
render plain: exception.message, status: :not_found
end
def render_422(exception)
render plain: exception.message, status: :unprocessable_entity
end
def access
@access ||= access_klass.new(access_actor, project, 'http', authentication_abilities: authentication_abilities, redirected_path: redirected_path)
@access ||= access_klass.new(access_actor, project,
'http', authentication_abilities: authentication_abilities,
namespace_path: params[:namespace_id], project_path: project_path,
redirected_path: redirected_path)
end
def access_actor
...@@ -69,12 +77,17 @@ class Projects::GitHttpController < Projects::GitHttpClientController
# Use the magic string '_any' to indicate we do not know what the
# changes are. This is also what gitlab-shell does.
access.check(git_command, '_any')
@project ||= access.project
end
def access_klass
@access_klass ||= wiki? ? Gitlab::GitAccessWiki : Gitlab::GitAccess
end
def project_path
@project_path ||= params[:project_id].sub(/\.git$/, '')
end
def log_user_activity
Users::ActivityService.new(user, 'pull').execute
end
......
class Appearance < ActiveRecord::Base
include CacheMarkdownField
include AfterCommitQueue
include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
......
...@@ -11,7 +11,7 @@ module Ci
mount_uploader :file, JobArtifactUploader
delegate :open, :exists?, to: :file
delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
......
...@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
......
...@@ -7,16 +7,8 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
mount_uploader :file, LfsObjectUploader
after_save if: :file_changed?, on: [:create, :update] do
run_after_commit do
file.schedule_migration_to_object_storage
end
end
def project_allowed_access?(project)
projects.exists?(project.lfs_storage_project.id)
end
......
...@@ -36,8 +36,8 @@ class Upload < ActiveRecord::Base
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader
def build_uploader(mounted_as = nil)
uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
...@@ -54,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def delete_file!
...@@ -64,12 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
......
...@@ -17,7 +17,9 @@ class FileUploader < GitlabUploader
after :remove, :prune_store_dir
# FileUploader do not run in a model transaction, so we can simply
# enqueue a job after the :store hook.
after :store, :schedule_background_upload
def self.root
File.join(options.storage_path, 'uploads')
......
...@@ -121,6 +121,10 @@
- geo:geo_repositories_clean_up
- geo:geo_repository_destroy
- object_storage_upload
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- admin_emails
- elastic_batch_project_indexer
- elastic_commit_indexer
...@@ -131,7 +135,6 @@
- geo_project_sync
- geo_repository_shard_sync
- ldap_group_sync
- object_storage_upload
- project_update_repository_storage
- rebase
- repository_update_mirror
......
---
title: Add object storage migration task for uploads.
merge_request: 4215
author:
type: added
---
title: User can now git push to create a new project
merge_request: 16547
author:
type: added
...@@ -85,3 +85,4 @@
- [elastic_commit_indexer, 1]
- [export_csv, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
...@@ -33,5 +33,40 @@
1. Click **Create project**.
## Push to create a new project
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/issues/26388) in GitLab 10.5.
When you create a new repository locally, you don't have to go to GitLab, manually
create the project, and then push the repository to it. Instead, you can push the
repository directly to GitLab to create the new project, all without leaving your
terminal. If you have access to the target namespace, GitLab automatically creates
a new project under that namespace with its visibility set to private by default
(you can change it later in the UI).
This can be done by using either SSH or HTTP:
```
## Git push using SSH
git push git@gitlab.example.com:namespace/nonexistent-project.git
## Git push using HTTP
git push https://gitlab.example.com/namespace/nonexistent-project.git
```
Once the push finishes successfully, a remote message will indicate
the command to set the remote and the URL to the new project:
```
remote:
remote: The private project namespace/nonexistent-project was created.
remote:
remote: To configure the remote, run:
remote: git remote add origin https://gitlab.example.com/namespace/nonexistent-project.git
remote:
remote: To view the project, visit:
remote: https://gitlab.example.com/namespace/nonexistent-project
remote:
```
[import it]: ../workflow/importing/README.md
[reserved]: ../user/reserved_names.md
...@@ -14,6 +14,8 @@ module EE
DAST_FILE = 'gl-dast-report.json'.freeze
included do
include ObjectStorage::BackgroundMove
scope :codequality, -> { where(name: %w[codequality codeclimate]) }
scope :performance, -> { where(name: %w[performance deploy]) }
scope :sast, -> { where(name: 'sast') }
......
...@@ -7,6 +7,8 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
......
...@@ -7,7 +7,11 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
end
def local_store?
......
...@@ -2,6 +2,10 @@ module EE
module Note
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
end
def for_epic?
noteable.is_a?(Epic)
end
......
...@@ -8,7 +8,7 @@ require 'carrierwave/storage/fog'
module ObjectStorage
RemoteStoreError = Class.new(StandardError)
UnknownStoreError = Class.new(StandardError)
ObjectStoreUnavailable = Class.new(StandardError)
ObjectStorageUnavailable = Class.new(StandardError)
module Store
LOCAL = 1
...@@ -21,7 +21,7 @@ module ObjectStorage
extend ActiveSupport::Concern
prepended do |base|
raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
raise "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(::RecordsUploads::Concern)
end
...@@ -50,6 +50,15 @@ module ObjectStorage
super
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
upload.class.to_s,
mounted_as,
upload.id)
end
private
def current_upload_satisfies?(paths, model)
...@@ -63,6 +72,33 @@ module ObjectStorage
end
end
# Add support for automatic background uploading after the file is stored.
#
module BackgroundMove
extend ActiveSupport::Concern
def background_upload(mount_points = [])
return unless mount_points.any?
run_after_commit do
mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
end
end
def changed_mounts
self.class.uploaders.select do |mount, uploader_class|
mounted_as = uploader_class.serialization_column(self.class, mount)
mount if send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
end.keys
end
included do
after_save on: [:create, :update] do
background_upload(changed_mounts)
end
end
end
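For orientation, here is a minimal, hypothetical model showing how the `BackgroundMove` concern above is meant to be wired up. The model and uploader names are illustrative and not part of this commit; the uploader is assumed to include `ObjectStorage::Concern`, and `AfterCommitQueue` supplies `run_after_commit`.

```
# Hypothetical example (not from this commit): a model opting into background moves.
class Attachment < ActiveRecord::Base
  include AfterCommitQueue                 # provides run_after_commit
  include ObjectStorage::BackgroundMove    # the concern defined above

  # AttachmentUploader is assumed to include ObjectStorage::Concern.
  mount_uploader :file, AttachmentUploader
end

# Saving a record with a changed mount lets changed_mounts pick up :file; after
# the transaction commits, schedule_background_upload enqueues an
# ObjectStorage::BackgroundMoveWorker job for that mount.
Attachment.create!(file: File.open('report.pdf'))
```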
module Concern
extend ActiveSupport::Concern
...@@ -97,6 +133,10 @@ module ObjectStorage
def licensed?
License.feature_available?(:object_storage)
end
def serialization_column(model_class, mount_point)
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
end
def file_storage?
...@@ -183,13 +223,13 @@ module ObjectStorage
raise e
end
def schedule_migration_to_object_storage(*args) def schedule_background_upload(*args)
return unless self.class.object_store_enabled? return unless schedule_background_upload?
return unless self.class.background_upload_enabled?
return unless self.class.licensed?
return unless self.file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id) ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
model.class.name,
mounted_as,
model.id)
end
def fog_directory
...@@ -211,7 +251,7 @@ module ObjectStorage
def verify_license!(_file)
return if file_storage?
raise 'Object Storage feature is missing' unless self.class.licensed? raise(ObjectStorageUnavailable, 'Object Storage feature is missing') unless self.class.licensed?
end
def exists?
...@@ -231,6 +271,13 @@ module ObjectStorage
private
def schedule_background_upload?
self.class.object_store_enabled? &&
self.class.background_upload_enabled? &&
self.class.licensed? &&
self.file_storage?
end
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
...@@ -238,7 +285,7 @@ module ObjectStorage
end
def serialization_column
model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
self.class.serialization_column(model.class, mounted_as)
end
# Returns the column where the 'store' is saved
......
# Concern for setting Sidekiq settings for the various GitLab ObjectStorage workers.
module ObjectStorageQueue
extend ActiveSupport::Concern
included do
queue_namespace :object_storage
end
end
module ObjectStorage
class BackgroundMoveWorker
include ApplicationWorker
include ObjectStorageQueue
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = build_uploader(subject, file_field&.to_sym)
uploader.migrate!(ObjectStorage::Store::REMOTE)
end
def build_uploader(subject, mount_point)
case subject
when Upload then subject.build_uploader(mount_point)
else
subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
end
end
end
end
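The specs further down exercise this worker directly; as a hedged sketch, a manual enqueue looks roughly like this (the record ids are illustrative):

```
# Move one LFS object's file to remote storage in the background.
ObjectStorage::BackgroundMoveWorker.perform_async('LfsObjectUploader', 'LfsObject', :file, 123)

# For an Upload record, pass the Upload class and the mount point instead;
# build_uploader then goes through Upload#build_uploader(mount_point).
ObjectStorage::BackgroundMoveWorker.perform_async('AvatarUploader', 'Upload', :avatar, 456)
```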
# frozen_string_literal: true
# rubocop:disable Metrics/LineLength
# rubocop:disable Style/Documentation
module ObjectStorage
class MigrateUploadsWorker
include ApplicationWorker
include ObjectStorageQueue
SanityCheckError = Class.new(StandardError)
class Upload < ActiveRecord::Base
# Upper limit for foreground checksum processing
CHECKSUM_THRESHOLD = 100.megabytes
belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :size, presence: true
validates :path, presence: true
validates :model, presence: true
validates :uploader, presence: true
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
def calculate_checksum!
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
end
def exist?
File.exist?(absolute_path)
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def checksummable?
checksum.nil? && local? && exist?
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
UploadChecksumWorker.perform_async(id)
end
def relative_path?
!path.start_with?('/')
end
def identifier
File.basename(path)
end
def uploader_class
Object.const_get(uploader)
end
end
class MigrationResult
attr_reader :upload
attr_accessor :error
def initialize(upload, error = nil)
@upload, @error = upload, error
end
def success?
error.nil?
end
def to_s
success? ? "Migration successful." : "Error while migrating #{upload.id}: #{error.message}"
end
end
module Report
class MigrationFailures < StandardError
attr_reader :errors
def initialize(errors)
@errors = errors
end
def message
errors.map(&:message).join("\n")
end
end
def report!(results)
success, failures = results.partition(&:success?)
Rails.logger.info header(success, failures)
Rails.logger.warn failures(failures)
raise MigrationFailures.new(failures.map(&:error)) if failures.any?
end
def header(success, failures)
"Migrated #{success.count}/#{success.count + failures.count} files."
end
def failures(failures)
failures.map { |f| "\t#{f}" }.join('\n')
end
end
include Report
def self.enqueue!(uploads, mounted_as, to_store)
sanity_check!(uploads, mounted_as)
perform_async(uploads.ids, mounted_as, to_store)
end
# We need to be sure all the uploads are for the same uploader and model type
# and that the mount point exists if provided.
#
def self.sanity_check!(uploads, mounted_as)
upload = uploads.first
uploader_class = upload.uploader.constantize
model_class = uploads.first.model_type.constantize
uploader_types = uploads.map(&:uploader).uniq
model_types = uploads.map(&:model_type).uniq
model_has_mount = mounted_as.nil? || model_class.uploaders[mounted_as] == uploader_class
raise(SanityCheckError, "Multiple uploaders found: #{uploader_types}") unless uploader_types.count == 1
raise(SanityCheckError, "Multiple model types found: #{model_types}") unless model_types.count == 1
raise(SanityCheckError, "Mount point #{mounted_as} not found in #{model_class}.") unless model_has_mount
end
def perform(ids, mounted_as, to_store)
@mounted_as = mounted_as&.to_sym
@to_store = to_store
uploads = Upload.preload(:model).where(id: ids)
sanity_check!(uploads)
results = migrate(uploads)
report!(results)
rescue SanityCheckError => e
# do not retry: the job is insane
Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
end
def sanity_check!(uploads)
self.class.sanity_check!(uploads, @mounted_as)
end
def build_uploaders(uploads)
uploads.map { |upload| upload.build_uploader(@mounted_as) }
end
def migrate(uploads)
build_uploaders(uploads).map(&method(:process_uploader))
end
def process_uploader(uploader)
MigrationResult.new(uploader.upload).tap do |result|
begin
uploader.migrate!(@to_store)
rescue => e
result.error = e
end
end
end
end
end
# @Deprecated - remove once the `object_storage_upload` queue is empty
# The queue has been renamed `object_storage:object_storage_background_upload`
#
class ObjectStorageUploadWorker
include ApplicationWorker
...@@ -15,8 +18,5 @@ class ObjectStorageUploadWorker
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# does not retry when the record do not exists
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
namespace :gitlab do
namespace :uploads do
desc 'GitLab | Uploads | Migrate the uploaded files to object storage'
task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
batch_size = ENV.fetch('BATCH', 200).to_i
@to_store = ObjectStorage::Store::REMOTE
@mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
@uploader_class = args.uploader_class.constantize
@model_class = args.model_class.constantize
uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
end
def enqueue_batch(batch, index)
job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
@mounted_as,
@to_store)
puts "Enqueued job ##{index}: #{job}"
rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
# continue for the next batch
puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
end
def uploads
Upload.class_eval { include EachBatch } unless Upload < EachBatch
Upload
.where.not(store: @to_store)
.where(uploader: @uploader_class.to_s,
model_type: @model_class.base_class.sti_name)
end
end
end
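A usage sketch for the task above, under the assumption that project avatars are being migrated; the class names mirror the specs below and the invocation format is assumed, not prescribed.

```
# Assumed invocation, batching 500 uploads per job:
#   BATCH=500 bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, avatar]"
#
# Each batch is then handed off roughly like this:
ObjectStorage::MigrateUploadsWorker.enqueue!(
  Upload.where(uploader: 'AvatarUploader', model_type: 'Project'),
  :avatar,
  ObjectStorage::Store::REMOTE
)
```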
...@@ -60,8 +60,20 @@ module API
false
end
def project_path
project&.path || project_path_match[:project_path]
end
def namespace_path
project&.namespace&.full_path || project_path_match[:namespace_path]
end
private
def project_path_match
@project_path_match ||= params[:project].match(Gitlab::PathRegex.full_project_git_path_regex) || {}
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def set_project
if params[:gl_repository]
......
...@@ -42,11 +42,14 @@ module API
end
access_checker_klass = wiki? ? Gitlab::GitAccessWiki : Gitlab::GitAccess
access_checker = access_checker_klass access_checker = access_checker_klass.new(actor, project,
.new(actor, project, protocol, authentication_abilities: ssh_authentication_abilities, redirected_path: redirected_path) protocol, authentication_abilities: ssh_authentication_abilities,
namespace_path: namespace_path, project_path: project_path,
redirected_path: redirected_path)
begin
access_checker.check(params[:action], params[:changes])
@project ||= access_checker.project
rescue Gitlab::GitAccess::UnauthorizedError, Gitlab::GitAccess::NotFoundError => e
return { status: false, message: e.message }
end
...@@ -207,8 +210,11 @@ module API
# A user is not guaranteed to be returned; an orphaned write deploy
# key could be used
if user
redirect_message = Gitlab::Checks::ProjectMoved.fetch_redirect_message(user.id, project.id) redirect_message = Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)
project_created_message = Gitlab::Checks::ProjectCreated.fetch_message(user.id, project.id)
output[:redirected_message] = redirect_message if redirect_message
output[:project_created_message] = project_created_message if project_created_message
end
output
......
module Gitlab
module Checks
class PostPushMessage
def initialize(project, user, protocol)
@project = project
@user = user
@protocol = protocol
end
def self.fetch_message(user_id, project_id)
key = message_key(user_id, project_id)
Gitlab::Redis::SharedState.with do |redis|
message = redis.get(key)
redis.del(key)
message
end
end
def add_message
return unless user.present? && project.present?
Gitlab::Redis::SharedState.with do |redis|
key = self.class.message_key(user.id, project.id)
redis.setex(key, 5.minutes, message)
end
end
def message
raise NotImplementedError
end
protected
attr_reader :project, :user, :protocol
def self.message_key(user_id, project_id)
raise NotImplementedError
end
def url_to_repo
protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
end
end
end
end
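To make the flow concrete, a hedged sketch of the round trip the subclasses below rely on: the push path queues a message in Redis, and `/internal/post_receive` reads and deletes it once.

```
# Pre-receive side: queue a message for this user/project pair (expires after 5 minutes).
Gitlab::Checks::ProjectCreated.new(project, user, 'http').add_message

# Post-receive side: read it back exactly once (returns nil if nothing was queued).
Gitlab::Checks::ProjectCreated.fetch_message(user.id, project.id)
```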
module Gitlab
module Checks
class ProjectCreated < PostPushMessage
PROJECT_CREATED = "project_created".freeze
def message
<<~MESSAGE
The private project #{project.full_path} was successfully created.
To configure the remote, run:
git remote add origin #{url_to_repo}
To view the project, visit:
#{project_url}
MESSAGE
end
private
def self.message_key(user_id, project_id)
"#{PROJECT_CREATED}:#{user_id}:#{project_id}"
end
def project_url
Gitlab::Routing.url_helpers.project_url(project)
end
end
end
end
module Gitlab
module Checks
class ProjectMoved class ProjectMoved < PostPushMessage
REDIRECT_NAMESPACE = "redirect_namespace".freeze
def initialize(project, user, redirected_path, protocol) def initialize(project, user, protocol, redirected_path)
@project = project
@user = user
@redirected_path = redirected_path
@protocol = protocol
end
def self.fetch_redirect_message(user_id, project_id)
redirect_key = redirect_message_key(user_id, project_id)
Gitlab::Redis::SharedState.with do |redis| super(project, user, protocol)
message = redis.get(redirect_key)
redis.del(redirect_key)
message
end
end
def add_redirect_message
# Don't bother with sending a redirect message for anonymous clones
# because they never see it via the `/internal/post_receive` endpoint
return unless user.present? && project.present?
Gitlab::Redis::SharedState.with do |redis|
key = self.class.redirect_message_key(user.id, project.id)
redis.setex(key, 5.minutes, redirect_message)
end
end end
def redirect_message(rejected: false) def message(rejected: false)
<<~MESSAGE.strip_heredoc <<~MESSAGE
Project '#{redirected_path}' was moved to '#{project.full_path}'.
Please update your Git remote:
...@@ -47,17 +25,17 @@ module Gitlab
private
attr_reader :project, :redirected_path, :protocol, :user attr_reader :redirected_path
def self.redirect_message_key(user_id, project_id) def self.message_key(user_id, project_id)
"#{REDIRECT_NAMESPACE}:#{user_id}:#{project_id}"
end
def remote_url_message(rejected)
if rejected
"git remote set-url origin #{url} and try again." "git remote set-url origin #{url_to_repo} and try again."
else
"git remote set-url origin #{url}" "git remote set-url origin #{url_to_repo}"
end
end
......
...@@ -5,16 +5,19 @@ module Gitlab
prepend ::EE::Gitlab::GitAccess
include ActionView::Helpers::SanitizeHelper
include PathLocksHelper
include Gitlab::Utils::StrongMemoize
UnauthorizedError = Class.new(StandardError)
NotFoundError = Class.new(StandardError)
ProjectCreationError = Class.new(StandardError)
ProjectMovedError = Class.new(NotFoundError)
ERROR_MESSAGES = {
upload: 'You are not allowed to upload code for this project.',
download: 'You are not allowed to download code from this project.',
deploy_key_upload: auth_upload: 'You are not allowed to upload code.',
'This deploy key does not have write access to this project.', auth_download: 'You are not allowed to download code.',
deploy_key_upload: 'This deploy key does not have write access to this project.',
no_repo: 'A repository for this project does not exist yet.',
project_not_found: 'The project you were looking for could not be found.',
account_blocked: 'Your account has been blocked.',
...@@ -29,24 +32,31 @@ module Gitlab
PUSH_COMMANDS = %w{ git-receive-pack }.freeze
ALL_COMMANDS = DOWNLOAD_COMMANDS + PUSH_COMMANDS
attr_reader :actor, :project, :protocol, :authentication_abilities, :redirected_path attr_reader :actor, :project, :protocol, :authentication_abilities, :namespace_path, :project_path, :redirected_path
def initialize(actor, project, protocol, authentication_abilities:, redirected_path: nil) def initialize(actor, project, protocol, authentication_abilities:, namespace_path: nil, project_path: nil, redirected_path: nil)
@actor = actor
@project = project
@protocol = protocol
@redirected_path = redirected_path
@authentication_abilities = authentication_abilities
@namespace_path = namespace_path
@project_path = project_path
@redirected_path = redirected_path
end
def check(cmd, changes)
check_protocol!
check_valid_actor!
check_active_user!
check_project_accessibility! check_authentication_abilities!(cmd)
check_project_moved!
check_command_disabled!(cmd)
check_command_existence!(cmd)
check_db_accessibility!(cmd)
ensure_project_on_push!(cmd, changes)
check_project_accessibility!
check_project_moved!
check_repository_existence!
case cmd
...@@ -99,6 +109,19 @@ module Gitlab
end
end
def check_authentication_abilities!(cmd)
case cmd
when *DOWNLOAD_COMMANDS
unless authentication_abilities.include?(:download_code) || authentication_abilities.include?(:build_download_code)
raise UnauthorizedError, ERROR_MESSAGES[:auth_download]
end
when *PUSH_COMMANDS
unless authentication_abilities.include?(:push_code)
raise UnauthorizedError, ERROR_MESSAGES[:auth_upload]
end
end
end
def check_project_accessibility!
if project.blank? || !can_read_project?
raise NotFoundError, ERROR_MESSAGES[:project_not_found]
...@@ -108,12 +131,12 @@ module Gitlab
def check_project_moved!
return if redirected_path.nil?
project_moved = Checks::ProjectMoved.new(project, user, redirected_path, protocol) project_moved = Checks::ProjectMoved.new(project, user, protocol, redirected_path)
if project_moved.permanent_redirect? if project_moved.permanent_redirect?
project_moved.add_redirect_message project_moved.add_message
else else
raise ProjectMovedError, project_moved.redirect_message(rejected: true) raise ProjectMovedError, project_moved.message(rejected: true)
end
end
...@@ -143,6 +166,40 @@ module Gitlab
end
end
def check_db_accessibility!(cmd)
return unless receive_pack?(cmd)
if Gitlab::Database.read_only?
raise UnauthorizedError, push_to_read_only_message
end
end
def ensure_project_on_push!(cmd, changes)
return if project || deploy_key?
return unless receive_pack?(cmd) && changes == '_any' && authentication_abilities.include?(:push_code)
namespace = Namespace.find_by_full_path(namespace_path)
return unless user&.can?(:create_projects, namespace)
project_params = {
path: project_path,
namespace_id: namespace.id,
visibility_level: Gitlab::VisibilityLevel::PRIVATE
}
project = Projects::CreateService.new(user, project_params).execute
unless project.saved?
raise ProjectCreationError, "Could not create project: #{project.errors.full_messages.join(', ')}"
end
@project = project
user_access.project = @project
Checks::ProjectCreated.new(project, user, protocol).add_message
end
def check_repository_existence!
unless project.repository.exists?
raise UnauthorizedError, ERROR_MESSAGES[:no_repo]
...@@ -150,9 +207,8 @@ module Gitlab
end
def check_download_access!
return if deploy_key? passed = deploy_key? ||
user_can_download_code? ||
passed = user_can_download_code? ||
build_can_download_code? ||
guest_can_download_code?
...@@ -167,19 +223,17 @@ module Gitlab
raise UnauthorizedError, ERROR_MESSAGES[:read_only]
end
if Gitlab::Database.read_only?
raise UnauthorizedError, push_to_read_only_message
end
if deploy_key if deploy_key
check_deploy_key_push_access! unless deploy_key.can_push_to?(project)
raise UnauthorizedError, ERROR_MESSAGES[:deploy_key_upload]
end
elsif user elsif user
check_user_push_access! # User access is verified in check_change_access!
else else
raise UnauthorizedError, ERROR_MESSAGES[:upload] raise UnauthorizedError, ERROR_MESSAGES[:upload]
end end
return if changes.blank? # Allow access. return if changes.blank? # Allow access this is needed for EE.
if project.above_size_limit?
raise UnauthorizedError, Gitlab::RepositorySizeError.new(project).push_error
...@@ -193,18 +247,6 @@ module Gitlab
check_change_access!(changes)
end
def check_user_push_access!
unless authentication_abilities.include?(:push_code)
raise UnauthorizedError, ERROR_MESSAGES[:upload]
end
end
def check_deploy_key_push_access!
unless deploy_key.can_push_to?(project)
raise UnauthorizedError, ERROR_MESSAGES[:deploy_key_upload]
end
end
def check_change_access!(changes)
changes_list = Gitlab::ChangesList.new(changes)
......
...@@ -189,6 +189,10 @@ module Gitlab
@full_project_path_regex ||= %r{\A#{full_namespace_route_regex}/#{project_route_regex}/\z}
end
def full_project_git_path_regex
@full_project_git_path_regex ||= %r{\A\/?(?<namespace_path>#{full_namespace_route_regex})\/(?<project_path>#{project_route_regex})\.git\z}
end
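An illustrative use of the new regex (the path is made up): it is what lets the Git HTTP and internal API code recover the namespace and project names from a raw Git path before the project exists.

```
match = 'group/sub/new-project.git'.match(Gitlab::PathRegex.full_project_git_path_regex)
match[:namespace_path] # => "group/sub"
match[:project_path]   # => "new-project"
```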
def full_namespace_format_regex
@namespace_format_regex ||= /A#{FULL_NAMESPACE_FORMAT_REGEX}\z/.freeze
end
......
...@@ -6,7 +6,8 @@ module Gitlab
[user&.id, project&.id]
end
attr_reader :user, :project attr_reader :user
attr_accessor :project
def initialize(user, project: nil)
@user = user
......
...@@ -445,7 +445,7 @@ namespace :gitlab do
namespace :geo do
desc 'GitLab | Check Geo configuration and dependencies'
task check: :environment do task check: :gitlab_environment do
warn_user_is_not_gitlab
checks = [
......
namespace :gitlab do
namespace :uploads do
desc 'GitLab | Uploads | Check integrity of uploaded files'
task check: :environment do
puts 'Checking integrity of uploaded files'
uploads_batches do |batch|
batch.each do |upload|
puts "- Checking file (#{upload.id}): #{upload.absolute_path}".color(:green)
if upload.exist?
check_checksum(upload)
else
puts " * File does not exist on the file system".color(:red)
end
end
end
puts 'Done!'
end
def batch_size
ENV.fetch('BATCH', 200).to_i
end
def calculate_checksum(absolute_path)
Digest::SHA256.file(absolute_path).hexdigest
end
def check_checksum(upload)
checksum = calculate_checksum(upload.absolute_path)
if checksum != upload.checksum
puts " * File checksum (#{checksum}) does not match the one in the database (#{upload.checksum})".color(:red)
end
end
def uploads_batches(&block)
Upload.all.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
yield relation
end
end
end
end
require_relative 'helpers.rb'
namespace :gitlab do
namespace :uploads do
desc 'GitLab | Uploads | Check integrity of uploaded files'
task check: :environment do
include UploadTaskHelpers
puts 'Checking integrity of uploaded files'
uploads_batches do |batch|
batch.each do |upload|
begin
puts "- Checking file (#{upload.id}): #{upload.absolute_path}".color(:green)
if upload.exist?
check_checksum(upload)
else
puts " * File does not exist on the file system".color(:red)
end
rescue ObjectStorage::RemoteStoreError
puts "- File (#{upload.id}): File is stored remotely, skipping".color(:yellow)
end
end
end
puts 'Done!'
end
end
end
module UploadTaskHelpers
def batch_size
ENV.fetch('BATCH', 200).to_i
end
def calculate_checksum(absolute_path)
Digest::SHA256.file(absolute_path).hexdigest
end
def check_checksum(upload)
checksum = calculate_checksum(upload.absolute_path)
if checksum != upload.checksum
puts " * File checksum (#{checksum}) does not match the one in the database (#{upload.checksum})".color(:red)
end
end
def uploads_batches(&block)
Upload.all.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
yield relation
end
end
end
...@@ -34,7 +34,7 @@ describe LfsObject do
end
end
describe '#schedule_migration_to_object_storage' do describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
...@@ -47,7 +47,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
...@@ -61,7 +61,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric)) expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
subject
end
...@@ -73,7 +73,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
...@@ -86,7 +86,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......
require 'rake_helper'
describe 'gitlab:uploads:migrate rake tasks' do
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:model_class) { Project }
let(:uploader_class) { AvatarUploader }
let(:mounted_as) { :avatar }
let(:batch_size) { 3 }
before do
stub_env('BATCH', batch_size.to_s)
stub_uploads_object_storage(uploader_class)
Rake.application.rake_require 'tasks/gitlab/uploads/migrate'
allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
end
def run
args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
run_rake_task("gitlab:uploads:migrate", *args)
end
it 'enqueue jobs in batch' do
expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times
run
end
end
require 'spec_helper'
describe ObjectStorage::BackgroundMoveWorker do
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
end
context 'for LFS' do
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
let(:uploader_class) { LfsObjectUploader }
let(:subject_class) { LfsObject }
let(:file_field) { :file }
let(:subject_id) { lfs_object.id }
context 'when object storage is enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'uploads object to storage' do
expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
context 'when background upload is disabled' do
before do
allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
end
it 'is skipped' do
expect { perform }.not_to change { lfs_object.reload.file_store }
end
end
end
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
perform
expect(lfs_object.reload.file_store).to eq(local)
end
end
end
context 'for legacy artifacts' do
let(:build) { create(:ci_build, :legacy_artifacts) }
let(:uploader_class) { LegacyArtifactUploader }
let(:subject_class) { Ci::Build }
let(:file_field) { :artifacts_file }
let(:subject_id) { build.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it "migrates file to remote storage" do
perform
expect(build.reload.artifacts_file_store).to eq(remote)
end
context 'for artifacts_metadata' do
let(:file_field) { :artifacts_metadata }
it 'migrates metadata to remote storage' do
perform
expect(build.reload.artifacts_metadata_store).to eq(remote)
end
end
end
end
end
context 'for job artifacts' do
let(:artifact) { create(:ci_job_artifact, :archive) }
let(:uploader_class) { JobArtifactUploader }
let(:subject_class) { Ci::JobArtifact }
let(:file_field) { :file }
let(:subject_id) { artifact.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it "migrates file to remote storage" do
perform
expect(artifact.reload.file_store).to eq(remote)
end
end
end
end
context 'for uploads' do
let!(:project) { create(:project, :with_avatar) }
let(:uploader_class) { AvatarUploader }
let(:file_field) { :avatar }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_uploads_object_storage(uploader_class, background_upload: true)
end
describe 'supports using the model' do
let(:subject_class) { project.class }
let(:subject_id) { project.id }
it "migrates file to remote storage" do
perform
expect(project.reload.avatar.file_storage?).to be_falsey
end
end
describe 'supports using the Upload' do
let(:subject_class) { Upload }
let(:subject_id) { project.avatar.upload.id }
it "migrates file to remote storage" do
perform
expect(project.reload.avatar.file_storage?).to be_falsey
end
end
end
end
end
end
require 'spec_helper'
describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
shared_context 'sanity_check! fails' do
before do
expect(described_class).to receive(:sanity_check!).and_raise(described_class::SanityCheckError)
end
end
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:uploads) { Upload.all }
let(:mounted_as) { :avatar }
let(:to_store) { ObjectStorage::Store::REMOTE }
before do
stub_uploads_object_storage(AvatarUploader)
end
describe '.enqueue!' do
def enqueue!
described_class.enqueue!(uploads, mounted_as, to_store)
end
it 'is guarded by .sanity_check!' do
expect(described_class).to receive(:perform_async)
expect(described_class).to receive(:sanity_check!)
enqueue!
end
context 'sanity_check! fails' do
include_context 'sanity_check! fails'
it 'does not enqueue a job' do
expect(described_class).not_to receive(:perform_async)
expect { enqueue! }.to raise_error(described_class::SanityCheckError)
end
end
end
describe '.sanity_check!' do
shared_examples 'raises a SanityCheckError' do
let(:mount_point) { nil }
it do
expect { described_class.sanity_check!(uploads, mount_point) }
.to raise_error(described_class::SanityCheckError)
end
end
context 'uploader types mismatch' do
let!(:outlier) { create(:upload, uploader: 'FileUploader') }
include_examples 'raises a SanityCheckError'
end
context 'model types mismatch' do
let!(:outlier) { create(:upload, model_type: 'Potato') }
include_examples 'raises a SanityCheckError'
end
context 'mount point not found' do
include_examples 'raises a SanityCheckError' do
let(:mount_point) { :potato }
end
end
end
describe '#perform' do
def perform
described_class.new.perform(uploads.ids, mounted_as, to_store)
rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures
# swallow
end
shared_examples 'outputs correctly' do |success: 0, failures: 0|
total = success + failures
if success > 0
it 'outputs the reports' do
expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})
perform
end
end
if failures > 0
it 'outputs upload failures' do
expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)
perform
end
end
end
it_behaves_like 'outputs correctly', success: 10
it 'migrates files' do
perform
aggregate_failures do
projects.each do |project|
expect(project.reload.avatar.upload.local?).to be_falsey
end
end
end
context 'migration is unsuccessful' do
before do
allow_any_instance_of(ObjectStorage::Concern).to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
end
it_behaves_like 'outputs correctly', failures: 10
end
end
end
...@@ -6,4 +6,17 @@ FactoryBot.define do ...@@ -6,4 +6,17 @@ FactoryBot.define do
description "Open source software to collaborate on code" description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines" new_project_guidelines "Custom project guidelines"
end end
trait :with_logo do
logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_header_logo do
header_logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_logos do
with_logo
with_header_logo
end
end end
require 'rails_helper'
describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state do
let(:user) { create(:user) }
let(:project) { create(:project) }
describe '.fetch_message' do
context 'with a project created message queue' do
let(:project_created) { described_class.new(project, user, 'http') }
before do
project_created.add_message
end
it 'returns project created message' do
expect(described_class.fetch_message(user.id, project.id)).to eq(project_created.message)
end
it 'deletes the project created message from redis' do
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).not_to be_nil
described_class.fetch_message(user.id, project.id)
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).to be_nil
end
end
context 'with no project created message queue' do
it 'returns nil' do
expect(described_class.fetch_message(1, 2)).to be_nil
end
end
end
describe '#add_message' do
it 'queues a project created message' do
project_created = described_class.new(project, user, 'http')
expect(project_created.add_message).to eq('OK')
end
it 'handles anonymous push' do
project_created = described_class.new(nil, user, 'http')
expect(project_created.add_message).to be_nil
end
end
end
...@@ -4,82 +4,82 @@ describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do ...@@ -4,82 +4,82 @@ describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:user) { create(:user) }
let(:project) { create(:project) }
describe '.fetch_redirct_message' do describe '.fetch_message' do
context 'with a redirect message queue' do context 'with a redirect message queue' do
it 'should return the redirect message' do it 'returns the redirect message' do
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
project_moved.add_redirect_message project_moved.add_message
expect(described_class.fetch_redirect_message(user.id, project.id)).to eq(project_moved.redirect_message) expect(described_class.fetch_message(user.id, project.id)).to eq(project_moved.message)
end end
it 'should delete the redirect message from redis' do it 'deletes the redirect message from redis' do
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
project_moved.add_redirect_message project_moved.add_message
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).not_to be_nil expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).not_to be_nil
described_class.fetch_redirect_message(user.id, project.id) described_class.fetch_message(user.id, project.id)
expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).to be_nil expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).to be_nil
end end
end end
context 'with no redirect message queue' do context 'with no redirect message queue' do
it 'should return nil' do it 'returns nil' do
expect(described_class.fetch_redirect_message(1, 2)).to be_nil expect(described_class.fetch_message(1, 2)).to be_nil
end end
end end
end end
describe '#add_redirect_message' do describe '#add_message' do
it 'should queue a redirect message' do it 'queues a redirect message' do
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
expect(project_moved.add_redirect_message).to eq("OK") expect(project_moved.add_message).to eq("OK")
end end
it 'should handle anonymous clones' do it 'handles anonymous clones' do
project_moved = described_class.new(project, nil, 'foo/bar', 'http') project_moved = described_class.new(project, nil, 'http', 'foo/bar')
expect(project_moved.add_redirect_message).to eq(nil) expect(project_moved.add_message).to eq(nil)
end end
end end
describe '#redirect_message' do describe '#message' do
context 'when the push is rejected' do context 'when the push is rejected' do
it 'should return a redirect message telling the user to try again' do it 'returns a redirect message telling the user to try again' do
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
message = "Project 'foo/bar' was moved to '#{project.full_path}'." +
"\n\nPlease update your Git remote:" +
"\n\n git remote set-url origin #{project.http_url_to_repo} and try again.\n"
expect(project_moved.redirect_message(rejected: true)).to eq(message) expect(project_moved.message(rejected: true)).to eq(message)
end end
end end
context 'when the push is not rejected' do context 'when the push is not rejected' do
it 'should return a redirect message' do it 'returns a redirect message' do
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
message = "Project 'foo/bar' was moved to '#{project.full_path}'." +
"\n\nPlease update your Git remote:" +
"\n\n git remote set-url origin #{project.http_url_to_repo}\n"
expect(project_moved.redirect_message).to eq(message) expect(project_moved.message).to eq(message)
end end
end end
end end
describe '#permanent_redirect?' do describe '#permanent_redirect?' do
context 'with a permanent RedirectRoute' do context 'with a permanent RedirectRoute' do
it 'should return true' do it 'returns true' do
project.route.create_redirect('foo/bar', permanent: true) project.route.create_redirect('foo/bar', permanent: true)
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
expect(project_moved.permanent_redirect?).to be_truthy expect(project_moved.permanent_redirect?).to be_truthy
end end
end end
context 'without a permanent RedirectRoute' do context 'without a permanent RedirectRoute' do
it 'should return false' do it 'returns false' do
project.route.create_redirect('foo/bar') project.route.create_redirect('foo/bar')
project_moved = described_class.new(project, user, 'foo/bar', 'http') project_moved = described_class.new(project, user, 'http', 'foo/bar')
expect(project_moved.permanent_redirect?).to be_falsy expect(project_moved.permanent_redirect?).to be_falsy
end end
end end
......
...@@ -18,14 +18,14 @@ describe Ci::JobArtifact do ...@@ -18,14 +18,14 @@ describe Ci::JobArtifact do
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
describe '#schedule_migration_to_object_storage' do describe '#schedule_background_upload' do
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
...@@ -39,7 +39,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric)) expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
...@@ -51,7 +51,7 @@ describe Ci::JobArtifact do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject subject
end end
...@@ -64,7 +64,7 @@ describe Ci::JobArtifact do ...@@ -64,7 +64,7 @@ describe Ci::JobArtifact do
end end
it 'schedules the model for migration' do it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject subject
end end
......
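The renamed callback above (schedule_migration_to_object_storage becomes schedule_background_upload) and the worker swap (ObjectStorageUploadWorker becomes ObjectStorage::BackgroundMoveWorker) suggest roughly the following shape for the model hook. Only the worker arguments come from the spec; the callback wiring and config keys are assumptions.

# Sketch of the callback the spec exercises; not the exact implementation.
module Ci
  class JobArtifact < ActiveRecord::Base
    after_commit :schedule_background_upload, on: :create

    def schedule_background_upload
      # Both guards are assumed; the spec only distinguishes the
      # object-storage enabled/disabled and background-upload on/off cases.
      return unless Gitlab.config.artifacts.object_store.enabled
      return unless Gitlab.config.artifacts.object_store.background_upload

      ObjectStorage::BackgroundMoveWorker.perform_async(
        'JobArtifactUploader', self.class.name, :file, id)
    end
  end
end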
...@@ -368,7 +368,7 @@ describe API::Internal do ...@@ -368,7 +368,7 @@ describe API::Internal do
context 'project as /namespace/project' do context 'project as /namespace/project' do
it do it do
pull(key, project_with_repo_path('/' + project.full_path)) push(key, project_with_repo_path('/' + project.full_path))
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy expect(json_response["status"]).to be_truthy
...@@ -379,7 +379,7 @@ describe API::Internal do ...@@ -379,7 +379,7 @@ describe API::Internal do
context 'project as namespace/project' do context 'project as namespace/project' do
it do it do
pull(key, project_with_repo_path(project.full_path)) push(key, project_with_repo_path(project.full_path))
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy expect(json_response["status"]).to be_truthy
...@@ -807,14 +807,27 @@ describe API::Internal do ...@@ -807,14 +807,27 @@ describe API::Internal do
context 'with a redirected data' do context 'with a redirected data' do
it 'returns redirected message on the response' do it 'returns redirected message on the response' do
project_moved = Gitlab::Checks::ProjectMoved.new(project, user, 'foo/baz', 'http') project_moved = Gitlab::Checks::ProjectMoved.new(project, user, 'http', 'foo/baz')
project_moved.add_redirect_message project_moved.add_message
post api("/internal/post_receive"), valid_params post api("/internal/post_receive"), valid_params
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
expect(json_response["redirected_message"]).to be_present expect(json_response["redirected_message"]).to be_present
expect(json_response["redirected_message"]).to eq(project_moved.redirect_message) expect(json_response["redirected_message"]).to eq(project_moved.message)
end
end
context 'with new project data' do
it 'returns new project message on the response' do
project_created = Gitlab::Checks::ProjectCreated.new(project, user, 'http')
project_created.add_message
post api("/internal/post_receive"), valid_params
expect(response).to have_gitlab_http_status(200)
expect(json_response["project_created_message"]).to be_present
expect(json_response["project_created_message"]).to eq(project_created.message)
end end
end end
......
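The new project_created_message expectation mirrors the redirected_message one: Gitlab::Checks::ProjectCreated records a message via add_message during the access check, and /internal/post_receive returns it. Only the constructor arguments and the add_message/message pair come from the spec; the hand-off store, key, TTL and wording below are placeholders for illustration.

module Gitlab
  module Checks
    # Rough sketch of the message hand-off pattern exercised above.
    class ProjectCreated
      def initialize(project, user, protocol)
        @project = project
        @user = user
        @protocol = protocol
      end

      def add_message
        Gitlab::Redis::SharedState.with do |redis|
          # Key name and TTL are illustrative, not taken from the diff.
          redis.setex("project_created:#{@user.id}:#{@project.id}", 5 * 60, message)
        end
      end

      def message
        # Placeholder wording; the spec only checks that the stored and
        # returned messages match.
        "The project #{@project.full_path} was successfully created."
      end
    end
  end
end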
...@@ -107,15 +107,39 @@ describe 'Git HTTP requests' do ...@@ -107,15 +107,39 @@ describe 'Git HTTP requests' do
let(:user) { create(:user) } let(:user) { create(:user) }
context "when the project doesn't exist" do context "when the project doesn't exist" do
let(:path) { 'doesnt/exist.git' } context "when namespace doesn't exist" do
let(:path) { 'doesnt/exist.git' }
it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pulls require Basic HTTP Authentication'
it_behaves_like 'pushes require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication'
context 'when authenticated' do context 'when authenticated' do
it 'rejects downloads and uploads with 404 Not Found' do it 'rejects downloads and uploads with 404 Not Found' do
download_or_upload(path, user: user.username, password: user.password) do |response| download_or_upload(path, user: user.username, password: user.password) do |response|
expect(response).to have_gitlab_http_status(:not_found) expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
context 'when namespace exists' do
let(:path) { "#{user.namespace.path}/new-project.git"}
context 'when authenticated' do
it 'creates a new project under the existing namespace' do
expect do
upload(path, user: user.username, password: user.password) do |response|
expect(response).to have_gitlab_http_status(:ok)
end
end.to change { user.projects.count }.by(1)
end
it 'rejects push with 422 Unprocessable Entity when project is invalid' do
path = "#{user.namespace.path}/new.git"
push_get(path, user: user.username, password: user.password)
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end end
end end
end end
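The two new examples above describe push-to-create over HTTP: an authenticated push to an unused path under an existing namespace creates the project, while an invalid project results in 422 Unprocessable Entity. A rough sketch of the server-side step that behaviour implies is below; the service name and parameters are assumptions, not read from the diff.

# Illustrative only: create the target project during a receive-pack check.
def create_project_on_push(user, namespace, project_path)
  project = Projects::CreateService.new(
    user,
    path: project_path,       # parameter names assumed
    namespace_id: namespace.id
  ).execute

  # An unsaved project signals validation failure, which the Git HTTP
  # controller is expected to surface as 422 Unprocessable Entity.
  raise "Could not create project: #{project.errors.full_messages.join(', ')}" unless project.persisted?

  project
end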
...@@ -596,7 +620,7 @@ describe 'Git HTTP requests' do ...@@ -596,7 +620,7 @@ describe 'Git HTTP requests' do
push_get(path, env) push_get(path, env)
expect(response).to have_gitlab_http_status(:forbidden) expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_error(:upload)) expect(response.body).to eq(git_access_error(:auth_upload))
end end
# We are "authenticated" as CI using a valid token here. But we are # We are "authenticated" as CI using a valid token here. But we are
...@@ -636,7 +660,7 @@ describe 'Git HTTP requests' do ...@@ -636,7 +660,7 @@ describe 'Git HTTP requests' do
push_get path, env push_get path, env
expect(response).to have_gitlab_http_status(:forbidden) expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_error(:upload)) expect(response.body).to eq(git_access_error(:auth_upload))
end end
end end
......
...@@ -1030,7 +1030,7 @@ describe 'Git LFS API and storage' do ...@@ -1030,7 +1030,7 @@ describe 'Git LFS API and storage' do
context 'with object storage disabled' do context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true) put_finalize(with_tempfile: true)
end end
...@@ -1042,7 +1042,7 @@ describe 'Git LFS API and storage' do ...@@ -1042,7 +1042,7 @@ describe 'Git LFS API and storage' do
end end
it 'schedules migration of file to object storage' do it 'schedules migration of file to object storage' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric)) expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true) put_finalize(with_tempfile: true)
end end
......
require 'rake_helper'
describe 'gitlab:uploads:check rake tasks' do
let!(:upload) { create(:upload, path: Rails.root.join('spec/fixtures/banana_sample.gif')) }
before do
Rake.application.rake_require 'tasks/gitlab/uploads/check'
end
it 'outputs the integrity check for each uploaded file' do
expect { run_rake_task('gitlab:uploads:check') }.to output(/Checking file \(#{upload.id}\): #{Regexp.quote(upload.absolute_path)}/).to_stdout
end
it 'errors out about missing files on the file system' do
create(:upload)
expect { run_rake_task('gitlab:uploads:check') }.to output(/File does not exist on the file system/).to_stdout
end
it 'errors out about invalid checksum' do
upload.update_column(:checksum, '01a3156db2cf4f67ec823680b40b7302f89ab39179124ad219f94919b8a1769e')
expect { run_rake_task('gitlab:uploads:check') }.to output(/File checksum \(9e697aa09fe196909813ee36103e34f721fe47a5fdc8aac0e4e4ac47b9b38282\) does not match the one in the database \(#{upload.checksum}\)/).to_stdout
end
end
require 'rake_helper'
describe 'gitlab:uploads rake tasks' do
describe 'check' do
let!(:upload) { create(:upload, path: Rails.root.join('spec/fixtures/banana_sample.gif')) }
before do
Rake.application.rake_require 'tasks/gitlab/uploads'
end
it 'outputs the integrity check for each uploaded file' do
expect { run_rake_task('gitlab:uploads:check') }.to output(/Checking file \(#{upload.id}\): #{Regexp.quote(upload.absolute_path)}/).to_stdout
end
it 'errors out about missing files on the file system' do
create(:upload)
expect { run_rake_task('gitlab:uploads:check') }.to output(/File does not exist on the file system/).to_stdout
end
it 'errors out about invalid checksum' do
upload.update_column(:checksum, '01a3156db2cf4f67ec823680b40b7302f89ab39179124ad219f94919b8a1769e')
expect { run_rake_task('gitlab:uploads:check') }.to output(/File checksum \(9e697aa09fe196909813ee36103e34f721fe47a5fdc8aac0e4e4ac47b9b38282\) does not match the one in the database \(#{upload.checksum}\)/).to_stdout
end
end
end
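Both rake spec files above assert the same three outputs: a per-file "Checking file" line, a missing-file error, and a checksum mismatch that prints both digests. A task shaped roughly like the following would produce them; treat it as a sketch, since only the output strings and the Upload attributes come from the specs (SHA256 is inferred from the 64-character digests).

# Hedged sketch of a gitlab:uploads:check task matching the expected output.
require 'digest'

namespace :gitlab do
  namespace :uploads do
    desc 'GitLab | Uploads | Check integrity of uploaded files'
    task check: :environment do
      Upload.find_each do |upload|
        puts "Checking file (#{upload.id}): #{upload.absolute_path}"

        unless File.exist?(upload.absolute_path)
          puts '  File does not exist on the file system'
          next
        end

        checksum = Digest::SHA256.file(upload.absolute_path).hexdigest
        next if checksum == upload.checksum

        puts "  File checksum (#{checksum}) does not match the one in the database (#{upload.checksum})"
      end
    end
  end
end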
...@@ -26,7 +26,7 @@ describe LfsObjectUploader do ...@@ -26,7 +26,7 @@ describe LfsObjectUploader do
describe 'migration to object storage' do describe 'migration to object storage' do
context 'with object storage disabled' do context 'with object storage disabled' do
it "is skipped" do it "is skipped" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object lfs_object
end end
...@@ -38,7 +38,7 @@ describe LfsObjectUploader do ...@@ -38,7 +38,7 @@ describe LfsObjectUploader do
end end
it 'is scheduled to run after creation' do it 'is scheduled to run after creation' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric)) expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object lfs_object
end end
...@@ -50,7 +50,7 @@ describe LfsObjectUploader do ...@@ -50,7 +50,7 @@ describe LfsObjectUploader do
end end
it 'is skipped' do it 'is skipped' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async) expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object lfs_object
end end
...@@ -67,7 +67,7 @@ describe LfsObjectUploader do ...@@ -67,7 +67,7 @@ describe LfsObjectUploader do
end end
it 'can store file remotely' do it 'can store file remotely' do
allow(ObjectStorageUploadWorker).to receive(:perform_async) allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
store_file(lfs_object) store_file(lfs_object)
......
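Every worker expectation in these specs uses the same four arguments: uploader class name, model class name, mounted column, and record id. A bare-bones sketch of a worker with that contract is below; the migrate! call and the Store::REMOTE constant are assumptions about the object-storage concern, not read from the diff.

module ObjectStorage
  # Illustrative worker skeleton; the argument order mirrors the specs above.
  class BackgroundMoveWorker
    include Sidekiq::Worker

    def perform(uploader_class_name, subject_class_name, file_field, subject_id)
      uploader_class = uploader_class_name.constantize
      subject = subject_class_name.constantize.find(subject_id)

      # Move the mounted file (e.g. lfs_object.file) to remote storage.
      subject.public_send(file_field).migrate!(uploader_class::Store::REMOTE)
    end
  end
end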