Commit dd271e24 authored by Grzegorz Bizon

Merge branch 'direct-upload-of-artifacts' into 'master'

Direct upload of artifacts

See merge request gitlab-org/gitlab-ce!18160
parents e9e800f5 678620cc
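In rough terms, this MR switches artifact uploads to the Workhorse "accelerated" flow. A sketch of the handshake, inferred from the diff below (names match the code; the step ordering is my reading, not stated in the MR):

# 1. The runner calls POST /api/v4/jobs/:id/artifacts/authorize and Rails
#    answers with ObjectStorage::Concern#workhorse_authorize, i.e.
#    { TempPath: ..., RemoteObject: { ID:, GetURL:, StoreURL:, DeleteURL: } }.
# 2. Workhorse saves the body under TempPath (local storage) or streams it
#    to RemoteObject.StoreURL (direct upload), then replays the request with
#    "file.path"/"file.name" or "file.remote_id" fields.
# 3. Rails wraps those fields via UploadedFile.from_params, and the
#    overridden CarrierWave #cache! attaches an already-remote blob by its
#    remote_id instead of re-reading it from disk.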
......@@ -31,7 +31,9 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
render plain: 'Unprocessable entity', status: 422
end
rescue ActiveRecord::RecordInvalid
render_400
render_lfs_forbidden
rescue UploadedFile::InvalidPathError
render_lfs_forbidden
rescue ObjectStorage::RemoteStoreError
render_lfs_forbidden
end
......@@ -66,10 +68,11 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
end
def create_file!(oid, size)
LfsObject.new(oid: oid, size: size).tap do |object|
object.file.store_workhorse_file!(params, :file)
object.save!
end
uploaded_file = UploadedFile.from_params(
params, :file, LfsObjectUploader.workhorse_local_upload_path)
return unless uploaded_file
LfsObject.create!(oid: oid, size: size, file: uploaded_file)
end
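# Review note: UploadedFile.from_params returns nil when no file params are
# present; an insecure path or a missing remote object raises instead, which
# the rescue clauses above translate into LFS 'forbidden' responses.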
def link_to_project!(object)
......
......@@ -7,6 +7,7 @@ module Ci
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :update_file_store
before_save :set_size, if: :file_changed?
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
......@@ -21,6 +22,10 @@ module Ci
trace: 3
}
def update_file_store
self.file_store = file.object_store
end
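# Review note: persisting file.object_store into file_store records where
# CarrierWave put the file (Store::LOCAL or Store::REMOTE), which is what
# the with_files_stored_locally scope above filters on.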
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
......
......@@ -2,6 +2,8 @@ class JobArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
ObjectNotReadyError = Class.new(StandardError)
storage_options Gitlab.config.artifacts
def size
......@@ -25,6 +27,8 @@ class JobArtifactUploader < GitlabUploader
private
def dynamic_segment
raise ObjectNotReadyError, 'JobArtifact is not ready' unless model.id
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
......
......@@ -2,6 +2,8 @@ class LegacyArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
ObjectNotReadyError = Class.new(StandardError)
storage_options Gitlab.config.artifacts
def store_dir
......@@ -11,6 +13,8 @@ class LegacyArtifactUploader < GitlabUploader
private
def dynamic_segment
raise ObjectNotReadyError, 'Build is not ready' unless model.id
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end
......@@ -156,11 +156,10 @@ module ObjectStorage
end
def workhorse_authorize
if options = workhorse_remote_upload_options
{ RemoteObject: options }
else
{ TempPath: workhorse_local_upload_path }
end
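# Review note: both entries are built and `.compact` drops whichever is nil:
# only TempPath survives unless remote upload options are available (direct
# upload), in which case Workhorse also receives a RemoteObject carrying the
# pre-signed ID/GetURL/StoreURL/DeleteURL asserted in the runner spec below.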
{
RemoteObject: workhorse_remote_upload_options,
TempPath: workhorse_local_upload_path
}.compact
end
def workhorse_local_upload_path
......@@ -293,16 +292,14 @@ module ObjectStorage
}
end
def store_workhorse_file!(params, identifier)
filename = params["#{identifier}.name"]
if remote_object_id = params["#{identifier}.remote_id"]
store_remote_file!(remote_object_id, filename)
elsif local_path = params["#{identifier}.path"]
store_local_file!(local_path, filename)
else
raise RemoteStoreError, 'Bad file'
def cache!(new_file = sanitized_file)
# We intercept ::UploadedFile instances that may already be stored on remote storage.
# This path implements "accelerated" uploads, where the result is written to remote storage directly
if new_file.is_a?(::UploadedFile) && new_file.remote_id
return cache_remote_file!(new_file.remote_id, new_file.original_filename)
end
super
end
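# Illustrative call sequence (a sketch; names mirror the controllers and
# specs in this MR):
#   file = UploadedFile.from_params(params, :file, uploader.class.workhorse_local_upload_path)
#   uploader.cache!(file)  # file.remote_id present => cache_remote_file! claims tmp/upload/<id>
#   model.save!            # CarrierWave then promotes the cached file to its final location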
private
......@@ -313,36 +310,29 @@ module ObjectStorage
self.file_storage?
end
def store_remote_file!(remote_object_id, filename)
raise RemoteStoreError, 'Missing filename' unless filename
def cache_remote_file!(remote_object_id, original_filename)
file_path = File.join(TMP_UPLOAD_PATH, remote_object_id)
file_path = Pathname.new(file_path).cleanpath.to_s
raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(TMP_UPLOAD_PATH + '/')
self.object_store = Store::REMOTE
# TODO:
# This should be changed to use the `tmp/cache` mechanism
# instead of a custom upload directory;
# using tmp/cache would make this implementation much simpler than it is today
CarrierWave::Storage::Fog::File.new(self, storage, file_path).tap do |file|
CarrierWave::Storage::Fog::File.new(self, storage_for(Store::REMOTE), file_path).tap do |file|
raise RemoteStoreError, 'Missing file' unless file.exists?
self.filename = filename
self.file = storage.store!(file)
end
end
def store_local_file!(local_path, filename)
raise RemoteStoreError, 'Missing filename' unless filename
root_path = File.realpath(self.class.workhorse_local_upload_path)
file_path = File.realpath(local_path)
raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(root_path)
# Remotely stored file; we force it to be kept on remote storage
self.object_store = Store::REMOTE
self.object_store = Store::LOCAL
self.store!(UploadedFile.new(file_path, filename))
# TODO:
# We store the file internally and force it to be considered `cached`.
# This makes CarrierWave store the file in its permanent location (copy/delete)
# once this object is saved, but not sooner
@cache_id = "force-to-use-cache" # rubocop:disable Gitlab/ModuleWithInstanceVariables
@file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
@filename = original_filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
# this is a hack around CarrierWave. The #migrate method needs to be
......
......@@ -308,6 +308,7 @@ Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['direct_upload'] = false if Settings.artifacts['object_store']['direct_upload'].nil?
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
......
artifacts_object_store = Gitlab.config.artifacts.object_store
if artifacts_object_store.enabled &&
artifacts_object_store.direct_upload &&
artifacts_object_store.connection&.provider.to_s != 'Google'
raise "Only 'Google' is supported as a object storage provider when 'direct_upload' of artifacts is used"
end
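For example, this hypothetical combination would abort startup:

# object_store: { enabled: true, direct_upload: true,
#                 connection: { provider: 'AWS' } }
# => RuntimeError: Only 'Google' is supported as an object storage provider ...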
......@@ -108,6 +108,7 @@ For source installations the following settings are nested under `artifacts:` an
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Artifacts will be stored | |
| `direct_upload` | Set to `true` to enable direct upload of Artifacts without the need for local shared storage. This option may be removed once we decide to support only a single storage for all files. Currently only the `Google` provider is supported | `false` |
| `background_upload` | Set to `false` to disable automatic upload. This option may be removed once uploads go directly to S3 | `true` |
| `proxy_download` | Set to `true` to proxy all files served. Leaving it at `false` reduces egress traffic from GitLab, since clients download directly from remote storage instead of having all data proxied | `false` |
| `connection` | Various connection options described below | |
......
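For illustration, here is the shape these options take as a Ruby hash, mirroring what the new stub_artifacts_setting spec helper stubs further down (the values are hypothetical):

stub_artifacts_setting(
  object_store: {
    enabled: true,
    direct_upload: true,               # bypass local shared storage
    background_upload: false,
    proxy_download: false,
    remote_directory: 'artifacts',     # hypothetical bucket name
    connection: { provider: 'Google' } # the only provider accepted with direct_upload
  })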
......@@ -78,6 +78,14 @@ module API
rack_response({ 'message' => '404 Not found' }.to_json, 404)
end
rescue_from UploadedFile::InvalidPathError do |e|
rack_response({ 'message' => e.message }.to_json, 400)
end
rescue_from ObjectStorage::RemoteStoreError do |e|
rack_response({ 'message' => e.message }.to_json, 500)
end
# Retain 405 error rather than a 500 error for Grape 0.15.0+.
# https://github.com/ruby-grape/grape/blob/a3a28f5b5dfbb2797442e006dbffd750b27f2a76/UPGRADING.md#changes-to-method-not-allowed-routes
rescue_from Grape::Exceptions::MethodNotAllowed do |e|
......
......@@ -389,28 +389,6 @@ module API
# file helpers
def uploaded_file(field, uploads_path)
if params[field]
bad_request!("#{field} is not a file") unless params[field][:filename]
return params[field]
end
return nil unless params["#{field}.path"] && params["#{field}.name"]
# sanitize file paths
# this requires all paths to exist
required_attributes! %W(#{field}.path)
uploads_path = File.realpath(uploads_path)
file_path = File.realpath(params["#{field}.path"])
bad_request!('Bad file path') unless file_path.start_with?(uploads_path)
UploadedFile.new(
file_path,
params["#{field}.name"],
params["#{field}.type"] || 'application/octet-stream'
)
end
def present_disk_file!(path, filename, content_type = 'application/octet-stream')
filename ||= File.basename(path)
header['Content-Disposition'] = "attachment; filename=#{filename}"
......
......@@ -186,7 +186,7 @@ module API
status 200
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
Gitlab::Workhorse.artifact_upload_ok
JobArtifactUploader.workhorse_authorize
end
desc 'Upload artifacts for job' do
......@@ -201,14 +201,15 @@ module API
requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token)
optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
optional :file, type: File, desc: %q(Artifact's file)
optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'file.name', type: String, desc: %q(real filename as sent in Content-Disposition (generated by Workhorse))
optional 'file.type', type: String, desc: %q(real content type as sent in Content-Type (generated by Workhorse))
optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file)
optional 'file.size', type: Integer, desc: %q(real size of file (generated by Workhorse))
optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file (generated by Workhorse))
optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of the file)
optional 'metadata.size', type: Integer, desc: %q(real size of metadata (generated by Workhorse))
optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of metadata (generated by Workhorse))
end
post '/:id/artifacts' do
not_allowed! unless Gitlab.config.artifacts.enabled
......@@ -217,21 +218,34 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
artifacts = uploaded_file(:file, workhorse_upload_path)
metadata = uploaded_file(:metadata, workhorse_upload_path)
artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
metadata = UploadedFile.from_params(params, :metadata, JobArtifactUploader.workhorse_local_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
bad_request!("Already uploaded") if job.job_artifacts_archive
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, file_sha256: params['file.sha256'], expire_in: expire_in)
job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, file_sha256: params['metadata.sha256'], expire_in: expire_in) if metadata
job.artifacts_expire_in = expire_in
job.build_job_artifacts_archive(
project: job.project,
file: artifacts,
file_type: :archive,
file_sha256: artifacts.sha256,
expire_in: expire_in)
if metadata
job.build_job_artifacts_metadata(
project: job.project,
file: metadata,
file_type: :metadata,
file_sha256: metadata.sha256,
expire_in: expire_in)
end
if job.save
if job.update(artifacts_expire_in: expire_in)
present job, with: Entities::JobRequest::Response
else
render_validation_error!(job)
......
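For reference, a sketch of the fields Workhorse generates for an accelerated artifact upload (values are hypothetical; the field names match the params declared above):

# After a direct upload to object storage:
params = {
  'file.remote_id' => '12312300',              # object key under tmp/upload/
  'file.name'      => 'artifacts.zip',
  'file.sha256'    => 'deadbeef...',           # hypothetical checksum
  'file.size'      => 1024,
  'metadata.path'  => '/tmp/uploads/metadata', # hypothetical local path
  'metadata.name'  => 'metadata.gz'
}
artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)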
......@@ -125,6 +125,8 @@ excluded_attributes:
- :trace
- :token
- :when
- :artifacts_file
- :artifacts_metadata
push_event_payload:
- :event_id
project_badges:
......
......@@ -82,7 +82,7 @@ module Gitlab
end
def open_file(path, name)
::UploadedFile.new(path, name || File.basename(path), 'application/octet-stream')
::UploadedFile.new(path, filename: name || File.basename(path), content_type: 'application/octet-stream')
end
end
......
......@@ -36,10 +36,6 @@ module Gitlab
}
end
def artifact_upload_ok
{ TempPath: JobArtifactUploader.workhorse_upload_path }
end
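# Review note: the static { TempPath: ... } response is superseded by
# JobArtifactUploader.workhorse_authorize in the runner API above, which can
# additionally advertise a RemoteObject for direct upload.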
def send_git_blob(repository, blob)
params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_raw_show, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
{
......
require "tempfile"
require "tmpdir"
require "fileutils"
# Taken from: Rack::Test::UploadedFile
class UploadedFile
InvalidPathError = Class.new(StandardError)
# The filename, *not* including the path, of the "uploaded" file
attr_reader :original_filename
......@@ -12,14 +14,46 @@ class UploadedFile
# The content type of the "uploaded" file
attr_accessor :content_type
def initialize(path, filename, content_type = "text/plain")
raise "#{path} file does not exist" unless ::File.exist?(path)
attr_reader :remote_id
attr_reader :sha256
def initialize(path, filename: nil, content_type: "application/octet-stream", sha256: nil, remote_id: nil)
raise InvalidPathError, "#{path} file does not exist" unless ::File.exist?(path)
@content_type = content_type
@original_filename = filename || ::File.basename(path)
@content_type = content_type
@sha256 = sha256
@remote_id = remote_id
@tempfile = File.new(path, 'rb')
end
def self.from_params(params, field, upload_path)
unless params["#{field}.path"]
raise InvalidPathError, "file is invalid" if params["#{field}.remote_id"]
return
end
file_path = File.realpath(params["#{field}.path"])
unless self.allowed_path?(file_path, [upload_path, Dir.tmpdir].compact)
raise InvalidPathError, "insecure path used '#{file_path}'"
end
UploadedFile.new(file_path,
filename: params["#{field}.name"],
content_type: params["#{field}.type"] || 'application/octet-stream',
sha256: params["#{field}.sha256"],
remote_id: params["#{field}.remote_id"])
end
def self.allowed_path?(file_path, paths)
paths.any? do |path|
File.exist?(path) && file_path.start_with?(File.realpath(path))
end
end
def path
@tempfile.path
end
......
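A usage sketch for the new class-level helper (paths are hypothetical):

file = UploadedFile.from_params(
  { 'file.path' => '/tmp/uploads/abc123', 'file.name' => 'artifact.zip' },
  :file, '/tmp/uploads')
file.original_filename # => "artifact.zip"
file.sha256            # => nil (Workhorse did not supply one)
file.remote_id         # => nil, so CarrierWave will treat it as a local file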
require 'spec_helper'
describe 'Artifacts direct upload support' do
subject do
load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
end
let(:connection) do
{ provider: provider }
end
before do
stub_artifacts_setting(
object_store: {
enabled: enabled,
direct_upload: direct_upload,
connection: connection
})
end
context 'when object storage is enabled' do
let(:enabled) { true }
context 'when direct upload is enabled' do
let(:direct_upload) { true }
context 'when provider is Google' do
let(:provider) { 'Google' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
context 'when connection is empty' do
let(:connection) { nil }
it 'raises an error' do
expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
end
end
context 'when other provider is used' do
let(:provider) { 'AWS' }
it 'raises an error' do
expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
end
end
end
context 'when direct upload is disabled' do
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
context 'when object storage is disabled' do
let(:enabled) { false }
let(:direct_upload) { false }
let(:provider) { 'AWS' }
it 'succeeds' do
expect { subject }.not_to raise_error
end
end
end
......@@ -6180,12 +6180,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": null
},
"artifacts_metadata": {
"url": null
},
"erased_by_id": null,
"erased_at": null,
"type": "Ci::Build",
......@@ -6218,12 +6212,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/72/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/72/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
}
......@@ -6292,12 +6280,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/74/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/74/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
},
......@@ -6327,12 +6309,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": null
},
"artifacts_metadata": {
"url": null
},
"erased_by_id": null,
"erased_at": null
}
......@@ -6392,12 +6368,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/76/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/76/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
},
......@@ -6427,12 +6397,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/75/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/75/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
}
......@@ -6492,12 +6456,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/78/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/78/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
},
......@@ -6527,12 +6485,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/77/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/77/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
}
......@@ -6592,12 +6544,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": null
},
"artifacts_metadata": {
"url": null
},
"erased_by_id": null,
"erased_at": null
},
......@@ -6627,12 +6573,6 @@
"user_id": null,
"target_url": null,
"description": null,
"artifacts_file": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/80/p5_build_artifacts.zip"
},
"artifacts_metadata": {
"url": "/Users/Test/Test/gitlab-development-kit/gitlab/shared/artifacts/2016_03/5/80/p5_build_artifacts_metadata.gz"
},
"erased_by_id": null,
"erased_at": null
}
......
require 'spec_helper'
describe UploadedFile do
describe ".from_params" do
let(:temp_dir) { Dir.tmpdir }
let(:temp_file) { Tempfile.new("test", temp_dir) }
let(:upload_path) { nil }
subject do
described_class.from_params(params, :file, upload_path)
end
before do
FileUtils.touch(temp_file)
end
after do
FileUtils.rm_f(temp_file)
FileUtils.rm_r(upload_path) if upload_path
end
context 'when valid file is specified' do
context 'only local path is specified' do
let(:params) do
{ 'file.path' => temp_file.path }
end
it "succeeds" do
is_expected.not_to be_nil
end
it "generates filename from path" do
expect(subject.original_filename).to eq(::File.basename(temp_file.path))
end
end
context 'all parameters are specified' do
let(:params) do
{ 'file.path' => temp_file.path,
'file.name' => 'my_file.txt',
'file.type' => 'my/type',
'file.sha256' => 'sha256',
'file.remote_id' => 'remote_id' }
end
it "succeeds" do
is_expected.not_to be_nil
end
it "generates filename from path" do
expect(subject.original_filename).to eq('my_file.txt')
expect(subject.content_type).to eq('my/type')
expect(subject.sha256).to eq('sha256')
expect(subject.remote_id).to eq('remote_id')
end
end
end
context 'when no params are specified' do
let(:params) do
{}
end
it "does not return an object" do
is_expected.to be_nil
end
end
context 'when only remote id is specified' do
let(:params) do
{ 'file.remote_id' => 'remote_id' }
end
it "raises an error" do
expect { subject }.to raise_error(UploadedFile::InvalidPathError, /file is invalid/)
end
end
context 'when verifying allowed paths' do
let(:params) do
{ 'file.path' => temp_file.path }
end
context 'when file is stored in system temporary folder' do
let(:temp_dir) { Dir.tmpdir }
it "succeeds" do
is_expected.not_to be_nil
end
end
context 'when file is stored in user provided upload path' do
let(:upload_path) { Dir.mktmpdir }
let(:temp_dir) { upload_path }
it "succeeds" do
is_expected.not_to be_nil
end
end
context 'when file is stored outside of user provided upload path' do
let!(:generated_dir) { Dir.mktmpdir }
let!(:temp_dir) { Dir.mktmpdir }
before do
# We overwrite default temporary path
allow(Dir).to receive(:tmpdir).and_return(generated_dir)
end
it "raises an error" do
expect { subject }.to raise_error(UploadedFile::InvalidPathError, /insecure path used/)
end
end
end
end
end
......@@ -950,12 +950,53 @@ describe API::Runner do
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
context 'when using token as parameter' do
it 'authorizes posting artifacts to running job' do
authorize_artifacts_with_token_in_params
expect(response).to have_gitlab_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).not_to be_nil
end
context 'posting artifacts to running job' do
subject do
authorize_artifacts_with_token_in_params
end
shared_examples 'authorizes local file' do
it 'succeeds' do
subject
expect(response).to have_gitlab_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
end
end
context 'when using local storage' do
it_behaves_like 'authorizes local file'
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_artifacts_object_storage(enabled: true, direct_upload: true)
end
it 'succeeds' do
subject
expect(response).to have_gitlab_http_status(200)
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
end
end
context 'when direct upload is disabled' do
before do
stub_artifacts_object_storage(enabled: true, direct_upload: false)
end
it_behaves_like 'authorizes local file'
end
end
end
it 'fails to post too large artifact' do
......@@ -1051,20 +1092,45 @@ describe API::Runner do
end
end
context 'when uses regular file post' do
before do
upload_artifacts(file_upload, headers_with_token, false)
end
it_behaves_like 'successful artifacts upload'
end
context 'when uses accelerated file post' do
before do
upload_artifacts(file_upload, headers_with_token, true)
end
it_behaves_like 'successful artifacts upload'
end
context 'when uses accelerated file post' do
context 'for file stored locally' do
before do
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'for file stored remotely' do
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
before do
fog_connection.directories.get('artifacts').files.create(
key: 'tmp/upload/12312300',
body: 'content'
)
upload_artifacts(file_upload, headers_with_token,
{ 'file.remote_id' => remote_id })
end
context 'when valid remote_id is used' do
let(:remote_id) { '12312300' }
it_behaves_like 'successful artifacts upload'
end
context 'when invalid remote_id is used' do
let(:remote_id) { 'invalid id' }
it 'responds with internal server error' do
expect(response).to have_gitlab_http_status(500)
expect(json_response['message']).to eq("Missing file")
end
end
end
end
context 'when using runners token' do
......@@ -1208,15 +1274,19 @@ describe API::Runner do
end
context 'when artifacts are being stored outside of tmp path' do
let(:new_tmpdir) { Dir.mktmpdir }
before do
# init before overwriting tmp dir
file_upload
# by configuring this path we allow passing files only from @tmpdir,
# while all temporary files are still created in the system tmp directory
@tmpdir = Dir.mktmpdir
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
end
after do
FileUtils.remove_entry @tmpdir
FileUtils.remove_entry(new_tmpdir)
end
it' "fails to post artifacts for outside of tmp path"' do
......@@ -1226,12 +1296,11 @@ describe API::Runner do
end
end
def upload_artifacts(file, headers = {}, accelerated = true)
params = if accelerated
{ 'file.path' => file.path, 'file.name' => file.original_filename }
else
{ 'file' => file }
end
def upload_artifacts(file, headers = {}, params = {})
params = params.merge({
'file.path' => file.path,
'file.name' => file.original_filename
})
post api("/jobs/#{job.id}/artifacts"), params, headers
end
......
......@@ -1016,7 +1016,7 @@ describe 'Git LFS API and storage' do
it_behaves_like 'a valid response' do
it 'responds with status 200, location of lfs remote store and object details' do
expect(json_response['TempPath']).to be_nil
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
......@@ -1073,7 +1073,9 @@ describe 'Git LFS API and storage' do
['123123', '../../123123'].each do |remote_id|
context "with invalid remote_id: #{remote_id}" do
subject do
put_finalize_with_args('file.remote_id' => remote_id)
put_finalize(with_tempfile: true, args: {
'file.remote_id' => remote_id
})
end
it 'responds with status 403' do
......@@ -1093,9 +1095,10 @@ describe 'Git LFS API and storage' do
end
subject do
put_finalize_with_args(
put_finalize(with_tempfile: true, args: {
'file.remote_id' => '12312300',
'file.name' => 'name')
'file.name' => 'name'
})
end
it 'responds with status 200' do
......@@ -1331,7 +1334,7 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, args: {})
upload_path = LfsObjectUploader.workhorse_local_upload_path
file_path = upload_path + '/' + lfs_tmp if lfs_tmp
......@@ -1340,12 +1343,12 @@ describe 'Git LFS API and storage' do
FileUtils.touch(file_path)
end
args = {
extra_args = {
'file.path' => file_path,
'file.name' => File.basename(file_path)
}.compact
}
put_finalize_with_args(args)
put_finalize_with_args(args.merge(extra_args).compact)
end
def put_finalize_with_args(args)
......
......@@ -45,6 +45,10 @@ module StubConfiguration
allow(Gitlab.config.lfs).to receive_messages(to_settings(messages))
end
def stub_artifacts_setting(messages)
allow(Gitlab.config.artifacts).to receive_messages(to_settings(messages))
end
def stub_storage_settings(messages)
messages.deep_stringify_keys!
......
......@@ -516,108 +516,46 @@ describe ObjectStorage do
end
end
describe '#store_workhorse_file!' do
describe '#cache!' do
subject do
uploader.store_workhorse_file!(params, :file)
uploader.cache!(uploaded_file)
end
context 'when local file is used' do
context 'when valid file is used' do
let(:target_path) do
File.join(uploader_class.root, uploader_class::TMP_UPLOAD_PATH)
let(:uploaded_file) do
fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg')
end
before do
FileUtils.mkdir_p(target_path)
end
context 'when no filename is specified' do
let(:params) do
{ "file.path" => "test/file" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
end
end
context 'when invalid file is specified' do
let(:file_path) do
File.join(target_path, "..", "test.file")
end
before do
FileUtils.touch(file_path)
end
let(:params) do
{ "file.path" => file_path,
"file.name" => "my_file.txt" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
end
end
context 'when filename is specified' do
let(:params) do
{ "file.path" => tmp_file,
"file.name" => "my_file.txt" }
end
let(:tmp_file) { Tempfile.new('filename', target_path) }
before do
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm_f(tmp_file)
end
it 'succeeds' do
expect { subject }.not_to raise_error
expect(uploader).to be_exists
end
it 'proper path is being used' do
subject
expect(uploader.path).to start_with(uploader_class.root)
expect(uploader.path).to end_with("my_file.txt")
end
it "properly caches the file" do
subject
it 'source file to not exist' do
subject
expect(File.exist?(tmp_file.path)).to be_falsey
end
expect(uploader).to be_exists
expect(uploader.path).to start_with(uploader_class.root)
expect(uploader.filename).to eq('rails_sample.jpg')
end
end
end
context 'when remote file is used' do
let(:temp_file) { Tempfile.new("test") }
let!(:fog_connection) do
stub_uploads_object_storage(uploader_class)
end
context 'when valid file is used' do
context 'when no filename is specified' do
let(:params) do
{ "file.remote_id" => "test/123123" }
end
before do
FileUtils.touch(temp_file)
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
end
end
after do
FileUtils.rm_f(temp_file)
end
context 'when valid file is used' do
context 'when invalid file is specified' do
let(:params) do
{ "file.remote_id" => "../test/123123",
"file.name" => "my_file.txt" }
let(:uploaded_file) do
UploadedFile.new(temp_file.path, remote_id: "../test/123123")
end
it 'raises an error' do
......@@ -626,9 +564,8 @@ describe ObjectStorage do
end
context 'when non existing file is specified' do
let(:params) do
{ "file.remote_id" => "test/12312300",
"file.name" => "my_file.txt" }
let(:uploaded_file) do
UploadedFile.new(temp_file.path, remote_id: "test/123123")
end
it 'raises an error' do
......@@ -636,10 +573,9 @@ describe ObjectStorage do
end
end
context 'when filename is specified' do
let(:params) do
{ "file.remote_id" => "test/123123",
"file.name" => "my_file.txt" }
context 'when valid file is specified' do
let(:uploaded_file) do
UploadedFile.new(temp_file.path, filename: "my_file.txt", remote_id: "test/123123")
end
let!(:fog_file) do
......@@ -649,36 +585,37 @@ describe ObjectStorage do
)
end
it 'succeeds' do
it 'file to be cached and remote stored' do
expect { subject }.not_to raise_error
expect(uploader).to be_exists
end
it 'path to not be temporary' do
subject
expect(uploader).to be_cached
expect(uploader.path).not_to be_nil
expect(uploader.path).not_to include('tmp/upload')
expect(uploader.url).to include('/my_file.txt')
expect(uploader.path).not_to include('tmp/cache')
expect(uploader.url).not_to be_nil
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
it 'url is used' do
subject
context 'when file is stored' do
subject do
uploader.store!(uploaded_file)
end
expect(uploader.url).not_to be_nil
expect(uploader.url).to include('/my_file.txt')
it 'file to be remotely stored in permanent location' do
subject
expect(uploader).to be_exists
expect(uploader).not_to be_cached
expect(uploader.path).not_to be_nil
expect(uploader.path).not_to include('tmp/upload')
expect(uploader.path).not_to include('tmp/cache')
expect(uploader.url).to include('/my_file.txt')
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
end
end
end
end
context 'when no file is used' do
let(:params) { {} }
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file/)
end
end
end
end