Commit adc84578 authored by Kamil Trzcinski

Update tests

parent 4a2f8604
@@ -291,7 +291,7 @@ module Ci
end
def downloadable_single_artifacts_file?
artifacts_metadata? && artifacts_file.local_file?
artifacts_metadata? && artifacts_file.file_storage?
end
def artifacts_metadata?
......
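The predicate above changes from `local_file?` to `file_storage?`, matching the uploader API used throughout this commit. As a hedged sketch (the method body is not part of this diff), a CarrierWave-style `file_storage?` typically checks which storage engine backs the uploader:

```ruby
# Sketch only; this is the common CarrierWave pattern, assumed here because
# the actual method body is not shown in the diff.
def file_storage?
  storage.is_a?(CarrierWave::Storage::File)
end
```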
@@ -65,9 +65,9 @@ module Projects
end
def extract_archive!(temp_path)
if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
if artifacts_filename.ends_with?('.tar.gz') || artifacts_filename.ends_with?('.tgz')
extract_tar_archive!(temp_path)
elsif artifacts.ends_with?('.zip')
elsif artifacts_filename.ends_with?('.zip')
extract_zip_archive!(temp_path)
else
raise 'unsupported artifacts format'
@@ -75,11 +75,13 @@ module Projects
end
def extract_tar_archive!(temp_path)
results = Open3.pipeline(%W(gunzip -c #{artifacts}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
raise 'pages failed to extract' unless results.compact.all?(&:success?)
build.artifacts_file.use_file do |artifacts_path|
results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
raise 'pages failed to extract' unless results.compact.all?(&:success?)
end
end
def extract_zip_archive!(temp_path)
@@ -130,6 +132,10 @@ module Projects
1 + max_size / BLOCK_SIZE
end
def artifacts_filename
build.artifacts_file.filename
end
def max_size
current_application_settings.max_pages_size.megabytes || MAX_SIZE
end
......
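For context, the `dd bs=#{BLOCK_SIZE} count=#{blocks}` stage in the extraction pipeline above caps how much data tar ever sees, with `blocks = 1 + max_size / BLOCK_SIZE`. A quick worked example, with constant values assumed purely for illustration:

```ruby
# Illustration only; the real BLOCK_SIZE value is not shown in this diff.
BLOCK_SIZE = 32.kilobytes            # assumed
max_size   = 100.megabytes           # default max_pages_size
blocks     = 1 + max_size / BLOCK_SIZE
# => 3201, so dd passes through at most ~100 MB plus one extra block
```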
class ArtifactUploader < ObjectStoreUploader
attr_reader :subject, :field
storage_options Gitlab.config.artifacts
def self.local_artifacts_store
@@ -11,11 +9,6 @@ class ArtifactUploader < ObjectStoreUploader
File.join(self.local_artifacts_store, 'tmp/uploads/')
end
def initialize(subject, field)
@subject = subject
@field = field
end
def store_dir
if file_storage?
default_local_path
@@ -25,11 +18,7 @@ class ArtifactUploader < ObjectStoreUploader
end
def cache_dir
if file_cache_storage?
File.join(self.class.local_artifacts_store, 'tmp/cache')
else
'tmp/cache'
end
File.join(self.class.local_artifacts_store, 'tmp/cache')
end
private
......
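With the `store_dir`/`cache_dir` changes above, local artifacts keep an absolute path under the configured artifacts store, while remote objects use only the relative part as their object key. A hedged sketch of that relative path, inferred from the `#store_dir` uploader specs near the end of this diff (the method name and body are assumptions):

```ruby
# Inferred from the spec expectation "%Y_%m/<project_id>/<job_id>"; not shown
# verbatim anywhere in this diff.
def default_path
  File.join(subject.created_at.utc.strftime('%Y_%m'),
            subject.project_id.to_s, subject.id.to_s)
end
```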
@@ -7,29 +7,38 @@ class ObjectStoreUploader < GitlabUploader
LOCAL_STORE = 1
REMOTE_STORE = 2
def object_store
subject.public_send(:"#{field}_store")
end
class << self
def storage_options(options)
@storage_options = options
end
def object_store=(value)
@storage = nil
subject.public_send(:"#{field}_store=", value)
def object_store_options
@storage_options&.object_store
end
def object_store_enabled?
object_store_options&.enabled
end
end
def self.storage_options(options)
@storage_options = options
attr_reader :subject, :field
def initialize(subject, field)
@subject = subject
@field = field
end
def self.object_store_options
@storage_options&.object_store
def object_store
subject.public_send(:"#{field}_store")
end
def self.object_store_enabled?
object_store_options&.enabled
def object_store=(value)
@storage = nil
subject.public_send(:"#{field}_store=", value)
end
def use_file
unless object_store == REMOTE_STORE
if file_storage?
return yield path
end
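The `use_file` hunk is truncated here. A hedged reconstruction of the full method, based on the visible lines and on the `#use_file` specs below (local storage yields `path` directly; remote storage yields a `tmp/cache` path):

```ruby
# Reconstruction under stated assumptions: cache_stored_file!, cache_path and
# the cleanup step are educated guesses about the elided lines.
def use_file
  if file_storage?
    return yield path
  end

  begin
    cache_stored_file!    # download the remote object into the local cache
    yield cache_path
  ensure
    FileUtils.rm_f(cache_path)   # assumed cleanup of the cached copy
  end
end
```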
@@ -54,56 +63,51 @@ class ObjectStoreUploader < GitlabUploader
old_file = file
old_store = object_store
# for moving remote file we need to first store it locally
cache_stored_file! unless file_storage?
# change storage
self.object_store = new_store
# store file on a new storage
new_file = storage.store!(old_file)
storage.store!(file).tap do |new_file|
# since we change storage store the new storage
# in case of failure delete new file
begin
subject.save!
rescue => e
new_file.delete
self.object_store = old_store
raise e
end
# since we change storage store the new storage
# in case of failure delete new file
begin
subject.save!
rescue
self.object_store = old_store
new_file.delete
old_file.delete
end
old_file.delete
end
def move_to_store
object_store != REMOTE_STORE
end
def move_to_cache
false
end
def fog_directory
self.class.object_store_options.bucket
self.class.object_store_options.remote_directory
end
def fog_credentials
object_store_options = self.class.object_store_options
{
provider: object_store_options.provider,
aws_access_key_id: object_store_options.access_key_id,
aws_secret_access_key: object_store_options.secret_access_key,
region: object_store_options.region,
endpoint: object_store_options.endpoint,
path_style: true
}
self.class.object_store_options.connection
end
def fog_public
false
end
def move_to_store
file.try(:storage) == storage
end
def move_to_cache
file.try(:storage) == cache_storage
end
private
def set_default_local_store(new_file)
object_store ||= LOCAL_STORE
self.object_store = LOCAL_STORE unless self.object_store
end
def storage
......
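Because the hunk above interleaves the removed and added versions of `migrate!`, here is a hedged reconstruction of the new flow (the ordering of the elided pieces is an assumption): cache remote files locally, flip the store attribute, copy the file, persist the subject, and roll back by deleting the new copy if the save fails.

```ruby
def migrate!(new_store)
  old_file  = file
  old_store = object_store

  # for moving a remote file we need to first store it locally
  cache_stored_file! unless file_storage?

  # change storage
  self.object_store = new_store

  storage.store!(file).tap do |new_file|
    # since we changed the storage, persist the new store on the subject;
    # in case of failure delete the new file and restore the old store
    begin
      subject.save!
    rescue => e
      new_file.delete
      self.object_store = old_store
      raise e
    end

    old_file.delete
  end
end
```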
@@ -138,13 +138,14 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
object_store:
enabled: false
provider: AWS # Only AWS supported at the moment
access_key_id: VXKLW2P7WP83RM3OQAYU
secret_access_key: hEm7WuxW3Qct9tsxNqSw+iyP26fcCacz78vErkiI
bucket: docker
region: eu-central-1
# object_store:
# enabled: false
# remote_directory: artifacts
# connection:
# provider: AWS # Only AWS supported at the moment
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: eu-central-1
## Git LFS
lfs:
......
@@ -311,6 +311,14 @@ Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
# Convert upload connection settings to use symbol keys, to make Fog happy
if Settings.artifacts['object_store']['connection']
Settings.artifacts['object_store']['connection'] = Hash[Settings.artifacts['object_store']['connection'].map { |k, v| [k.to_sym, v] }]
end
#
# Registry
#
......
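The symbol-key conversion above matters because Fog expects symbol keys in its connection hash; a minimal illustration:

```ruby
# Illustration only: string keys from gitlab.yml become symbol keys for Fog.
connection = { 'provider' => 'AWS', 'aws_access_key_id' => 'KEY' }
connection = Hash[connection.map { |k, v| [k.to_sym, v] }]
# => { provider: 'AWS', aws_access_key_id: 'KEY' }
```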
@@ -5,8 +5,13 @@ class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
disable_ddl_transaction!
def change
def up
add_column_with_default(:ci_builds, :artifacts_file_store, :integer, default: 1)
add_column_with_default(:ci_builds, :artifacts_metadata_store, :integer, default: 1)
end
def down
remove_column(:ci_builds, :artifacts_file_store)
remove_column(:ci_builds, :artifacts_metadata_store)
end
end
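For reference, a sketch of the complete migration file, assuming GitLab's standard `Gitlab::Database::MigrationHelpers` supplies `add_column_with_default` (which is why `disable_ddl_transaction!` is needed):

```ruby
class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers   # assumed; provides add_column_with_default

  disable_ddl_transaction!

  def up
    # default: 1 corresponds to ObjectStoreUploader::LOCAL_STORE
    add_column_with_default(:ci_builds, :artifacts_file_store, :integer, default: 1)
    add_column_with_default(:ci_builds, :artifacts_metadata_store, :integer, default: 1)
  end

  def down
    remove_column(:ci_builds, :artifacts_file_store)
    remove_column(:ci_builds, :artifacts_metadata_store)
  end
end
```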
@@ -88,10 +88,35 @@ _The artifacts are stored by default in
The previously mentioned methods use the local disk to store artifacts. However,
there is the option to use object stores like AWS S3. To do this, set the
`object_store` flag to true in your `gitlab.rb`. This relies on valid AWS
credentials to be configured already. Please note, that enabling this feature
`object_store` configuration in your `gitlab.yml`. This relies on valid AWS
credentials already being configured.
```yaml
artifacts:
enabled: true
path: /mnt/storage/artifacts
object_store:
enabled: true
remote_directory: my-bucket-name
connection:
provider: AWS
aws_access_key_id: S3_KEY_ID
aws_secret_access_key: S3_SECRET_KEY_ID
region: eu-central-1
```
This will allow you to migrate existing artifacts to the object store,
but all new artifacts will still be stored on the local disk.
In the future you will be given an option to define the default storage
for all new artifacts. Currently, the artifacts migration has to be executed manually:
```bash
gitlab-rake gitlab:artifacts:migrate
```
Please note that enabling this feature
will have the effect that artifacts are _not_ browsable anymore through the web
interface.
interface. This limitation will be removed in one of the upcoming releases.
## Set the maximum file size of the artifacts
......
@@ -334,7 +334,7 @@ module API
if artifacts_file.file_storage?
present_file!(artifacts_file.path, artifacts_file.filename)
else
redirect_to(artifacts_file.url)
redirect(artifacts_file.url)
end
end
......
@@ -192,7 +192,7 @@ module Ci
end
unless artifacts_file.file_storage?
return redirect_to build.artifacts_file.url
return redirect(build.artifacts_file.url)
end
present_file!(artifacts_file.path, artifacts_file.filename)
......
desc "GitLab | Migrate files for artifacts to comply with new storage format"
namespace :gitlab do
namespace :artifacts do
task migrate: :environment do
puts 'Artifacts'.color(:yellow)
Ci::Build.joins(:project).with_artifacts
.where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
.find_each(batch_size: 100) do |build|
begin
build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
print '.'
rescue
print 'F'
end
end
end
end
end
desc "GitLab | Migrate files for artifacts to comply with new storage format"
task migrate_artifacts: :environment do
puts 'Artifacts'.color(:yellow)
Ci::Build.joins(:project).with_artifacts
.where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
.find_each(batch_size: 100) do |build|
begin
build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
print '.'
rescue
print 'F'
end
end
end
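The task prints `Artifacts`, then one `.` per migrated build and an `F` per failure. A typical invocation might look like this (output illustrative only):

```bash
sudo gitlab-rake gitlab:artifacts:migrate
# Artifacts
# ....F....
```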
@@ -162,6 +162,11 @@ FactoryGirl.define do
end
end
trait :remote_store do
artifacts_file_store ArtifactUploader::REMOTE_STORE
artifacts_metadata_store ArtifactUploader::REMOTE_STORE
end
trait :artifacts_expired do
after(:create) do |build, _|
build.artifacts_file =
......
@@ -224,7 +224,9 @@ CommitStatus:
- target_url
- description
- artifacts_file
- artifacts_file_store
- artifacts_metadata
- artifacts_metadata_store
- erased_by_id
- erased_at
- artifacts_expire_at
......
@@ -106,6 +106,50 @@ describe Ci::Build, :models do
end
end
describe '#browsable_artifacts?' do
subject { build.browsable_artifacts? }
context 'artifacts metadata does not exist' do
before do
build.update_attributes(artifacts_metadata: nil)
end
it { is_expected.to be_falsy }
end
context 'artifacts metadata does exist' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end
end
describe '#downloadable_single_artifacts_file?' do
let(:build) { create(:ci_build, :artifacts, artifacts_file_store: store) }
subject { build.downloadable_single_artifacts_file? }
before do
expect_any_instance_of(Ci::Build).to receive(:artifacts_metadata?).and_call_original
end
context 'artifacts are stored locally' do
let(:store) { ObjectStoreUploader::LOCAL_STORE }
it { is_expected.to be_truthy }
end
context 'artifacts are stored remotely' do
let(:store) { ObjectStoreUploader::REMOTE_STORE }
before do
stub_artifacts_object_storage
end
it { is_expected.to be_falsey }
end
end
describe '#artifacts_expired?' do
subject { build.artifacts_expired? }
......
@@ -189,30 +189,41 @@ describe API::Jobs, :api do
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
before do
stub_artifacts_object_storage
get api("/projects/#{project.id}/jobs/#{build.id}/artifacts", api_user)
end
context 'job with artifacts' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
context 'when artifacts are stored locally' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
context 'authorized user' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
context 'authorized user' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
it 'returns specific job artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
end
end
it 'returns specific job artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
context 'unauthorized user' do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
expect(response).to have_http_status(401)
end
end
end
context 'unauthorized user' do
let(:api_user) { nil }
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
it 'does not return specific job artifacts' do
expect(response).to have_http_status(401)
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end
@@ -227,6 +238,7 @@ describe API::Jobs, :api do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
stub_artifacts_object_storage
build.success
end
@@ -282,14 +294,24 @@ describe API::Jobs, :api do
context 'find proper job' do
shared_examples 'a valid file' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
"attachment; filename=#{build.artifacts_file.filename}" }
context 'when artifacts are stored locally' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
"attachment; filename=#{build.artifacts_file.filename}" }
end
it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
end
it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end
context 'with regular branch' do
......
@@ -767,7 +767,10 @@ describe API::Runner do
let(:file_upload) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before { job.run! }
before do
stub_artifacts_object_storage
job.run!
end
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
context 'when using token as parameter' do
@@ -1059,15 +1062,26 @@ describe API::Runner do
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts) }
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
context 'when using job token' do
it 'downloads artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include download_headers
context 'when artifacts are stored locally' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
it 'downloads artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include download_headers
end
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, :artifacts, :remote_store) }
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end
......
@@ -187,22 +187,33 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
context 'job with artifacts' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
context 'when artifacts are stored locally' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
context 'authorized user' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
context 'authorized user' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
it 'returns specific job artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
end
end
end
it 'returns specific job artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
@@ -225,6 +236,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
stub_artifacts_object_storage
build.success
end
@@ -280,14 +292,24 @@ describe API::V3::Builds do
context 'find proper job' do
shared_examples 'a valid file' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
"attachment; filename=#{build.artifacts_file.filename}" }
context 'when artifacts are stored locally' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
"attachment; filename=#{build.artifacts_file.filename}" }
end
it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
end
it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end
context 'with regular branch' do
......
@@ -455,7 +455,10 @@ describe Ci::API::Builds do
let(:token) { build.token }
let(:headers_with_token) { headers.merge(Ci::API::Helpers::BUILD_TOKEN_HEADER => token) }
before { build.run! }
before do
stub_artifacts_object_storage
build.run!
end
describe "POST /builds/:id/artifacts/authorize" do
context "authorizes posting artifact to running build" do
@@ -789,16 +792,26 @@ describe Ci::API::Builds do
end
context 'build has artifacts' do
let(:build) { create(:ci_build, :artifacts) }
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
shared_examples 'having downloadable artifacts' do
it 'download artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include download_headers
context 'when stored locally' do
let(:build) { create(:ci_build, :artifacts) }
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
it 'downloads artifacts' do
expect(response).to have_http_status(200)
expect(response.headers).to include download_headers
end
end
context 'when stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store) }
it 'redirects to the artifacts file' do
expect(response).to have_http_status(302)
end
end
end
......
@@ -22,7 +22,8 @@ describe Ci::RetryBuildService, :services do
%i[type lock_version target_url base_tags
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried].freeze
user_id auto_canceled_by_id retried
artifacts_file_store artifacts_metadata_store].freeze
shared_examples 'build duplication' do
let(:build) do
......
module StubConfiguration
def stub_artifacts_object_storage(enabled: true)
Fog.mock!
allow(Gitlab.config.artifacts.object_store).to receive_messages(
enabled: enabled,
remote_directory: 'artifacts',
connection: {
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
region: 'eu-central-1'
}
)
::Fog::Storage.new(Gitlab.config.artifacts.object_store.connection).tap do |connection|
begin
connection.directories.create(key: 'artifacts')
rescue Excon::Error::Conflict
end
end if enabled
end
end
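For reference, specs opt into the mocked object store through a `before` hook, as the spec changes in this commit show; passing `enabled: false` exercises the disabled-storage path:

```ruby
before do
  stub_artifacts_object_storage                   # Fog-mocked S3 bucket 'artifacts'
  # stub_artifacts_object_storage(enabled: false) # to test the disabled path
end
```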
require 'rails_helper'
describe ArtifactUploader do
let(:job) { create(:ci_build) }
let(:store) { described_class::LOCAL_STORE }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :artifacts_file) }
let(:path) { Gitlab.config.artifacts.path }
let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_artifacts_store' do
subject { described_class.local_artifacts_store }
@@ -18,21 +19,35 @@ describe ArtifactUploader do
describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
end
describe '#store_dir' do
subject { uploader.store_dir }
it { is_expected.to start_with(path) }
it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with(path) }
end
context 'when using remote storage' do
let(:store) { described_class::REMOTE_STORE }
before do
stub_artifacts_object_storage
end
it { is_expected.to eq(path) }
end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(path) }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/cache') }
end
end
require 'rails_helper'
require 'carrierwave/storage/fog'
describe ObjectStoreUploader do
let(:uploader_class) { Class.new(described_class) }
let(:object) { double }
let(:uploader) { uploader_class.new(object, :artifacts_file) }
describe '#object_store' do
it "calls artifacts_file_store on object" do
expect(object).to receive(:artifacts_file_store)
uploader.object_store
end
end
describe '#object_store=' do
it "calls artifacts_file_store= on object" do
expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
uploader.object_store = described_class::REMOTE_STORE
end
end
context 'when using ArtifactsUploader' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
let(:uploader) { job.artifacts_file }
context 'checking described_class' do
let(:store) { described_class::LOCAL_STORE }
it "uploader is of a described_class" do
expect(uploader).to be_a(described_class)
end
end
describe '#use_file' do
context 'when file is stored locally' do
let(:store) { described_class::LOCAL_STORE }
it "calls a regular path" do
expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
end
end
context 'when file is stored remotely' do
let(:store) { described_class::REMOTE_STORE }
before do
stub_artifacts_object_storage
end
it "calls a cache path" do
expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
end
end
end
describe '#migrate!' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
let(:uploader) { job.artifacts_file }
let(:store) { described_class::LOCAL_STORE }
subject { uploader.migrate!(new_store) }
context 'when using the same storage' do
let(:new_store) { store }
it "to not migrate the storage" do
subject
expect(uploader.object_store).to eq(store)
end
end
context 'when migrating to local storage' do
let(:store) { described_class::REMOTE_STORE }
let(:new_store) { described_class::LOCAL_STORE }
before do
stub_artifacts_object_storage
end
it "local file does not exist" do
expect(File.exist?(uploader.path)).to eq(false)
end
it "does migrate the file" do
subject
expect(uploader.object_store).to eq(new_store)
expect(File.exist?(uploader.path)).to eq(true)
end
end
context 'when migrating to remote storage' do
let(:new_store) { described_class::REMOTE_STORE }
before do
@current_path = uploader.path
end
it "file does exist" do
expect(File.exist?(@current_path)).to eq(true)
end
context 'when storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it "to raise an error" do
expect { subject }.to raise_error(/Object Storage is not enabled/)
end
end
context 'when credentials are set' do
before do
stub_artifacts_object_storage
end
it "does migrate the file" do
subject
expect(uploader.object_store).to eq(new_store)
expect(File.exist?(@current_path)).to eq(false)
end
it "does delete original file" do
subject
expect(File.exist?(@current_path)).to eq(false)
end
context 'when subject save fails' do
before do
expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
end
it "does catch an error" do
expect { subject }.to raise_error(/exception/)
end
it "original file is not removed" do
begin
subject
rescue
end
expect(File.exist?(@current_path)).to eq(true)
end
end
end
end
end
end
describe '#fog_directory' do
let(:remote_directory) { 'directory' }
before do
uploader_class.storage_options double(
object_store: double(remote_directory: remote_directory))
end
subject { uploader.fog_directory }
it { is_expected.to eq(remote_directory) }
end
describe '#fog_credentials' do
let(:connection) { 'connection' }
before do
uploader_class.storage_options double(
object_store: double(connection: connection))
end
subject { uploader.fog_credentials }
it { is_expected.to eq(connection) }
end
describe '#fog_public' do
subject { uploader.fog_public }
it { is_expected.to eq(false) }
end
end