Commit d7e629f6 authored by Micaël Bergeron

removing old specs

parent 42099681
class JobArtifactUploader < GitlabUploader
prepend EE::JobArtifactUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
......
@@ -340,6 +340,7 @@ ActiveRecord::Schema.define(version: 20180308052825) do
add_index "ci_build_trace_sections", ["build_id", "section_name_id"], name: "index_ci_build_trace_sections_on_build_id_and_section_name_id", unique: true, using: :btree
add_index "ci_build_trace_sections", ["project_id"], name: "index_ci_build_trace_sections_on_project_id", using: :btree
add_index "ci_build_trace_sections", ["section_name_id"], name: "index_ci_build_trace_sections_on_section_name_id", using: :btree
create_table "ci_builds", force: :cascade do |t|
t.string "status"
......
module EE
module JobArtifactUploader
extend ActiveSupport::Concern
def open
if file_storage?
super
else
::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
end
end
end
end
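The `open` override above is the core of this change: callers receive an IO-like handle whether the artifact lives on local disk or in object storage. A minimal calling-side sketch (the `job_artifact` variable and `process` method are illustrative, not part of this commit):
# Sketch only: for local storage `open` returns a regular file handle
# (via super); for remote storage it returns a Gitlab::Ci::Trace::HttpIO.
stream = job_artifact.file.open
stream.each_line { |line| process(line) }
stream.close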
##
# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# source: https://gitlab.com/snippets/1685610
module Gitlab
module Ci
class Trace
class HttpIO
BUFFER_SIZE = 128.kilobytes
InvalidURLError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
attr_reader :uri, :size
attr_reader :tell
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(url, size)
raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
@uri = URI(url)
@size = size
@tell = 0
end
def close
# no-op
end
def binmode
# no-op
end
def binmode?
true
end
def path
nil
end
def url
@uri.to_s
end
def seek(pos, where = IO::SEEK_SET)
new_pos =
case where
when IO::SEEK_END
size + pos
when IO::SEEK_SET
pos
when IO::SEEK_CUR
tell + pos
else
-1
end
raise 'new position is outside of file' if new_pos < 0 || new_pos > size
@tell = new_pos
end
def eof?
tell == size
end
def each_line
until eof?
line = readline
break if line.nil?
yield(line)
end
end
def read(length = nil)
out = ""
until eof? || (length && out.length >= length)
data = get_chunk
break if data.empty?
out << data
@tell += data.bytesize
end
out = out[0, length] if length && out.length > length
out
end
def readline
out = ""
until eof?
data = get_chunk
new_line = data.index("\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
out << data
@tell += data.bytesize
end
end
out
end
def write(data)
raise NotImplementedError
end
def truncate(offset)
raise NotImplementedError
end
def flush
raise NotImplementedError
end
def present?
true
end
private
##
# The below methods are not implemented in IO class
#
def in_range?
@chunk_range&.include?(tell)
end
def get_chunk
unless in_range?
response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
http.request(request)
end
raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
@chunk = response.body.force_encoding(Encoding::BINARY)
@chunk_range = response.content_range
##
# Note: If the provider does not return a Content-Range, we set it to the range we requested
# Provider: minio
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: AWS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: GCS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPOK 200
@chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
end
@chunk[chunk_offset..BUFFER_SIZE]
end
def request
Net::HTTP::Get.new(uri).tap do |request|
request.set_range(chunk_start, BUFFER_SIZE)
end
end
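##
# Example exchange for `request` above (values illustrative): with
# chunk_start = 0 and BUFFER_SIZE = 131_072, Net::HTTP::Get#set_range
# produces the header `Range: bytes=0-131071`, and a conforming provider
# answers `206 Partial Content` with `Content-Range: bytes 0-131071/<size>`.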
def chunk_offset
tell % BUFFER_SIZE
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
end
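##
# Worked example of the chunk arithmetic above, assuming
# BUFFER_SIZE = 128.kilobytes (131_072 bytes) and tell = 200_000:
#   chunk_start  = (200_000 / 131_072) * 131_072 = 131_072
#   chunk_offset = 200_000 % 131_072             = 68_928
#   chunk_end    = min(131_072 + 131_072, size)
# i.e. the position falls 68_928 bytes into the second 128 KB chunk.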
end
end
end
end
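Taken together, HttpIO lets a remote trace be consumed as if it were a local file. A minimal usage sketch, assuming a presigned URL and a known byte size (both values illustrative, not part of this commit):
trace = Gitlab::Ci::Trace::HttpIO.new(
  'https://storage.example.com/traces/1.log?X-Amz-Signature=abc', # hypothetical presigned URL
  524_288 # total size in bytes
)
trace.each_line { |line| puts line } # chunks fetched lazily, 128 KB per Range request
trace.seek(0) # rewind
head = trace.read(100) # just the first 100 bytes
trace.close # no-op, kept for IO compatibility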
require 'spec_helper'
describe Gitlab::Ci::Trace::HttpIO do
include HttpIOHelpers
let(:http_io) { described_class.new(url, size) }
let(:url) { remote_trace_url }
let(:size) { remote_trace_size }
describe '#close' do
subject { http_io.close }
it { is_expected.to be_nil }
end
describe '#binmode' do
subject { http_io.binmode }
it { is_expected.to be_nil }
end
describe '#binmode?' do
subject { http_io.binmode? }
it { is_expected.to be_truthy }
end
describe '#path' do
subject { http_io.path }
it { is_expected.to be_nil }
end
describe '#url' do
subject { http_io.url }
it { is_expected.to eq(url) }
end
describe '#seek' do
subject { http_io.seek(pos, where) }
context 'when pos is moved to the end of the file' do
let(:pos) { 0 }
let(:where) { IO::SEEK_END }
it { is_expected.to eq(size) }
end
context 'when pos is moved to the middle of the file' do
let(:pos) { size / 2 }
let(:where) { IO::SEEK_SET }
it { is_expected.to eq(size / 2) }
end
context 'when pos is moved around' do
it 'matches the result' do
expect(http_io.seek(0)).to eq(0)
expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
end
end
end
describe '#eof?' do
subject { http_io.eof? }
context 'when current pos is at end of the file' do
before do
http_io.seek(size, IO::SEEK_SET)
end
it { is_expected.to be_truthy }
end
context 'when current pos is not at end of the file' do
before do
http_io.seek(0, IO::SEEK_SET)
end
it { is_expected.to be_falsey }
end
end
describe '#each_line' do
subject { http_io.each_line }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
it 'yields lines' do
expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
end
context 'when the bucket is on GCS' do
context 'when BUFFER_SIZE is larger than file size' do
before do
stub_remote_trace_200
set_larger_buffer_size_than(size)
end
it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
http_io.each_line { |line| }
end
end
end
end
describe '#read' do
subject { http_io.read(length) }
context 'when there are no network issues' do
before do
stub_remote_trace_206
end
context 'when reading the whole file' do
let(:length) { nil }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when reading only the first 100 bytes' do
let(:length) { 100 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length])
end
end
end
context 'when trying to read more than the file size' do
let(:length) { size + 1000 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to eq(remote_trace_body)
end
end
end
context 'when trying to read 0 bytes' do
let(:length) { 0 }
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it 'reads a trace' do
is_expected.to be_empty
end
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
describe '#readline' do
subject { http_io.readline }
let(:string_io) { StringIO.new(remote_trace_body) }
before do
stub_remote_trace_206
end
shared_examples 'all line matching' do
it 'reads a line' do
(0...remote_trace_body.lines.count).each do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
context 'when there is a network issue' do
let(:length) { nil }
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
context 'when BUFFER_SIZE is smaller than file size' do
before do
set_smaller_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when BUFFER_SIZE is larger than file size' do
before do
set_larger_buffer_size_than(size)
end
it_behaves_like 'all line matching'
end
context 'when pos is at middle of the file' do
before do
set_smaller_buffer_size_than(size)
http_io.seek(size / 2)
string_io.seek(size / 2)
end
it 'reads from pos' do
expect(http_io.readline).to eq(string_io.readline)
end
end
end
describe '#write' do
subject { http_io.write(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#truncate' do
subject { http_io.truncate(nil) }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#flush' do
subject { http_io.flush }
it { expect { subject }.to raise_error(NotImplementedError) }
end
describe '#present?' do
subject { http_io.present? }
it { is_expected.to be_truthy }
end
end
require 'spec_helper'
describe LfsObject do
describe '#local_store?' do
it 'returns true when file_store is nil' do
subject.file_store = nil
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
end
describe '#destroy' do
subject { create(:lfs_object, :with_file) }
@@ -33,78 +13,4 @@ describe LfsObject do
end
end
end
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
subject { create(:lfs_object, :with_file) }
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
context 'when licensed' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
subject
end
it 'schedules the model for migration once' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
lfs_object = create(:lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
end
end
context 'when unlicensed' do
before do
stub_lfs_object_storage(background_upload: true, licensed: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
context 'when background upload is disabled' do
before do
stub_lfs_object_storage(background_upload: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
require 'spec_helper'
describe API::Jobs do
set(:project) do
create(:project, :repository, public_builds: false)
end
set(:pipeline) do
create(:ci_empty_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch)
end
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:user) { create(:user) }
let(:api_user) { user }
let(:reporter) { create(:project_member, :reporter, project: project).user }
let(:cross_project_pipeline_enabled) { true }
let(:object_storage_enabled) { true }
before do
stub_licensed_features(cross_project_pipelines: cross_project_pipeline_enabled,
object_storage: object_storage_enabled)
project.add_developer(user)
end
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
shared_examples 'downloads artifact' do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
end
it 'returns specific job artifacts' do
expect(response).to have_gitlab_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(job.artifacts_file.file.file)
end
end
context 'normal authentication' do
before do
stub_artifacts_object_storage
end
context 'when a job with artifacts is stored remotely' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
end
context 'authorized by job_token' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts"), job_token: job.token
end
context 'user is developer' do
let(:api_user) { user }
it_behaves_like 'downloads artifact'
end
context 'when anonymous user is accessing private artifacts' do
let(:api_user) { nil }
it 'hides artifacts and rejects request' do
expect(project).to be_private
expect(response).to have_gitlab_http_status(404)
end
end
context 'feature is disabled for EES' do
let(:api_user) { user }
let(:cross_project_pipeline_enabled) { false }
it 'disallows access to the artifacts' do
expect(response).to have_gitlab_http_status(404)
end
end
end
end
describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do
let(:api_user) { reporter }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
stub_artifacts_object_storage(licensed: :skip)
job.success
end
def get_for_ref(ref = pipeline.ref, job_name = job.name)
get api("/projects/#{project.id}/jobs/artifacts/#{ref}/download", api_user), job: job_name
end
context 'when finding the proper job' do
shared_examples 'a valid file' do
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
end
context 'with regular branch' do
before do
pipeline.reload
pipeline.update(ref: 'master',
sha: project.commit('master').sha)
get_for_ref('master')
end
it_behaves_like 'a valid file'
end
context 'with branch name containing slash' do
before do
pipeline.reload
pipeline.update(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
get_for_ref('improve/awesome')
end
it_behaves_like 'a valid file'
end
context 'when using job_token to authenticate' do
before do
pipeline.reload
pipeline.update(ref: 'master',
sha: project.commit('master').sha)
get api("/projects/#{project.id}/jobs/artifacts/master/download"), job: job.name, job_token: job.token
end
context 'when user is reporter' do
it_behaves_like 'a valid file'
end
context 'when user is admin, but not member' do
let(:api_user) { create(:admin) }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
it 'does not reveal that the artifact is present' do
expect(response).to have_gitlab_http_status(404)
end
end
end
end
end
end
require 'rails_helper'
require 'carrierwave/storage/fog'
class Implementation < GitlabUploader
include ObjectStorage::Concern
include ::RecordsUploads::Concern
prepend ::ObjectStorage::Extension::RecordsUploads
storage_options Gitlab.config.uploads
private
# user/:id
def dynamic_segment
File.join(model.class.to_s.underscore, model.id.to_s)
end
end
describe ObjectStorage do
let(:uploader_class) { Implementation }
let(:object) { build_stubbed(:user) }
let(:uploader) { uploader_class.new(object, :file) }
describe '#object_store=' do
before do
allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
end
it "reload the local storage" do
uploader.object_store = described_class::Store::LOCAL
expect(uploader.file_storage?).to be_truthy
end
it "reload the REMOTE storage" do
uploader.object_store = described_class::Store::REMOTE
expect(uploader.file_storage?).to be_falsey
end
context 'object_store is Store::LOCAL' do
before do
uploader.object_store = described_class::Store::LOCAL
end
describe '#store_dir' do
it 'is the composition of (base_dir, dynamic_segment)' do
expect(uploader.store_dir).to start_with("uploads/-/system/user/")
end
end
end
context 'object_store is Store::REMOTE' do
before do
uploader.object_store = described_class::Store::REMOTE
end
describe '#store_dir' do
it 'is the composition of (dynamic_segment)' do
expect(uploader.store_dir).to start_with("user/")
end
end
end
end
describe '#object_store' do
it "delegates to <mount>_store on model" do
expect(object).to receive(:file_store)
uploader.object_store
end
context 'when store is nil' do
before do
expect(object).to receive(:file_store).and_return(nil)
end
it "returns Store::LOCAL" do
expect(uploader.object_store).to eq(described_class::Store::LOCAL)
end
end
context 'when value is set' do
before do
expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
end
it "returns the given value" do
expect(uploader.object_store).to eq(described_class::Store::REMOTE)
end
end
end
describe '#file_cache_storage?' do
context 'when file storage is used' do
before do
expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::File }
end
it { expect(uploader).to be_file_cache_storage }
end
context 'when remote storage is used' do
before do
expect(uploader_class).to receive(:cache_storage) { CarrierWave::Storage::Fog }
end
it { expect(uploader).not_to be_file_cache_storage }
end
end
# this means the model includes
#   include RecordsUploads::Concern
#   prepend ObjectStorage::Extension::RecordsUploads
# and that the object_store persistence is delegated to the `Upload` model.
#
context 'when persist_object_store? is false' do
let(:object) { create(:project, :with_avatar) }
let(:uploader) { object.avatar }
it { expect(object).to be_a(Avatarable) }
it { expect(uploader.persist_object_store?).to be_falsey }
describe 'delegates the object_store logic to the `Upload` model' do
it 'sets @upload to the found `upload`' do
expect(uploader.upload).to eq(uploader.upload)
end
it 'sets @object_store to the `Upload` value' do
expect(uploader.object_store).to eq(uploader.upload.store)
end
end
describe '#migrate!' do
let(:new_store) { ObjectStorage::Store::REMOTE }
before do
stub_uploads_object_storage(uploader: AvatarUploader)
end
subject { uploader.migrate!(new_store) }
it 'persists @object_store to the recorded upload' do
subject
expect(uploader.upload.store).to eq(new_store)
end
describe 'fails' do
it 'is handled gracefully' do
store = uploader.object_store
expect_any_instance_of(Upload).to receive(:save!).and_raise("An error")
expect { subject }.to raise_error("An error")
expect(uploader.exists?).to be_truthy
expect(uploader.upload.store).to eq(store)
end
end
end
end
# this means the model holds an <mounted_as>_store attribute directly
# and does not delegate the object_store persistence to the `Upload` model.
#
context 'persist_object_store? is true' do
context 'when using JobArtifactUploader' do
let(:store) { described_class::Store::LOCAL }
let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
let(:uploader) { object.file }
context 'checking described_class' do
it "uploader include described_class::Concern" do
expect(uploader).to be_a(described_class::Concern)
end
end
describe '#use_file' do
context 'when file is stored locally' do
it "calls a regular path" do
expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
end
end
context 'when file is stored remotely' do
let(:store) { described_class::Store::REMOTE }
before do
stub_artifacts_object_storage
end
it "calls a cache path" do
expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
end
end
end
describe '#migrate!' do
subject { uploader.migrate!(new_store) }
shared_examples "updates the underlying <mounted>_store" do
it do
subject
expect(object.file_store).to eq(new_store)
end
end
context 'when using the same storage' do
let(:new_store) { store }
it "to not migrate the storage" do
subject
expect(uploader).not_to receive(:store!)
expect(uploader.object_store).to eq(store)
end
end
context 'when migrating to local storage' do
let(:store) { described_class::Store::REMOTE }
let(:new_store) { described_class::Store::LOCAL }
before do
stub_artifacts_object_storage
end
include_examples "updates the underlying <mounted>_store"
it "local file does not exist" do
expect(File.exist?(uploader.path)).to eq(false)
end
it "remote file exist" do
expect(uploader.file.exists?).to be_truthy
end
it "does migrate the file" do
subject
expect(uploader.object_store).to eq(new_store)
expect(File.exist?(uploader.path)).to eq(true)
end
end
context 'when migrating to remote storage' do
let(:new_store) { described_class::Store::REMOTE }
let!(:current_path) { uploader.path }
it "file does exist" do
expect(File.exist?(current_path)).to eq(true)
end
context 'when storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it "to raise an error" do
expect { subject }.to raise_error(/Object Storage is not enabled/)
end
end
context 'when storage is unlicensed' do
before do
stub_artifacts_object_storage(licensed: false)
end
it "raises an error" do
expect { subject }.to raise_error(/Object Storage feature is missing/)
end
end
context 'when credentials are set' do
before do
stub_artifacts_object_storage
end
include_examples "updates the underlying <mounted>_store"
it "does migrate the file" do
subject
expect(uploader.object_store).to eq(new_store)
end
it "does delete original file" do
subject
expect(File.exist?(current_path)).to eq(false)
end
context 'when subject save fails' do
before do
expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
end
it "original file is not removed" do
expect { subject }.to raise_error(/exception/)
expect(File.exist?(current_path)).to eq(true)
end
end
end
end
end
end
end
describe '#fog_directory' do
let(:remote_directory) { 'directory' }
before do
allow(uploader_class).to receive(:options) do
double(object_store: double(remote_directory: remote_directory))
end
end
subject { uploader.fog_directory }
it { is_expected.to eq(remote_directory) }
end
describe '#fog_credentials' do
let(:connection) { Settingslogic.new("provider" => "AWS") }
before do
allow(uploader_class).to receive(:options) do
double(object_store: double(connection: connection))
end
end
subject { uploader.fog_credentials }
it { is_expected.to eq(provider: 'AWS') }
end
describe '#fog_public' do
subject { uploader.fog_public }
it { is_expected.to eq(false) }
end
describe '#verify_license!' do
subject { uploader.verify_license!(nil) }
context 'when using local storage' do
before do
expect(object).to receive(:file_store) { described_class::Store::LOCAL }
end
it "does not raise an error" do
expect { subject }.not_to raise_error
end
end
context 'when using remote storage' do
before do
allow(uploader_class).to receive(:options) do
double(object_store: double(enabled: true))
end
expect(object).to receive(:file_store) { described_class::Store::REMOTE }
end
context 'feature is not available' do
before do
expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
end
it "does raise an error" do
expect { subject }.to raise_error(/Object Storage feature is missing/)
end
end
context 'feature is available' do
before do
expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
end
it "does not raise an error" do
expect { subject }.not_to raise_error
end
end
end
end
describe '.workhorse_authorize' do
subject { uploader_class.workhorse_authorize }
before do
# ensure that we use the regular Fog libraries;
# other tests might call `Fog.mock!`,
# which would make these tests fail
Fog.unmock!
end
shared_examples 'uses local storage' do
it "returns temporary path" do
is_expected.to have_key(:TempPath)
expect(subject[:TempPath]).to start_with(uploader_class.root)
expect(subject[:TempPath]).to include(described_class::TMP_UPLOAD_PATH)
end
it "does not return remote store" do
is_expected.not_to have_key(:RemoteObject)
end
end
shared_examples 'uses remote storage' do
it "returns remote store" do
is_expected.to have_key(:RemoteObject)
expect(subject[:RemoteObject]).to have_key(:ID)
expect(subject[:RemoteObject]).to have_key(:GetURL)
expect(subject[:RemoteObject]).to have_key(:DeleteURL)
expect(subject[:RemoteObject]).to have_key(:StoreURL)
expect(subject[:RemoteObject][:GetURL]).to include(described_class::TMP_UPLOAD_PATH)
expect(subject[:RemoteObject][:DeleteURL]).to include(described_class::TMP_UPLOAD_PATH)
expect(subject[:RemoteObject][:StoreURL]).to include(described_class::TMP_UPLOAD_PATH)
end
it "does not return local store" do
is_expected.not_to have_key(:TempPath)
end
end
context 'when object storage is disabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:enabled) { false }
end
it_behaves_like 'uses local storage'
end
context 'when object storage is enabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true }
end
context 'when direct upload is enabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true }
end
context 'uses AWS' do
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "AWS",
aws_access_key_id: "AWS_ACCESS_KEY_ID",
aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
region: "eu-central-1" }
end
end
it_behaves_like 'uses remote storage' do
let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
it 'returns links for S3' do
expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
end
end
end
context 'uses Google' do
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "Google",
google_storage_access_key_id: 'ACCESS_KEY_ID',
google_storage_secret_access_key: 'SECRET_ACCESS_KEY' }
end
end
it_behaves_like 'uses remote storage' do
let(:storage_url) { "https://storage.googleapis.com/uploads/" }
it 'returns links for Google Cloud' do
expect(subject[:RemoteObject][:GetURL]).to start_with(storage_url)
expect(subject[:RemoteObject][:DeleteURL]).to start_with(storage_url)
expect(subject[:RemoteObject][:StoreURL]).to start_with(storage_url)
end
end
end
end
context 'when direct upload is disabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { false }
end
it_behaves_like 'uses local storage'
end
end
end
describe '#store_workhorse_file!' do
subject do
uploader.store_workhorse_file!(params, :file)
end
context 'when local file is used' do
context 'when valid file is used' do
let(:target_path) do
File.join(uploader_class.root, uploader_class::TMP_UPLOAD_PATH)
end
before do
FileUtils.mkdir_p(target_path)
end
context 'when no filename is specified' do
let(:params) do
{ "file.path" => "test/file" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
end
end
context 'when invalid file is specified' do
let(:file_path) do
File.join(target_path, "..", "test.file")
end
before do
FileUtils.touch(file_path)
end
let(:params) do
{ "file.path" => file_path,
"file.name" => "my_file.txt" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
end
end
context 'when filename is specified' do
let(:params) do
{ "file.path" => tmp_file,
"file.name" => "my_file.txt" }
end
let(:tmp_file) { Tempfile.new('filename', target_path) }
before do
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm_f(tmp_file)
end
it 'succeeds' do
expect { subject }.not_to raise_error
expect(uploader).to be_exists
end
it 'uses the proper path' do
subject
expect(uploader.path).to start_with(uploader_class.root)
expect(uploader.path).to end_with("my_file.txt")
end
it 'removes the source file' do
subject
expect(File.exist?(tmp_file.path)).to be_falsey
end
end
end
end
context 'when remote file is used' do
let!(:fog_connection) do
stub_uploads_object_storage(uploader_class)
end
context 'when valid file is used' do
context 'when no filename is specified' do
let(:params) do
{ "file.remote_id" => "test/123123" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
end
end
context 'when invalid file is specified' do
let(:params) do
{ "file.remote_id" => "../test/123123",
"file.name" => "my_file.txt" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
end
end
context 'when a non-existing file is specified' do
let(:params) do
{ "file.remote_id" => "test/12312300",
"file.name" => "my_file.txt" }
end
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing file/)
end
end
context 'when filename is specified' do
let(:params) do
{ "file.remote_id" => "test/123123",
"file.name" => "my_file.txt" }
end
let!(:fog_file) do
fog_connection.directories.get('uploads').files.create(
key: 'tmp/upload/test/123123',
body: 'content'
)
end
it 'succeeds' do
expect { subject }.not_to raise_error
expect(uploader).to be_exists
end
it 'does not use a temporary path' do
subject
expect(uploader.path).not_to be_nil
expect(uploader.path).not_to include('tmp/upload')
expect(uploader.url).to include('/my_file.txt')
end
it 'returns a url to the file' do
subject
expect(uploader.url).not_to be_nil
expect(uploader.url).to include('/my_file.txt')
end
end
end
end
context 'when no file is used' do
let(:params) { {} }
it 'raises an error' do
expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file/)
end
end
end
end
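For orientation, the two parameter shapes exercised by the `#store_workhorse_file!` specs above, with illustrative values (paths and IDs are made up):
# Sketch only. Local finalization: Workhorse saved the body under TMP_UPLOAD_PATH.
uploader.store_workhorse_file!(
  { "file.path" => "/var/opt/gitlab/uploads/tmp/uploads/abc123",
    "file.name" => "my_file.txt" },
  :file
)
# Remote finalization: Workhorse streamed the body directly to object storage.
uploader.store_workhorse_file!(
  { "file.remote_id" => "abc123",
    "file.name" => "my_file.txt" },
  :file
)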
@@ -37,6 +37,10 @@ module Gitlab
end
def path
nil
end
def url
@uri.to_s
end
......
@@ -7,26 +7,6 @@ describe Gitlab::Ci::Trace::HttpIO do
let(:url) { remote_trace_url }
let(:size) { remote_trace_size }
describe 'Interchangeability between IO and HttpIO' do
EXCEPT_METHODS = %i[read_nonblock raw raw! cooked cooked! getch echo= echo?
winsize winsize= iflush oflush ioflush beep goto cursor cursor= pressed?
getpass write_nonblock stat pathconf wait_readable wait_writable getbyte <<
wait lines bytes chars codepoints getc readpartial set_encoding printf print
putc puts readlines gets each each_byte each_char each_codepoint to_io reopen
syswrite to_i fileno sysread fdatasync fsync sync= sync lineno= lineno readchar
ungetbyte readbyte ungetc nonblock= nread rewind pos= eof close_on_exec?
close_on_exec= closed? close_read close_write isatty tty? binmode? sysseek
advise ioctl fcntl pid external_encoding internal_encoding autoclose? autoclose=
posix_fileno nonblock? ready? noecho nonblock].freeze
it 'HttpIO covers core interfaces in IO' do
expected_interfaces = ::IO.instance_methods(false)
expected_interfaces -= EXCEPT_METHODS
expect(expected_interfaces - described_class.instance_methods).to be_empty
end
end
describe '#close' do
subject { http_io.close }
......@@ -48,6 +28,12 @@ describe Gitlab::Ci::Trace::HttpIO do
describe '#path' do
subject { http_io.path }
it { is_expected.to be_nil }
end
describe '#url' do
subject { http_io.url }
it { is_expected.to eq(url) }
end
......