Commit 7390772c authored by Grzegorz Bizon

Merge branch 'zj-object-store-artifacts' into 'master'

Object store for artifacts

Closes gitlab-ce#29203

See merge request !1762
parents df0b99e2 57b90490
@@ -293,18 +293,28 @@ module Ci
       !artifacts_expired? && artifacts_file.exists?
     end

+    def browsable_artifacts?
+      artifacts_metadata?
+    end
+
+    def downloadable_single_artifacts_file?
+      artifacts_metadata? && artifacts_file.file_storage?
+    end
+
     def artifacts_metadata?
       artifacts? && artifacts_metadata.exists?
     end

     def artifacts_metadata_entry(path, **options)
+      artifacts_metadata.use_file do |metadata_path|
         metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
-          artifacts_metadata.path,
+          metadata_path,
           path,
           **options)

         metadata.to_entry
+      end
     end

     def erase_artifacts!
       remove_artifacts_file!
...
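The hunk above routes metadata reads through `use_file`, so callers no longer assume the file lives on local disk. A minimal sketch of the calling pattern, assuming a hypothetical `build` record with uploaded artifacts:

```ruby
# use_file yields a local filesystem path for either backend: for a
# remotely stored file it first downloads the object into the cache and
# removes the cached copy once the block returns.
entry = build.artifacts_metadata.use_file do |metadata_path|
  Gitlab::Ci::Build::Artifacts::Metadata
    .new(metadata_path, 'index.html')
    .to_entry
end
```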
@@ -6,12 +6,14 @@ class License < ActiveRecord::Base
   GEO_FEATURE = 'GitLab_Geo'.freeze
   AUDITOR_USER_FEATURE = 'GitLab_Auditor_User'.freeze
   SERVICE_DESK_FEATURE = 'GitLab_ServiceDesk'.freeze
+  OBJECT_STORAGE_FEATURE = 'GitLab_ObjectStorage'.freeze
   RELATED_ISSUES_FEATURE = 'RelatedIssues'.freeze

   FEATURE_CODES = {
     geo: GEO_FEATURE,
     auditor_user: AUDITOR_USER_FEATURE,
     service_desk: SERVICE_DESK_FEATURE,
+    object_storage: OBJECT_STORAGE_FEATURE,
     related_issues: RELATED_ISSUES_FEATURE,
     # Features that make sense to Namespace:
     deploy_board: DEPLOY_BOARD_FEATURE,
@@ -33,7 +35,8 @@ class License < ActiveRecord::Base
     { FILE_LOCK_FEATURE => 1 },
     { GEO_FEATURE => 1 },
     { AUDITOR_USER_FEATURE => 1 },
-    { SERVICE_DESK_FEATURE => 1 }
+    { SERVICE_DESK_FEATURE => 1 },
+    { OBJECT_STORAGE_FEATURE => 1 }
   ].freeze

   EEU_FEATURES = [
@@ -54,7 +57,8 @@ class License < ActiveRecord::Base
     { FILE_LOCK_FEATURE => 1 },
     { GEO_FEATURE => 1 },
     { AUDITOR_USER_FEATURE => 1 },
-    { SERVICE_DESK_FEATURE => 1 }
+    { SERVICE_DESK_FEATURE => 1 },
+    { OBJECT_STORAGE_FEATURE => 1 }
   ].freeze

   FEATURES_BY_PLAN = {
...
@@ -65,9 +65,9 @@ module Projects
     end

     def extract_archive!(temp_path)
-      if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
+      if artifacts_filename.ends_with?('.tar.gz') || artifacts_filename.ends_with?('.tgz')
         extract_tar_archive!(temp_path)
-      elsif artifacts.ends_with?('.zip')
+      elsif artifacts_filename.ends_with?('.zip')
         extract_zip_archive!(temp_path)
       else
         raise 'unsupported artifacts format'
@@ -75,12 +75,14 @@ module Projects
     end

     def extract_tar_archive!(temp_path)
-      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
+      build.artifacts_file.use_file do |artifacts_path|
+        results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
                                  %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
                                  %W(tar -x -C #{temp_path} #{SITE_PATH}),
                                  err: '/dev/null')
         raise 'pages failed to extract' unless results.compact.all?(&:success?)
       end
+    end

     def extract_zip_archive!(temp_path)
       raise 'missing artifacts metadata' unless build.artifacts_metadata?
@@ -96,10 +98,12 @@ module Projects
       # -n never overwrite existing files
       # We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
       site_path = File.join(SITE_PATH, '*')
-      unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
+      build.artifacts_file.use_file do |artifacts_path|
+        unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
           raise 'pages failed to extract'
         end
       end
+    end

     def deploy_page!(archive_public_path)
       # Do atomic move of pages
@@ -128,6 +132,10 @@ module Projects
       1 + max_size / BLOCK_SIZE
     end

+    def artifacts_filename
+      build.artifacts_file.filename
+    end
+
     def max_size
       current_application_settings.max_pages_size.megabytes || MAX_SIZE
     end
@@ -152,10 +160,6 @@ module Projects
       build.ref
     end

-    def artifacts
-      build.artifacts_file.path
-    end
-
    def latest_sha
      project.commit(build.ref).try(:sha).to_s
    end
...
-class ArtifactUploader < GitlabUploader
-  storage :file
-
-  attr_reader :job, :field
+class ArtifactUploader < ObjectStoreUploader
+  storage_options Gitlab.config.artifacts

   def self.local_artifacts_store
     Gitlab.config.artifacts.path
@@ -11,12 +9,12 @@ class ArtifactUploader < GitlabUploader
     File.join(self.local_artifacts_store, 'tmp/uploads/')
   end

-  def initialize(job, field)
-    @job, @field = job, field
-  end
-
   def store_dir
+    if file_storage?
       default_local_path
+    else
+      default_path
+    end
   end

   def cache_dir
@@ -30,6 +28,6 @@ class ArtifactUploader < GitlabUploader
   end

   def default_path
-    File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
+    File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
   end
 end
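With `store_dir` split on `file_storage?`, local files keep an absolute path rooted at `local_artifacts_store`, while remote objects use only the relative `default_path` as the object key. A sketch with hypothetical job attributes:

```ruby
# Hypothetical job: created June 2017, project_id 123, job id 456.
uploader = ArtifactUploader.new(job, :artifacts_file)
uploader.store_dir
# local storage  => "#{Gitlab.config.artifacts.path}/2017_06/123/456"
# remote storage => "2017_06/123/456" (the key within the bucket)
```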
require 'fog/aws'
require 'carrierwave/storage/fog'

class ObjectStoreUploader < GitlabUploader
  before :store, :set_default_local_store
  before :store, :verify_license!

  LOCAL_STORE = 1
  REMOTE_STORE = 2

  class << self
    def storage_options(options)
      @storage_options = options
    end

    def object_store_options
      @storage_options&.object_store
    end

    def object_store_enabled?
      object_store_options&.enabled
    end
  end

  attr_reader :subject, :field

  def initialize(subject, field)
    @subject = subject
    @field = field
  end

  def object_store
    subject.public_send(:"#{field}_store")
  end

  def object_store=(value)
    @storage = nil
    subject.public_send(:"#{field}_store=", value)
  end

  def use_file
    if file_storage?
      return yield path
    end

    begin
      cache_stored_file!
      yield cache_path
    ensure
      cache_storage.delete_dir!(cache_path(nil))
    end
  end

  def filename
    super || file&.filename
  end

  def migrate!(new_store)
    raise 'Undefined new store' unless new_store
    return unless object_store != new_store
    return unless file

    old_file = file
    old_store = object_store

    # to move a remote file we first need to store it locally
    cache_stored_file! unless file_storage?

    # change the storage
    self.object_store = new_store

    storage.store!(file).tap do |new_file|
      # since we changed storage, persist the new store value;
      # in case of failure, delete the new file
      begin
        subject.save!
      rescue => e
        new_file.delete
        self.object_store = old_store
        raise e
      end

      old_file.delete
    end
  end

  def fog_directory
    self.class.object_store_options.remote_directory
  end

  def fog_credentials
    self.class.object_store_options.connection
  end

  def fog_public
    false
  end

  def move_to_store
    file.try(:storage) == storage
  end

  def move_to_cache
    file.try(:storage) == cache_storage
  end

  # We block storing artifacts on Object Storage, not receiving them
  def verify_license!(new_file)
    return if file_storage?

    raise 'Object Storage feature is missing' unless subject.project.feature_available?(:object_storage)
  end

  private

  def set_default_local_store(new_file)
    self.object_store = LOCAL_STORE unless self.object_store
  end

  def storage
    @storage ||=
      if object_store == REMOTE_STORE
        remote_storage
      else
        local_storage
      end
  end

  def remote_storage
    raise 'Object Storage is not enabled' unless self.class.object_store_enabled?

    CarrierWave::Storage::Fog.new(self)
  end

  def local_storage
    CarrierWave::Storage::File.new(self)
  end
end
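`migrate!` moves a stored file between backends and persists the new store value on the record, rolling back if the save fails. A sketch of driving it by hand (the rake task further down does the same in bulk); `some_build_id` is a placeholder:

```ruby
# Hypothetical console session: move one build's artifacts to object storage.
# Requires artifacts.object_store to be enabled and the license feature present.
build = Ci::Build.find(some_build_id)
build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
# If subject.save! fails, the freshly stored copy is deleted and the store
# value is restored, so the record never points at a missing file.
```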
-      - path_to_file = file_namespace_project_job_artifacts_path(@project.namespace, @project, @build, path: file.path)
+      - path_to_file = file_namespace_project_job_artifacts_path(@project.namespace, @project, @build, path: file.path) if @build.downloadable_single_artifacts_file?
       %tr.tree-item{ 'data-link' => path_to_file }
         - blob = file.blob
         %td.tree-item-file-name
           = tree_icon('file', blob.mode, blob.name)
-          = link_to path_to_file do
-            %span.str-truncated= blob.name
+          %span.str-truncated
+            - if path_to_file
+              = link_to file.name, path_to_file
+            - else
+              = file.name
         %td
           = number_to_human_size(blob.size, precision: 2)
@@ -36,7 +36,7 @@
       = link_to download_namespace_project_job_artifacts_path(@project.namespace, @project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
         Download
-    - if @build.artifacts_metadata?
+    - if @build.browsable_artifacts?
       = link_to browse_namespace_project_job_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
         Browse
...
---
title: Allow storing artifacts on Object Storage
merge_request:
author:
@@ -28,7 +28,8 @@ module Gitlab
                                      #{config.root}/app/models/members
                                      #{config.root}/app/models/project_services
                                      #{config.root}/app/workers/concerns
-                                     #{config.root}/app/services/concerns))
+                                     #{config.root}/app/services/concerns
+                                     #{config.root}/app/uploaders/concerns))

     config.generators.templates.push("#{config.root}/generator_templates")
...
@@ -138,6 +138,14 @@ production: &base
     enabled: true
     # The location where build artifacts are stored (default: shared/artifacts).
     # path: shared/artifacts
+    # object_store:
+    #   enabled: false
+    #   remote_directory: artifacts
+    #   connection:
+    #     provider: AWS # Only AWS supported at the moment
+    #     aws_access_key_id: AWS_ACCESS_KEY_ID
+    #     aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+    #     region: eu-central-1

   ## Git LFS
   lfs:
...
@@ -311,6 +311,12 @@ Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
 Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
 Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['object_store'] ||= Settingslogic.new({})
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+# Convert upload connection settings to use symbol keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_symbolize_keys!

 #
 # Registry
 #
...
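Settingslogic parses YAML into string-keyed hashes, while Fog expects symbol keys in its connection options, hence the `deep_symbolize_keys!` above. A small illustration with hypothetical values:

```ruby
# String keys, as parsed from gitlab.yml:
connection = { 'provider' => 'AWS', 'region' => 'eu-central-1' }
connection.deep_symbolize_keys!
connection # => { provider: "AWS", region: "eu-central-1" }
```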
class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_column_with_default(:ci_builds, :artifacts_file_store, :integer, default: 1)
    add_column_with_default(:ci_builds, :artifacts_metadata_store, :integer, default: 1)
  end

  def down
    remove_column(:ci_builds, :artifacts_file_store)
    remove_column(:ci_builds, :artifacts_metadata_store)
  end
end
@@ -285,6 +285,8 @@ ActiveRecord::Schema.define(version: 20170602003304) do
     t.string "coverage_regex"
     t.integer "auto_canceled_by_id"
     t.boolean "retried"
+    t.integer "artifacts_file_store", default: 1, null: false
+    t.integer "artifacts_metadata_store", default: 1, null: false
   end

   add_index "ci_builds", ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
...
@@ -82,6 +82,42 @@ _The artifacts are stored by default in
 1. Save the file and [restart GitLab][] for the changes to take effect.

+---
+
+**Using Object Store**
+
+The previously mentioned methods use the local disk to store artifacts. However,
+you can use an object store such as AWS S3 instead. To do this, set
+`object_store` in your `gitlab.yml`. This relies on valid AWS credentials
+already being configured.
+
+```yaml
+artifacts:
+  enabled: true
+  path: /mnt/storage/artifacts
+  object_store:
+    enabled: true
+    remote_directory: my-bucket-name
+    connection:
+      provider: AWS
+      aws_access_key_id: S3_KEY_ID
+      aws_secret_access_key: S3_SECRET_KEY_ID
+      region: eu-central-1
+```
+
+This will allow you to migrate existing artifacts to the object store,
+but all new artifacts will still be stored on the local disk.
+In the future you will be given an option to define default artifact storage
+for all new files. For now, the artifacts migration has to be executed manually:
+
+```bash
+gitlab-rake gitlab:artifacts:migrate
+```
+
+Please note that enabling this feature means artifacts are _not_ browsable
+through the web interface anymore. This limitation will be removed in one of
+the upcoming releases.
+
 ## Expiring artifacts

 If an expiry date is used for the artifacts, they are marked for deletion
...
@@ -334,7 +334,7 @@ module API
         if artifacts_file.file_storage?
           present_file!(artifacts_file.path, artifacts_file.filename)
         else
-          redirect_to(artifacts_file.url)
+          redirect(artifacts_file.url)
         end
       end
...
@@ -192,7 +192,7 @@ module Ci
         end

         unless artifacts_file.file_storage?
-          return redirect_to build.artifacts_file.url
+          return redirect(build.artifacts_file.url)
         end

         present_file!(artifacts_file.path, artifacts_file.filename)
...
desc "GitLab | Migrate files for artifacts to comply with new storage format"
namespace :gitlab do
namespace :artifacts do
task migrate: :environment do
puts 'Artifacts'.color(:yellow)
Ci::Build.joins(:project).with_artifacts
.where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
.find_each(batch_size: 100) do |issue|
begin
build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
print '.'
rescue
print 'F'
end
end
end
end
end
@@ -162,6 +162,11 @@ FactoryGirl.define do
       end
     end

+    trait :remote_store do
+      artifacts_file_store ArtifactUploader::REMOTE_STORE
+      artifacts_metadata_store ArtifactUploader::REMOTE_STORE
+    end
+
     trait :artifacts_expired do
       after(:create) do |build, _|
         build.artifacts_file =
...
@@ -224,7 +224,9 @@ CommitStatus:
 - target_url
 - description
 - artifacts_file
+- artifacts_file_store
 - artifacts_metadata
+- artifacts_metadata_store
 - erased_by_id
 - erased_at
 - artifacts_expire_at
...
@@ -107,6 +107,50 @@ describe Ci::Build, :models do
     end
   end

+  describe '#browsable_artifacts?' do
+    subject { build.browsable_artifacts? }
+
+    context 'artifacts metadata does not exist' do
+      before do
+        build.update_attributes(artifacts_metadata: nil)
+      end
+
+      it { is_expected.to be_falsy }
+    end
+
+    context 'artifacts metadata does exist' do
+      let(:build) { create(:ci_build, :artifacts) }
+
+      it { is_expected.to be_truthy }
+    end
+  end
+
+  describe '#downloadable_single_artifacts_file?' do
+    let(:build) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+
+    subject { build.downloadable_single_artifacts_file? }
+
+    before do
+      expect_any_instance_of(Ci::Build).to receive(:artifacts_metadata?).and_call_original
+    end
+
+    context 'artifacts are stored locally' do
+      let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+      it { is_expected.to be_truthy }
+    end
+
+    context 'artifacts are stored remotely' do
+      let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it { is_expected.to be_falsey }
+    end
+  end
+
   describe '#artifacts_expired?' do
     subject { build.artifacts_expired? }
...
 require 'spec_helper'

 describe API::Jobs, :api do
-  let!(:project) do
+  let(:project) do
     create(:project, :repository, public_builds: false)
   end

-  let!(:pipeline) do
+  let(:pipeline) do
     create(:ci_empty_pipeline, project: project,
                                sha: project.commit.id,
                                ref: project.default_branch)
   end

-  let!(:build) { create(:ci_build, pipeline: pipeline) }
+  let(:build) { create(:ci_build, pipeline: pipeline) }

   let(:user) { create(:user) }
   let(:api_user) { user }
@@ -26,6 +26,7 @@ describe API::Jobs, :api do
     let(:query) { Hash.new }

     before do
+      build
       get api("/projects/#{project.id}/jobs", api_user), query
     end
@@ -89,6 +90,7 @@ describe API::Jobs, :api do
     let(:query) { Hash.new }

     before do
+      build
       get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
     end
@@ -189,10 +191,12 @@ describe API::Jobs, :api do
   describe 'GET /projects/:id/jobs/:job_id/artifacts' do
     before do
+      stub_artifacts_object_storage
       get api("/projects/#{project.id}/jobs/#{build.id}/artifacts", api_user)
     end

     context 'job with artifacts' do
+      context 'when artifacts are stored locally' do
         let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }

         context 'authorized user' do
@@ -217,6 +221,15 @@ describe API::Jobs, :api do
         end
       end

+      context 'when artifacts are stored remotely' do
+        let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+        it 'returns location redirect' do
+          expect(response).to have_http_status(302)
+        end
+      end
+    end
+
     it 'does not return job artifacts if not uploaded' do
       expect(response).to have_http_status(404)
     end
@@ -227,6 +240,7 @@ describe API::Jobs, :api do
     let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }

     before do
+      stub_artifacts_object_storage
       build.success
     end
@@ -282,6 +296,7 @@ describe API::Jobs, :api do
     context 'find proper job' do
       shared_examples 'a valid file' do
+        context 'when artifacts are stored locally' do
          let(:download_headers) do
            { 'Content-Transfer-Encoding' => 'binary',
              'Content-Disposition' =>
@@ -292,6 +307,15 @@ describe API::Jobs, :api do
          it { expect(response.headers).to include(download_headers) }
        end

+        context 'when artifacts are stored remotely' do
+          let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+          it 'returns location redirect' do
+            expect(response).to have_http_status(302)
+          end
+        end
+      end
+
       context 'with regular branch' do
         before do
           pipeline.reload
...
@@ -185,7 +185,7 @@ describe API::Runner do
     let(:project) { create(:empty_project, shared_runners_enabled: false) }
     let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
     let(:runner) { create(:ci_runner) }
-    let!(:job) do
+    let(:job) do
       create(:ci_build, :artifacts, :extended_options,
              pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
     end
@@ -197,7 +197,10 @@ describe API::Runner do
       let!(:new_update) { }
       let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }

-      before { stub_container_registry_config(enabled: false) }
+      before do
+        job
+        stub_container_registry_config(enabled: false)
+      end

       shared_examples 'no jobs available' do
         before { request_job }
@@ -767,7 +770,10 @@ describe API::Runner do
     let(:file_upload) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
     let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }

-    before { job.run! }
+    before do
+      stub_artifacts_object_storage
+      job.run!
+    end

     describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
       context 'when using token as parameter' do
@@ -1059,18 +1065,29 @@ describe API::Runner do
       context 'when job has artifacts' do
         let(:job) { create(:ci_build, :artifacts) }

+        context 'when using job token' do
+          context 'when artifacts are stored locally' do
            let(:download_headers) do
              { 'Content-Transfer-Encoding' => 'binary',
                'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
            end

-        context 'when using job token' do
            it 'download artifacts' do
              expect(response).to have_http_status(200)
              expect(response.headers).to include download_headers
            end
          end

+          context 'when artifacts are stored remotely' do
+            let(:job) { create(:ci_build, :artifacts, :remote_store) }
+
+            it 'download artifacts' do
+              expect(response).to have_http_status(302)
+            end
+          end
+        end
+
         context 'when using runnners token' do
           let(:token) { job.project.runners_token }
...
@@ -7,13 +7,14 @@ describe API::V3::Builds do
   let!(:developer) { create(:project_member, :developer, user: user, project: project) }
   let(:reporter) { create(:project_member, :reporter, project: project) }
   let(:guest) { create(:project_member, :guest, project: project) }
-  let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
-  let!(:build) { create(:ci_build, pipeline: pipeline) }
+  let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+  let(:build) { create(:ci_build, pipeline: pipeline) }

   describe 'GET /projects/:id/builds ' do
     let(:query) { '' }

     before do
+      build
       create(:ci_build, :skipped, pipeline: pipeline)

       get v3_api("/projects/#{project.id}/builds?#{query}", api_user)
@@ -87,6 +88,10 @@ describe API::V3::Builds do
   end

   describe 'GET /projects/:id/repository/commits/:sha/builds' do
+    before do
+      build
+    end
+
     context 'when commit does not exist in repository' do
       before do
         get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user)
@@ -187,10 +192,12 @@ describe API::V3::Builds do
   describe 'GET /projects/:id/builds/:build_id/artifacts' do
     before do
+      stub_artifacts_object_storage
       get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
     end

     context 'job with artifacts' do
+      context 'when artifacts are stored locally' do
         let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }

         context 'authorized user' do
@@ -205,6 +212,15 @@ describe API::V3::Builds do
           expect(response.body).to match_file(build.artifacts_file.file.file)
         end
       end
+      end
+
+      context 'when artifacts are stored remotely' do
+        let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+        it 'returns location redirect' do
+          expect(response).to have_http_status(302)
+        end
+      end

       context 'unauthorized user' do
         let(:api_user) { nil }
@@ -225,6 +241,7 @@ describe API::V3::Builds do
     let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }

     before do
+      stub_artifacts_object_storage
       build.success
     end
@@ -280,6 +297,7 @@ describe API::V3::Builds do
     context 'find proper job' do
       shared_examples 'a valid file' do
+        context 'when artifacts are stored locally' do
          let(:download_headers) do
            { 'Content-Transfer-Encoding' => 'binary',
              'Content-Disposition' =>
@@ -290,6 +308,15 @@ describe API::V3::Builds do
          it { expect(response.headers).to include(download_headers) }
        end

+        context 'when artifacts are stored remotely' do
+          let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+          it 'returns location redirect' do
+            expect(response).to have_http_status(302)
+          end
+        end
+      end
+
       context 'with regular branch' do
         before do
           pipeline.reload
...
@@ -455,7 +455,10 @@ describe Ci::API::Builds do
     let(:token) { build.token }
     let(:headers_with_token) { headers.merge(Ci::API::Helpers::BUILD_TOKEN_HEADER => token) }

-    before { build.run! }
+    before do
+      stub_artifacts_object_storage
+      build.run!
+    end

     describe "POST /builds/:id/artifacts/authorize" do
       context "authorizes posting artifact to running build" do
@@ -789,19 +792,29 @@ describe Ci::API::Builds do
       end

       context 'build has artifacts' do
+        shared_examples 'having downloadable artifacts' do
+          context 'when stored locally' do
            let(:build) { create(:ci_build, :artifacts) }
            let(:download_headers) do
              { 'Content-Transfer-Encoding' => 'binary',
                'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
            end

-        shared_examples 'having downloadable artifacts' do
            it 'download artifacts' do
              expect(response).to have_http_status(200)
              expect(response.headers).to include download_headers
            end
          end

+          context 'when stored remotely' do
+            let(:build) { create(:ci_build, :artifacts, :remote_store) }
+
+            it 'redirect to artifacts file' do
+              expect(response).to have_http_status(302)
+            end
+          end
+        end
+
         context 'when using build token' do
           let(:token) { build.token }
...
@@ -22,7 +22,8 @@ describe Ci::RetryBuildService, :services do
     %i[type lock_version target_url base_tags
        commit_id deployments erased_by_id last_deployment project_id
        runner_id tag_taggings taggings tags trigger_request_id
-       user_id auto_canceled_by_id retried sourced_pipelines].freeze
+       user_id auto_canceled_by_id retried sourced_pipelines
+       artifacts_file_store artifacts_metadata_store].freeze

   shared_examples 'build duplication' do
     let(:build) do
...
module StubConfiguration
  def stub_artifacts_object_storage(enabled: true)
    Fog.mock!

    allow(Gitlab.config.artifacts.object_store).to receive_messages(
      enabled: enabled,
      remote_directory: 'artifacts',
      connection: {
        provider: 'AWS',
        aws_access_key_id: 'AWS_ACCESS_KEY_ID',
        aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
        region: 'eu-central-1'
      }
    )

    allow_any_instance_of(ArtifactUploader).to receive(:verify_license!) { true }

    return unless enabled

    ::Fog::Storage.new(Gitlab.config.artifacts.object_store.connection).tap do |connection|
      begin
        connection.directories.create(key: 'artifacts')
      rescue Excon::Error::Conflict
      end
    end
  end
end
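Since the helper switches Fog into mock mode and stubs the `object_store` settings, specs can exercise the remote code path without credentials or network access. A hypothetical example combining it with the `:remote_store` factory trait added above:

```ruby
# Hypothetical spec: remote storage is fully mocked by the helper.
describe 'remote artifacts' do
  before do
    stub_artifacts_object_storage
  end

  it 'is not backed by local file storage' do
    build = create(:ci_build, :artifacts, :remote_store)

    expect(build.artifacts_file.file_storage?).to be_falsey
  end
end
```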
 require 'rails_helper'

 describe ArtifactUploader do
-  let(:job) { create(:ci_build) }
+  let(:store) { described_class::LOCAL_STORE }
+  let(:job) { create(:ci_build, artifacts_file_store: store) }
   let(:uploader) { described_class.new(job, :artifacts_file) }
-  let(:path) { Gitlab.config.artifacts.path }
+  let(:local_path) { Gitlab.config.artifacts.path }

   describe '.local_artifacts_store' do
     subject { described_class.local_artifacts_store }
@@ -18,21 +19,35 @@ describe ArtifactUploader do
   describe '.artifacts_upload_path' do
     subject { described_class.artifacts_upload_path }

-    it { is_expected.to start_with(path) }
+    it { is_expected.to start_with(local_path) }
     it { is_expected.to end_with('tmp/uploads/') }
   end

   describe '#store_dir' do
     subject { uploader.store_dir }

-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
+    let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
+
+    context 'when using local storage' do
+      it { is_expected.to start_with(local_path) }
+      it { is_expected.to end_with(path) }
+    end
+
+    context 'when using remote storage' do
+      let(:store) { described_class::REMOTE_STORE }
+
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it { is_expected.to eq(path) }
+    end
   end

   describe '#cache_dir' do
     subject { uploader.cache_dir }

-    it { is_expected.to start_with(path) }
+    it { is_expected.to start_with(local_path) }
     it { is_expected.to end_with('tmp/cache') }
   end
 end
require 'rails_helper'
require 'carrierwave/storage/fog'

describe ObjectStoreUploader do
  let(:uploader_class) { Class.new(described_class) }
  let(:object) { double }
  let(:uploader) { uploader_class.new(object, :artifacts_file) }

  describe '#object_store' do
    it "calls artifacts_file_store on object" do
      expect(object).to receive(:artifacts_file_store)

      uploader.object_store
    end
  end

  describe '#object_store=' do
    it "calls artifacts_file_store= on object" do
      expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)

      uploader.object_store = described_class::REMOTE_STORE
    end
  end

  context 'when using ArtifactsUploader' do
    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
    let(:uploader) { job.artifacts_file }

    context 'checking described_class' do
      let(:store) { described_class::LOCAL_STORE }

      it "uploader is of a described_class" do
        expect(uploader).to be_a(described_class)
      end
    end

    describe '#use_file' do
      context 'when file is stored locally' do
        let(:store) { described_class::LOCAL_STORE }

        it "calls a regular path" do
          expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
        end
      end

      context 'when file is stored remotely' do
        let(:store) { described_class::REMOTE_STORE }

        before do
          stub_artifacts_object_storage
        end

        it "calls a cache path" do
          expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
        end
      end
    end

    describe '#migrate!' do
      let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
      let(:uploader) { job.artifacts_file }
      let(:store) { described_class::LOCAL_STORE }

      subject { uploader.migrate!(new_store) }

      context 'when using the same storage' do
        let(:new_store) { store }

        it "to not migrate the storage" do
          subject

          expect(uploader.object_store).to eq(store)
        end
      end

      context 'when migrating to local storage' do
        let(:store) { described_class::REMOTE_STORE }
        let(:new_store) { described_class::LOCAL_STORE }

        before do
          stub_artifacts_object_storage
        end

        it "local file does not exist" do
          expect(File.exist?(uploader.path)).to eq(false)
        end

        it "does migrate the file" do
          subject

          expect(uploader.object_store).to eq(new_store)
          expect(File.exist?(uploader.path)).to eq(true)
        end
      end

      context 'when migrating to remote storage' do
        let(:new_store) { described_class::REMOTE_STORE }
        let!(:current_path) { uploader.path }

        it "file does exist" do
          expect(File.exist?(current_path)).to eq(true)
        end

        context 'when storage is disabled' do
          before do
            stub_artifacts_object_storage(enabled: false)
          end

          it "to raise an error" do
            expect { subject }.to raise_error(/Object Storage is not enabled/)
          end
        end

        context 'when credentials are set' do
          before do
            stub_artifacts_object_storage
          end

          it "does migrate the file" do
            subject

            expect(uploader.object_store).to eq(new_store)
            expect(File.exist?(current_path)).to eq(false)
          end

          it "does delete original file" do
            subject

            expect(File.exist?(current_path)).to eq(false)
          end

          context 'when subject save fails' do
            before do
              expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
            end

            it "does catch an error" do
              expect { subject }.to raise_error(/exception/)
            end

            it "original file is not removed" do
              begin
                subject
              rescue
              end

              expect(File.exist?(current_path)).to eq(true)
            end
          end
        end
      end
    end
  end

  describe '#fog_directory' do
    let(:remote_directory) { 'directory' }

    before do
      uploader_class.storage_options double(
        object_store: double(remote_directory: remote_directory))
    end

    subject { uploader.fog_directory }

    it { is_expected.to eq(remote_directory) }
  end

  describe '#fog_credentials' do
    let(:connection) { 'connection' }

    before do
      uploader_class.storage_options double(
        object_store: double(connection: connection))
    end

    subject { uploader.fog_credentials }

    it { is_expected.to eq(connection) }
  end

  describe '#fog_public' do
    subject { uploader.fog_public }

    it { is_expected.to eq(false) }
  end

  describe '#verify_license!' do
    subject { uploader.verify_license!(nil) }

    context 'when using local storage' do
      before do
        expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
      end

      it "does not raise an error" do
        expect { subject }.not_to raise_error
      end
    end

    context 'when using remote storage' do
      let(:project) { double }

      before do
        uploader_class.storage_options double(
          object_store: double(enabled: true))
        expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
        expect(object).to receive(:project) { project }
      end

      context 'feature is not available' do
        before do
          expect(project).to receive(:feature_available?).with(:object_storage) { false }
        end

        it "does raise an error" do
          expect { subject }.to raise_error(/Object Storage feature is missing/)
        end
      end

      context 'feature is available' do
        before do
          expect(project).to receive(:feature_available?).with(:object_storage) { true }
        end

        it "does not raise an error" do
          expect { subject }.not_to raise_error
        end
      end
    end
  end
end