Commit 845a6246 authored by Grzegorz Bizon

Merge branch 'zj-multiple-artifacts-ee' into 'master'

Multiple artifacts ee

See merge request gitlab-org/gitlab-ee!3276
parents 32ad854e 31dcfcc5
......@@ -42,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def raw
path = Gitlab::Ci::Build::Artifacts::Path
.new(params[:path])
path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
send_artifacts_entry(build, path)
end
......@@ -72,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def validate_artifacts!
render_404 unless build && build.artifacts?
render_404 unless build&.artifacts?
end
def build
......
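The validate_artifacts! change above swaps an explicit nil guard for Ruby's safe navigation operator; a minimal sketch of the equivalence, assuming build may be nil:

build && build.artifacts?  # old form: short-circuits to nil when build is missing
build&.artifacts?          # new form: also yields nil (falsy), so render_404 fires either way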
module Ci
class Build < CommitStatus
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include Presentable
......@@ -13,9 +14,14 @@ module Ci
has_many :sourced_pipelines, class_name: Ci::Sources::Pipeline, foreign_key: :source_job_id
has_many :deployments, as: :deployable
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
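Each scoped has_one above gives one artifact type its own accessor backed by a filtered lookup; roughly what it resolves to for a persisted build (a sketch):

build.job_artifacts_archive
# SELECT ci_job_artifacts.* FROM ci_job_artifacts
#   WHERE job_id = <build.id> AND file_type = 1   -- 1 = archive in the Ci::JobArtifact enum
#   LIMIT 1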
# The "environment" field for builds is a String, and is the unexpanded name
def persisted_environment
@persisted_environment ||= Environment.find_by(
......@@ -34,16 +40,19 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts, ->() do
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
mount_uploader :artifacts_file, ArtifactUploader
mount_uploader :artifacts_metadata, ArtifactUploader
mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata
acts_as_taggable
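While both storage formats coexist, the rewritten with_artifacts scope must match builds holding artifacts in either one; a sketch of what it expands to:

# WHERE (artifacts_file IS NOT NULL AND artifacts_file <> '')
#    OR EXISTS (SELECT 1 FROM ci_job_artifacts WHERE ci_builds.id = ci_job_artifacts.job_id)
Ci::Build.with_artifacts.count # counts builds with legacy or new-style artifacts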
......@@ -330,18 +339,10 @@ module Ci
project.running_or_pending_build_count(force: true)
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def browsable_artifacts?
artifacts_metadata?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_metadata_entry(path, **options)
artifacts_metadata.use_file do |metadata_path|
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
......@@ -396,6 +397,7 @@ module Ci
def keep_artifacts!
self.update(artifacts_expire_at: nil)
self.job_artifacts.update_all(expire_at: nil)
end
def coverage_regex
......@@ -483,11 +485,7 @@ module Ci
private
def update_artifacts_size
self.artifacts_size = if artifacts_file.exists?
artifacts_file.size
else
nil
end
self.artifacts_size = legacy_artifacts_file&.size
end
def erase_trace!
......
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :set_size, if: :file_changed?
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
def expire_in
expire_at - Time.now if expire_at
end
def expire_in=(value)
self.expire_at =
if value
ChronicDuration.parse(value)&.seconds&.from_now
end
end
end
end
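expire_in round-trips a human-readable duration through ChronicDuration; a console sketch against a new record:

artifact = Ci::JobArtifact.new
artifact.expire_in = '7 days' # ChronicDuration.parse('7 days').seconds.from_now
artifact.expire_in            # => ~604800, recomputed from expire_at
artifact.expire_in = nil      # clears the expiry
artifact.expire_in = '0'      # ChronicDuration.parse('0') returns nil, so no expiry is set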
# Adapter class to unify the interface between mounted uploaders and the
# Ci::JobArtifact model.
# Meant to be prepended so the interface can stay the same.
module ArtifactMigratable
def artifacts_file
job_artifacts_archive&.file || legacy_artifacts_file
end
def artifacts_metadata
job_artifacts_metadata&.file || legacy_artifacts_metadata
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
end
def remove_artifacts_file!
if job_artifacts_archive
job_artifacts_archive.destroy
else
remove_legacy_artifacts_file!
end
end
def remove_artifacts_metadata!
if job_artifacts_metadata
job_artifacts_metadata.destroy
else
remove_legacy_artifacts_metadata!
end
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
end
end
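Because ArtifactMigratable is prepended into Ci::Build, it sits before the class in the ancestor chain and its methods shadow the mounted-uploader accessors, so callers keep the old interface. A sketch of the dispatch, assuming no other prepends:

Ci::Build.ancestors.take(2) # => [ArtifactMigratable, Ci::Build]
build.artifacts_file        # new-style archive file when present, else the legacy uploader
# artifacts_size adds the legacy column to both new rows; nil&.size.to_i is 0,
# so a missing artifact contributes nothing to the sum.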
......@@ -39,7 +39,9 @@ class ProjectStatistics < ActiveRecord::Base
end
def update_build_artifacts_size
self.build_artifacts_size = project.builds.sum(:artifacts_size)
self.build_artifacts_size =
project.builds.sum(:artifacts_size) +
Ci::JobArtifact.artifacts_size_for(project)
end
def update_storage_size
......
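update_build_artifacts_size above adds two sums because, during the migration window, artifact sizes live in two places; a sketch:

legacy_size = project.builds.sum(:artifacts_size)         # old column on ci_builds
new_size    = Ci::JobArtifact.artifacts_size_for(project) # rows in ci_job_artifacts
legacy_size + new_size                                    # => build_artifacts_size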
......@@ -18,7 +18,7 @@ module Projects
@status.enqueue!
@status.run!
raise 'missing pages artifacts' unless build.artifacts_file?
raise 'missing pages artifacts' unless build.artifacts?
raise 'pages are outdated' unless latest?
# Create temporary directory in which we will extract the artifacts
......
class JobArtifactUploader < ObjectStoreUploader
storage_options Gitlab.config.artifacts
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def size
return super if model.size.nil?
model.size
end
private
def default_path
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, model.job_id.to_s, model.id.to_s)
end
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
end
end
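default_path shards artifacts by a SHA-256 of the project id plus the creation date; a worked sketch with hypothetical ids:

require 'digest'

disk_hash = Digest::SHA2.hexdigest(42.to_s) # hypothetical project_id; 64 hex chars
date      = Time.utc(2017, 11, 24).strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, date, '7', '1')
# => "<h1h2>/<h3h4>/<full hash>/2017_11_24/7/1" (job_id, then artifact id)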
class ArtifactUploader < ObjectStoreUploader
class LegacyArtifactUploader < ObjectStoreUploader
storage_options Gitlab.config.artifacts
def self.local_store_path
......@@ -12,6 +12,6 @@ class ArtifactUploader < ObjectStoreUploader
private
def default_path
File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end
......@@ -7,12 +7,12 @@ class LfsObjectUploader < ObjectStoreUploader
end
def filename
subject.oid[4..-1]
model.oid[4..-1]
end
private
def default_path
"#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
"#{model.oid[0, 2]}/#{model.oid[2, 2]}"
end
end
......@@ -764,6 +764,7 @@ test:
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
artifacts:
path: tmp/tests/artifacts
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
......
......@@ -131,11 +131,11 @@ class Gitlab::Seeder::Pipelines
return unless %w[build test].include?(build.stage)
artifacts_cache_file(artifacts_archive_path) do |file|
build.artifacts_file = file
build.job_artifacts.build(project: build.project, file_type: :archive, file: file)
end
artifacts_cache_file(artifacts_metadata_path) do |file|
build.artifacts_metadata = file
build.job_artifacts.build(project: build.project, file_type: :metadata, file: file)
end
end
......
class CreateJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_artifacts do |t|
t.belongs_to :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.integer :job_id, null: false
t.integer :file_type, null: false
t.integer :size, limit: 8
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.datetime_with_timezone :expire_at
t.string :file
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
t.index [:job_id, :file_type], unique: true
end
end
end
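The unique index on [job_id, file_type] caps a job at one artifact per type; a sketch using the ci_job_artifact factory introduced further down:

create(:ci_job_artifact, :archive,  job: build) # fine
create(:ci_job_artifact, :archive,  job: build) # raises ActiveRecord::RecordNotUnique
create(:ci_job_artifact, :metadata, job: build) # fine, different file_type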
class AddFileStoreJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
add_column(:ci_job_artifacts, :file_store, :integer)
end
def down
remove_column(:ci_job_artifacts, :file_store)
end
end
......@@ -396,6 +396,21 @@ ActiveRecord::Schema.define(version: 20171124165823) do
add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
create_table "ci_job_artifacts", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "file_store"
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.datetime_with_timezone "expire_at"
t.string "file"
end
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
......@@ -2404,6 +2419,8 @@ ActiveRecord::Schema.define(version: 20171124165823) do
add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
......@@ -38,13 +38,6 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
end
end
attr_reader :subject, :field
def initialize(subject, field)
@subject = subject
@field = field
end
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
......@@ -54,7 +47,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
end
def real_object_store
subject.public_send(:"#{field}_store") # rubocop:disable GitlabSecurity/PublicSend
model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
end
def object_store
......@@ -63,7 +56,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
def object_store=(value)
@storage = nil
subject.public_send(:"#{field}_store=", value) # rubocop:disable GitlabSecurity/PublicSend
model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
end
def store_dir
......@@ -111,7 +104,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
# since we change storage store the new storage
# in case of failure delete new file
begin
subject.save!
model.save!
rescue => e
new_file.delete
self.object_store = old_store
......@@ -125,7 +118,7 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
def schedule_migration_to_object_storage(new_file)
if self.class.object_store_enabled? && licensed? && file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
end
end
......@@ -194,6 +187,14 @@ class ObjectStoreUploader < CarrierWave::Uploader::Base
raise NotImplementedError
end
def serialization_column
model.class.uploader_option(mounted_as, :mount_on) || mounted_as
end
def store_serialization_column
:"#{serialization_column}_store"
end
def storage
@storage ||=
if object_store == REMOTE_STORE
......
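The refactor above drops the hand-rolled subject/field pair in favour of CarrierWave's own model and mounted_as, resolving the real column through the mount_on option. A sketch of the resolution for the legacy mount in Ci::Build:

# mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
#
# serialization_column       # => :artifacts_file (mount_on wins over mounted_as)
# store_serialization_column # => :artifacts_file_store
# real_object_store          # => model.artifacts_file_store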
......@@ -44,8 +44,8 @@ class Spinach::Features::ProjectPages < Spinach::FeatureSteps
project: @project,
pipeline: pipeline,
ref: 'HEAD',
artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
legacy_artifacts_file: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip'),
legacy_artifacts_metadata: fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
)
result = ::Projects::UpdatePagesService.new(@project, build).execute
......
......@@ -37,13 +37,13 @@ module SharedBuilds
step 'recent build has artifacts available' do
artifacts = Rails.root + 'spec/fixtures/ci_build_artifacts.zip'
archive = fixture_file_upload(artifacts, 'application/zip')
@build.update_attributes(artifacts_file: archive)
@build.update_attributes(legacy_artifacts_file: archive)
end
step 'recent build has artifacts metadata available' do
metadata = Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
gzip = fixture_file_upload(metadata, 'application/x-gzip')
@build.update_attributes(artifacts_metadata: gzip)
@build.update_attributes(legacy_artifacts_metadata: gzip)
end
step 'recent build has a build trace' do
......
......@@ -1178,13 +1178,9 @@ module API
expose :type, :url, :username, :password
end
class ArtifactFile < Grape::Entity
expose :filename, :size
end
class Dependency < Grape::Entity
expose :id, :name, :token
expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end
class Response < Grape::Entity
......
......@@ -215,18 +215,20 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = ArtifactUploader.artifacts_upload_path
artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
job.artifacts_file = artifacts
job.artifacts_metadata = metadata
job.artifacts_expire_in = params['expire_in'] ||
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
job.artifacts_expire_in = expire_in
if job.save
present job, with: Entities::JobRequest::Response
else
......
......@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
super('artifacts', ArtifactUploader.local_store_path)
super('artifacts', LegacyArtifactUploader.local_store_path)
end
def create_files_dir
......
......@@ -58,7 +58,7 @@ module Gitlab
end
def artifact_upload_ok
{ TempPath: ArtifactUploader.artifacts_upload_path }
{ TempPath: JobArtifactUploader.artifacts_upload_path }
end
def send_git_blob(repository, blob)
......
......@@ -12,8 +12,8 @@ namespace :gitlab do
.with_artifacts_stored_locally
.find_each(batch_size: 10) do |build|
begin
build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
rescue => e
......
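The rake task above walks locally-stored builds in batches of ten and migrates each file to object storage; a usage sketch from a Rails console, mirroring how the specs load the task:

require 'rake'
Rake.application.rake_require 'tasks/gitlab/artifacts'
Rake::Task['gitlab:artifacts:migrate'].invoke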
require 'spec_helper'
describe Projects::ArtifactsController do
set(:user) { create(:user) }
let(:user) { project.owner }
set(:project) { create(:project, :repository, :public) }
let(:pipeline) do
......@@ -15,8 +15,6 @@ describe Projects::ArtifactsController do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
before do
project.add_developer(user)
sign_in(user)
end
......@@ -117,12 +115,12 @@ describe Projects::ArtifactsController do
context 'when the file exists' do
let(:path) { 'ci_artifacts.txt' }
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
shared_examples 'a valid file' do
it 'serves the file using workhorse' do
subject
expect(response).to have_gitlab_http_status(200)
expect(send_data).to start_with('artifacts-entry:')
expect(params.keys).to eq(%w(Archive Entry))
......@@ -146,8 +144,9 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let(:store) { ObjectStoreUploader::LOCAL_STORE }
let(:archive_path) { ArtifactUploader.local_store_path }
let(:archive_path) { JobArtifactUploader.local_store_path }
end
end
......@@ -157,6 +156,8 @@ describe Projects::ArtifactsController do
end
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:store) { ObjectStoreUploader::REMOTE_STORE }
let(:archive_path) { 'https://' }
end
......
......@@ -154,39 +154,27 @@ FactoryGirl.define do
runner factory: :ci_runner
end
trait :artifacts do
trait :legacy_artifacts do
after(:create) do |build, _|
build.artifacts_file =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.save!
build.update!(
legacy_artifacts_file: fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip'),
legacy_artifacts_metadata: fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
)
end
end
trait :remote_store do
artifacts_file_store ArtifactUploader::REMOTE_STORE
artifacts_metadata_store ArtifactUploader::REMOTE_STORE
trait :artifacts do
after(:create) do |build|
create(:ci_job_artifact, :archive, job: build)
create(:ci_job_artifact, :metadata, job: build)
build.reload
end
trait :artifacts_expired do
after(:create) do |build, _|
build.artifacts_file =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
build.artifacts_metadata =
fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
build.artifacts_expire_at = 1.minute.ago
build.save!
end
trait :expired do
artifacts_expire_at 1.minute.ago
end
trait :with_commit do
......
include ActionDispatch::TestProcess
FactoryGirl.define do
factory :ci_job_artifact, class: Ci::JobArtifact do
job factory: :ci_build
file_type :archive
trait :remote_store do
file_store JobArtifactUploader::REMOTE_STORE
end
after :build do |artifact|
artifact.project ||= artifact.job.project
end
trait :archive do
file_type :archive
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
end
trait :metadata do
file_type :metadata
after(:build) do |artifact, _|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
end
end
end
end
......@@ -89,7 +89,7 @@ describe 'Commits' do
context 'Download artifacts' do
before do
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
end
it do
......@@ -146,7 +146,7 @@ describe 'Commits' do
context "when logged as reporter" do
before do
project.team << [user, :reporter]
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline)
end
......@@ -168,7 +168,7 @@ describe 'Commits' do
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
build.update_attributes(artifacts_file: artifacts_file)
build.update_attributes(legacy_artifacts_file: artifacts_file)
visit pipeline_path(pipeline)
end
......
......@@ -28,14 +28,14 @@ feature 'Mini Pipeline Graph', :js do
let(:artifacts_file2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
before do
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file1)
create(:ci_build, pipeline: pipeline, when: 'manual')
end
it 'avoids repeated database queries' do
before = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
create(:ci_build, pipeline: pipeline, artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, legacy_artifacts_file: artifacts_file2)
create(:ci_build, pipeline: pipeline, when: 'manual')
after = ActiveRecord::QueryRecorder.new { visit_merge_request(:json) }
......
......@@ -187,7 +187,7 @@ feature 'Jobs' do
context "Download artifacts" do
before do
job.update_attributes(artifacts_file: artifacts_file)
job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
end
......@@ -198,7 +198,7 @@ feature 'Jobs' do
context 'Artifacts expire date' do
before do
job.update_attributes(artifacts_file: artifacts_file,
job.update_attributes(legacy_artifacts_file: artifacts_file,
artifacts_expire_at: expire_at)
visit project_job_path(project, job)
......@@ -433,14 +433,14 @@ feature 'Jobs' do
describe "GET /:project/jobs/:id/download" do
before do
job.update_attributes(artifacts_file: artifacts_file)
job.update_attributes(legacy_artifacts_file: artifacts_file)
visit project_job_path(project, job)
click_link 'Download'
end
context "Build from other project" do
before do
job2.update_attributes(artifacts_file: artifacts_file)
job2.update_attributes(legacy_artifacts_file: artifacts_file)
visit download_project_job_artifacts_path(project, job2)
end
......
......@@ -304,7 +304,7 @@ describe 'Pipelines', :js do
context 'with artifacts expired' do
let!(:with_artifacts_expired) do
create(:ci_build, :artifacts_expired, :success,
create(:ci_build, :expired, :success,
pipeline: pipeline,
name: 'rspec',
stage: 'test')
......
......@@ -16,20 +16,22 @@ describe MigrateOldArtifacts do
end
context 'with migratable data' do
let(:project1) { create(:project, ci_id: 2) }
let(:project2) { create(:project, ci_id: 3) }
let(:project3) { create(:project) }
set(:project1) { create(:project, ci_id: 2) }
set(:project2) { create(:project, ci_id: 3) }
set(:project3) { create(:project) }
let(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
let(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
let(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
set(:pipeline1) { create(:ci_empty_pipeline, project: project1) }
set(:pipeline2) { create(:ci_empty_pipeline, project: project2) }
set(:pipeline3) { create(:ci_empty_pipeline, project: project3) }
let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) }
let!(:build2) { create(:ci_build, :artifacts, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, :artifacts, pipeline: pipeline3) }
let!(:build2) { create(:ci_build, pipeline: pipeline2) }
let!(:build3) { create(:ci_build, pipeline: pipeline3) }
before do
setup_builds(build2, build3)
store_artifacts_in_legacy_path(build_with_legacy_artifacts)
end
......@@ -38,7 +40,7 @@ describe MigrateOldArtifacts do
end
it "legacy artifacts are set" do
expect(build_with_legacy_artifacts.artifacts_file_identifier).not_to be_nil
expect(build_with_legacy_artifacts.legacy_artifacts_file_identifier).not_to be_nil
end
describe '#min_id' do
......@@ -113,5 +115,24 @@ describe MigrateOldArtifacts do
build.project.ci_id.to_s,
build.id.to_s)
end
def new_legacy_path(build)
File.join(directory,
build.created_at.utc.strftime('%Y_%m'),
build.project_id.to_s,
build.id.to_s)
end
def setup_builds(*builds)
builds.each do |build|
FileUtils.mkdir_p(new_legacy_path(build))
build.update_columns(
artifacts_file: 'ci_build_artifacts.zip',
artifacts_metadata: 'ci_build_artifacts_metadata.gz')
build.reload
end
end
end
end
......@@ -24,6 +24,8 @@ describe Ci::Build do
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
it { is_expected.to be_a(ArtifactMigratable) }
describe 'callbacks' do
context 'when running after_create callback' do
it 'triggers asynchronous build hooks worker' do
......@@ -131,44 +133,65 @@ describe Ci::Build do
end
describe '#artifacts?' do
context 'when new artifacts are used' do
let(:build) { create(:ci_build, :artifacts) }
subject { build.artifacts? }
context 'artifacts archive does not exist' do
before do
build.update_attributes(artifacts_file: nil)
end
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
context 'is expired' do
before do
build.update(artifacts_expire_at: Time.now - 7.days)
end
let!(:build) { create(:ci_build, :artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'is not expired' do
before do
build.update(artifacts_expire_at: Time.now + 7.days)
end
it { is_expected.to be_truthy }
end
end
end
context 'when legacy artifacts are used' do
let(:build) { create(:ci_build, :legacy_artifacts) }
subject { build.artifacts? }
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
it { is_expected.to be_truthy }
context 'is expired' do
let!(:build) { create(:ci_build, :legacy_artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'is not expired' do
it { is_expected.to be_truthy }
end
end
end
end
describe '#browsable_artifacts?' do
subject { build.browsable_artifacts? }
context 'artifacts metadata does not exist' do
before do
build.update_attributes(artifacts_metadata: nil)
build.update_attributes(legacy_artifacts_metadata: nil)
end
it { is_expected.to be_falsy }
......@@ -631,11 +654,13 @@ describe Ci::Build do
describe '#erasable?' do
subject { build.erasable? }
it { is_expected.to eq false }
end
end
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :artifacts) }
describe '#erase' do
......@@ -702,6 +727,77 @@ describe Ci::Build do
end
end
context 'old artifacts' do
context 'build is erasable' do
context 'new artifacts' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
describe '#erase' do
before do
build.erase(erased_by: user)
end
context 'erased by user' do
let!(:user) { create(:user, username: 'eraser') }
include_examples 'erasable'
it 'records user who erased a build' do
expect(build.erased_by).to eq user
end
end
context 'erased by system' do
let(:user) { nil }
include_examples 'erasable'
it 'does not set user who erased a build' do
expect(build.erased_by).to be_nil
end
end
end
describe '#erasable?' do
subject { build.erasable? }
it { is_expected.to be_truthy }
end
describe '#erased?' do
let!(:build) { create(:ci_build, :trace, :success, :legacy_artifacts) }
subject { build.erased? }
context 'job has not been erased' do
it { is_expected.to be_falsey }
end
context 'job has been erased' do
before do
build.erase
end
it { is_expected.to be_truthy }
end
end
context 'metadata and build trace are not available' do
let!(:build) { create(:ci_build, :success, :legacy_artifacts) }
before do
build.remove_artifacts_metadata!
end
describe '#erase' do
it 'does not raise error' do
expect { build.erase }.not_to raise_error
end
end
end
end
end
end
end
describe '#first_pending' do
let!(:first) { create(:ci_build, pipeline: pipeline, status: 'pending', created_at: Date.yesterday) }
let!(:second) { create(:ci_build, pipeline: pipeline, status: 'pending') }
......@@ -931,11 +1027,23 @@ describe Ci::Build do
describe '#keep_artifacts!' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
subject { build.keep_artifacts! }
it 'resets expire_at' do
build.keep_artifacts!
subject
expect(build.artifacts_expire_at).to be_nil
end
context 'when having artifacts files' do
let!(:artifact) { create(:ci_job_artifact, job: build, expire_in: '7 days') }
it 'resets dependent objects' do
subject
expect(artifact.reload.expire_at).to be_nil
end
end
end
describe '#merge_request' do
......
require 'spec_helper'
describe Ci::JobArtifact do
set(:artifact) { create(:ci_job_artifact, :archive) }
describe "Associations" do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:job) }
end
it { is_expected.to respond_to(:file) }
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
end
end
describe '#file' do
subject { artifact.file }
context 'the uploader api' do
it { is_expected.to respond_to(:store_dir) }
it { is_expected.to respond_to(:cache_dir) }
it { is_expected.to respond_to(:work_dir) }
end
end
describe '#expire_in' do
subject { artifact.expire_in }
it { is_expected.to be_nil }
context 'when expire_at is specified' do
let(:expire_at) { Time.now + 7.days }
before do
artifact.expire_at = expire_at
end
it { is_expected.to be_within(5).of(expire_at - Time.now) }
end
end
describe '#expire_in=' do
subject { artifact.expire_in }
it 'when assigning valid duration' do
artifact.expire_in = '7 days'
is_expected.to be_within(10).of(7.days.to_i)
end
it 'when assigning invalid duration' do
expect { artifact.expire_in = '7 elephants' }.to raise_error(ChronicDuration::DurationParseError)
is_expected.to be_nil
end
it 'when resetting value' do
artifact.expire_in = nil
is_expected.to be_nil
end
it 'when setting to 0' do
artifact.expire_in = '0'
is_expected.to be_nil
end
end
end
......@@ -133,15 +133,29 @@ describe ProjectStatistics do
describe '#update_build_artifacts_size' do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) }
context 'when new job artifacts are calculated' do
let(:ci_build) { create(:ci_build, pipeline: pipeline) }
before do
create(:ci_job_artifact, :archive, project: pipeline.project, job: ci_build)
end
it "stores the size of related build artifacts" do
statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to be(106365)
end
end
context 'when legacy artifacts are used' do
let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 10.megabytes) }
it "stores the size of related build artifacts" do
expect(statistics.build_artifacts_size).to eq 101.megabytes
statistics.update_build_artifacts_size
expect(statistics.build_artifacts_size).to eq(10.megabytes)
end
end
end
......
......@@ -299,13 +299,16 @@ describe API::Jobs do
context 'normal authentication' do
before do
stub_artifacts_object_storage
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'job with artifacts' do
context 'when artifacts are stored locally' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'authorized user' do
it_behaves_like 'downloads artifact'
end
......@@ -320,7 +323,14 @@ describe API::Jobs do
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
let(:job) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
......@@ -328,6 +338,8 @@ describe API::Jobs do
end
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(404)
end
end
......@@ -440,7 +452,14 @@ describe API::Jobs do
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline, user: api_user) }
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
......
......@@ -948,7 +948,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow temp files to be passed in from any path
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
context 'when job has been erased' do
......@@ -988,15 +988,6 @@ describe API::Runner do
it_behaves_like 'successful artifacts upload'
end
context 'when updates artifact' do
before do
upload_artifacts(file_upload2, headers_with_token)
upload_artifacts(file_upload, headers_with_token)
end
it_behaves_like 'successful artifacts upload'
end
context 'when using runners token' do
it 'responds with forbidden' do
upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
......@@ -1109,7 +1100,7 @@ describe API::Runner do
expect(response).to have_gitlab_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(71759)
expect(stored_artifacts_size).to eq(72821)
end
end
......@@ -1134,7 +1125,7 @@ describe API::Runner do
# by configuring this path we allow files to be passed from @tmpdir only,
# while all temporary files are stored in the system tmp directory
@tmpdir = Dir.mktmpdir
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
end
after do
......@@ -1161,13 +1152,16 @@ describe API::Runner do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::LOCAL_STORE }
before do
create(:ci_job_artifact, :archive, file_store: store, job: job)
download_artifact
end
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts) }
context 'when using job token' do
context 'when artifacts are stored locally' do
let(:download_headers) do
......@@ -1182,7 +1176,8 @@ describe API::Runner do
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, :artifacts, :remote_store) }
let(:store) { JobArtifactUploader::REMOTE_STORE }
let!(:job) { create(:ci_build) }
it 'downloads artifacts' do
expect(response).to have_gitlab_http_status(302)
......@@ -1201,12 +1196,16 @@ describe API::Runner do
context 'when job does not have artifacts' do
it 'responds with not found' do
download_artifact
expect(response).to have_gitlab_http_status(404)
end
end
def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token)
job.reload
get api("/jobs/#{job.id}/artifacts"), params, request_headers
end
end
......
......@@ -3,7 +3,7 @@ require 'spec_helper'
describe API::V3::Builds do
set(:user) { create(:user) }
let(:api_user) { user }
let!(:project) { create(:project, :repository, creator: user, public_builds: false) }
set(:project) { create(:project, :repository, creator: user, public_builds: false) }
let!(:developer) { create(:project_member, :developer, user: user, project: project) }
let(:reporter) { create(:project_member, :reporter, project: project) }
let(:guest) { create(:project_member, :guest, project: project) }
......@@ -215,9 +215,12 @@ describe API::V3::Builds do
end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
it 'returns location redirect' do
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(302)
end
end
......@@ -309,7 +312,14 @@ describe API::V3::Builds do
end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
before do
build.reload
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
......
require 'spec_helper'
describe PipelineSerializer do
let(:user) { create(:user) }
set(:user) { create(:user) }
let(:serializer) do
described_class.new(current_user: user)
......@@ -117,7 +117,8 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
expect(recorded.count).to be_within(1).of(61)
expect(recorded.count).to be_within(1).of(40)
expect(recorded.cached_count).to eq(0)
end
end
......
......@@ -17,7 +17,7 @@ describe Ci::RetryBuildService do
%i[id status user token coverage trace runner artifacts_expire_at
artifacts_file artifacts_metadata artifacts_size created_at
updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by].freeze
erased_at auto_canceled_by job_artifacts job_artifacts_archive job_artifacts_metadata].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
......@@ -35,7 +35,7 @@ describe Ci::RetryBuildService do
end
let(:build) do
create(:ci_build, :failed, :artifacts_expired, :erased,
create(:ci_build, :failed, :artifacts, :expired, :erased,
:queued, :coverage, :tags, :allowed_to_fail, :on_tag,
:triggered, :trace, :teardown_environment,
description: 'my-job', stage: 'test', pipeline: pipeline,
......
require "spec_helper"
describe Projects::UpdatePagesService do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
set(:project) { create(:project, :repository) }
set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
let(:extension) { 'zip' }
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{extension}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
subject { described_class.new(project, build) }
......@@ -12,18 +20,85 @@ describe Projects::UpdatePagesService do
project.remove_pages
end
context 'legacy artifacts' do
%w(tar.gz zip).each do |format|
let(:extension) { format }
context "for valid #{format}" do
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
before do
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metadata)
end
describe 'pages artifacts' do
context 'with expiry date' do
before do
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metadata)
build.artifacts_expire_in = "2 days"
end
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(true)
end
end
context 'without expiry date' do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts?).to eq(false)
end
end
end
it 'succeeds' do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
# Check that all expected files are extracted
%w[index.html zero .hidden/file].each do |filename|
expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
end
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
end
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
project.destroy
expect(project.pages_deployed?).to be_falsey
end
it 'fails if sha on branch is not latest' do
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for empty file' do
build.update_attributes(legacy_artifacts_file: empty_file)
expect(execute).not_to eq(:success)
end
end
end
end
context 'for new artifacts' do
context "for a valid job" do
before do
create(:ci_job_artifact, file: file, job: build)
create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build)
build.reload
end
describe 'pages artifacts' do
......@@ -35,7 +110,7 @@ describe Projects::UpdatePagesService do
it "doesn't delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(true)
expect(build.artifacts?).to eq(true)
end
end
......@@ -43,7 +118,7 @@ describe Projects::UpdatePagesService do
it "does delete artifacts" do
expect(execute).to eq(:success)
expect(build.reload.artifacts_file?).to eq(false)
expect(build.reload.artifacts?).to eq(false)
end
end
end
......@@ -74,13 +149,14 @@ describe Projects::UpdatePagesService do
end
it 'fails if sha on branch is not latest' do
pipeline.update_attributes(sha: 'old_sha')
build.update_attributes(artifacts_file: file)
build.update_attributes(ref: 'feature')
expect(execute).not_to eq(:success)
end
it 'fails for empty file' do
build.update_attributes(artifacts_file: empty_file)
build.job_artifacts_archive.update_attributes(file: empty_file)
expect(execute).not_to eq(:success)
end
end
......@@ -97,7 +173,7 @@ describe Projects::UpdatePagesService do
end
it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file)
build.update_attributes(legacy_artifacts_file: invalid_file)
expect(execute).not_to eq(:success)
end
......@@ -108,8 +184,8 @@ describe Projects::UpdatePagesService do
file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip')
metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta')
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metafile)
build.update_attributes(legacy_artifacts_file: file)
build.update_attributes(legacy_artifacts_metadata: metafile)
allow(build).to receive(:artifacts_metadata_entry)
.and_return(metadata)
......
......@@ -18,7 +18,7 @@ module StubConfiguration
def stub_artifacts_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
uploader: ArtifactUploader,
uploader: JobArtifactUploader,
remote_directory: 'artifacts',
**params)
end
......
......@@ -121,6 +121,7 @@ module TestEnv
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)
FileUtils.mkdir_p(artifacts_path)
end
def clean_gitlab_test_path
......@@ -234,6 +235,10 @@ module TestEnv
Gitlab.config.pages.path
end
def artifacts_path
Gitlab.config.artifacts.path
end
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
......
require 'rake_helper'
describe 'gitlab:artifacts namespace rake task' do
before :all do
before(:context) do
Rake.application.rake_require 'tasks/gitlab/artifacts'
end
describe 'migrate' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
let(:object_storage_enabled) { false }
before do
stub_artifacts_object_storage(enabled: object_storage_enabled)
end
subject { run_rake_task('gitlab:artifacts:migrate') }
context 'legacy artifacts' do
describe 'migrate' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStoreUploader::LOCAL_STORE }
context 'and job does not have file store defined' do
before do
stub_artifacts_object_storage
job.update(artifacts_file_store: nil)
end
let(:object_storage_enabled) { true }
let(:store) { nil }
it "migrates file to remote storage" do
subject
expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
end
end
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage
job
end
let(:object_storage_enabled) { true }
it "migrates file to remote storage" do
subject
expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
end
end
context 'and remote storage is not defined' do
before do
job
end
it "fails to migrate to remote storage" do
subject
expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStoreUploader::REMOTE_STORE }
before do
stub_artifacts_object_storage
job
end
it "file stays on remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
end
end
end
end
context 'job artifacts' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStoreUploader::LOCAL_STORE }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
let(:store) { nil }
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
end
end
context 'and remote storage is defined' do
let(:object_storage_enabled) { true }
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
end
end
context 'and remote storage is not defined' do
it "fails to migrate to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStoreUploader::REMOTE_STORE }
it "file stays on remote storage" do
subject
expect(job.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(job.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
end
end
end
......
require 'spec_helper'
describe JobArtifactUploader do
let(:store) { described_class::LOCAL_STORE }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '#store_dir' do
subject { uploader.store_dir }
let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
context 'when using local storage' do
it { is_expected.to start_with(local_path) }
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
it { is_expected.to end_with(path) }
end
context 'when using remote storage' do
let(:store) { described_class::REMOTE_STORE }
before do
stub_artifacts_object_storage
end
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
it { is_expected.to end_with(path) }
end
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('/tmp/work') }
end
context 'file is stored in valid local_path' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
before do
uploader.store!(file)
end
subject { uploader.file.path }
it { is_expected.to start_with(local_path) }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
require 'rails_helper'
describe ArtifactUploader do
describe LegacyArtifactUploader do
let(:store) { described_class::LOCAL_STORE }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :artifacts_file) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_store_path' do
......@@ -66,11 +66,23 @@ describe ArtifactUploader do
subject { uploader.filename }
it { is_expected.to be_nil }
end
context 'with artifacts' do
let(:job) { create(:ci_build, :artifacts) }
context 'file is stored in valid path' do
let(:file) do
fixture_file_upload(
Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
end
it { is_expected.not_to be_nil }
before do
uploader.store!(file)
end
subject { uploader.file.path }
it { is_expected.to start_with(local_path) }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
end
......@@ -4,18 +4,22 @@ require 'carrierwave/storage/fog'
describe ObjectStoreUploader do
let(:uploader_class) { Class.new(described_class) }
let(:object) { double }
let(:uploader) { uploader_class.new(object, :artifacts_file) }
let(:uploader) { uploader_class.new(object, :file) }
before do
allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
end
describe '#object_store' do
it "calls artifacts_file_store on object" do
expect(object).to receive(:artifacts_file_store)
expect(object).to receive(:file_store)
uploader.object_store
end
context 'when store is null' do
before do
expect(object).to receive(:artifacts_file_store).twice.and_return(nil)
expect(object).to receive(:file_store).twice.and_return(nil)
end
it "returns LOCAL_STORE" do
......@@ -26,7 +30,7 @@ describe ObjectStoreUploader do
context 'when value is set' do
before do
expect(object).to receive(:artifacts_file_store).twice.and_return(described_class::REMOTE_STORE)
expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
end
it "returns given value" do
......@@ -38,7 +42,7 @@ describe ObjectStoreUploader do
describe '#object_store=' do
it "calls artifacts_file_store= on object" do
expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
uploader.object_store = described_class::REMOTE_STORE
end
......@@ -47,7 +51,7 @@ describe ObjectStoreUploader do
describe '#file_storage?' do
context 'when file storage is used' do
before do
expect(object).to receive(:artifacts_file_store).and_return(described_class::LOCAL_STORE)
expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
end
it { expect(uploader).to be_file_storage }
......@@ -57,7 +61,7 @@ describe ObjectStoreUploader do
before do
uploader_class.storage_options double(
object_store: double(enabled: true))
expect(object).to receive(:artifacts_file_store).and_return(described_class::REMOTE_STORE)
expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
end
it { expect(uploader).not_to be_file_storage }
......@@ -82,9 +86,9 @@ describe ObjectStoreUploader do
end
end
context 'when using ArtifactsUploader' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
let(:uploader) { job.artifacts_file }
context 'when using JobArtifactsUploader' do
let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
let(:uploader) { artifact.file }
context 'checking described_class' do
let(:store) { described_class::LOCAL_STORE }
......@@ -103,7 +107,7 @@ describe ObjectStoreUploader do
let(:store) { nil }
it "sets the store to LOCAL_STORE" do
expect(job.artifacts_file_store).to eq(described_class::LOCAL_STORE)
expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
end
end
......@@ -130,8 +134,8 @@ describe ObjectStoreUploader do
end
describe '#migrate!' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
let(:uploader) { job.artifacts_file }
let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
let(:uploader) { artifact.file }
let(:store) { described_class::LOCAL_STORE }
subject { uploader.migrate!(new_store) }
......@@ -214,7 +218,7 @@ describe ObjectStoreUploader do
context 'when subject save fails' do
before do
expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
end
it "does catch an error" do
......@@ -272,7 +276,7 @@ describe ObjectStoreUploader do
context 'when using local storage' do
before do
expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
end
it "does not raise an error" do
......@@ -284,7 +288,7 @@ describe ObjectStoreUploader do
before do
uploader_class.storage_options double(
object_store: double(enabled: true))
expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
end
context 'feature is not available' do
......
......@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
end
context 'with expired artifacts' do
let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
context 'when associated project is valid' do
let(:build) do
create(:ci_build, :artifacts, artifacts_expiry)
end
let(:build) { create(:ci_build, :artifacts, :expired) }
it 'does expire' do
expect(build.reload.artifacts_expired?).to be_truthy
......@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_falsey
end
it 'does nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).to be_nil
it 'does remove the job artifact record' do
expect(build.reload.job_artifacts_archive).to be_nil
end
end
end
context 'with not yet expired artifacts' do
let(:build) do
set(:build) do
create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
end
......@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
......@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_artifacts_archive).not_to be_nil
end
end
context 'for expired artifacts' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
let(:build) { create(:ci_build, :expired) }
it 'is still expired' do
expect(build.reload.artifacts_expired?).to be_truthy
......
......@@ -48,12 +48,12 @@ describe ObjectStorageUploadWorker do
end
end
context 'for artifacts' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
let(:uploader_class) { ArtifactUploader }
context 'for legacy artifacts' do
let(:build) { create(:ci_build, :legacy_artifacts) }
let(:uploader_class) { LegacyArtifactUploader }
let(:subject_class) { Ci::Build }
let(:file_field) { :artifacts_file }
let(:subject_id) { job.id }
let(:subject_id) { build.id }
context 'when local storage is used' do
let(:store) { local }
......@@ -61,13 +61,12 @@ describe ObjectStorageUploadWorker do
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage
job
end
it "migrates file to remote storage" do
perform
expect(job.reload.artifacts_file_store).to eq(remote)
expect(build.reload.artifacts_file_store).to eq(remote)
end
context 'for artifacts_metadata' do
......@@ -76,10 +75,34 @@ describe ObjectStorageUploadWorker do
it 'migrates metadata to remote storage' do
perform
expect(job.reload.artifacts_metadata_store).to eq(remote)
expect(build.reload.artifacts_metadata_store).to eq(remote)
end
end
end
end
end
context 'for job artifacts' do
let(:artifact) { create(:ci_job_artifact, :archive) }
let(:uploader_class) { JobArtifactUploader }
let(:subject_class) { Ci::JobArtifact }
let(:file_field) { :file }
let(:subject_id) { artifact.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage
end
it "migrates file to remote storage" do
perform
expect(artifact.reload.file_store).to eq(remote)
end
end
end
end
end